lang
stringclasses
1 value
license
stringclasses
13 values
stderr
stringlengths
0
350
commit
stringlengths
40
40
returncode
int64
0
128
repos
stringlengths
7
45.1k
new_contents
stringlengths
0
1.87M
new_file
stringlengths
6
292
old_contents
stringlengths
0
1.87M
message
stringlengths
6
9.26k
old_file
stringlengths
6
292
subject
stringlengths
0
4.45k
Java
apache-2.0
b529b62a4f2e4b7482564ad6c65996c8f5b0d824
0
gustavoanatoly/flink,greghogan/flink,zentol/flink,kaibozhou/flink,mbode/flink,GJL/flink,mylog00/flink,greghogan/flink,gyfora/flink,lincoln-lil/flink,StephanEwen/incubator-flink,yew1eb/flink,aljoscha/flink,fhueske/flink,haohui/flink,mtunique/flink,DieBauer/flink,rmetzger/flink,fanyon/flink,xccui/flink,gustavoanatoly/flink,hwstreaming/flink,xccui/flink,lincoln-lil/flink,jinglining/flink,zimmermatt/flink,bowenli86/flink,mbode/flink,PangZhi/flink,zjureel/flink,tillrohrmann/flink,Xpray/flink,zohar-mizrahi/flink,wwjiang007/flink,tzulitai/flink,tony810430/flink,xccui/flink,gustavoanatoly/flink,zohar-mizrahi/flink,tzulitai/flink,mtunique/flink,twalthr/flink,WangTaoTheTonic/flink,rmetzger/flink,greghogan/flink,rmetzger/flink,godfreyhe/flink,shaoxuan-wang/flink,bowenli86/flink,shaoxuan-wang/flink,StephanEwen/incubator-flink,kl0u/flink,kaibozhou/flink,sunjincheng121/flink,fhueske/flink,zjureel/flink,xccui/flink,clarkyzl/flink,haohui/flink,hequn8128/flink,tony810430/flink,WangTaoTheTonic/flink,mbode/flink,hequn8128/flink,mylog00/flink,zjureel/flink,gustavoanatoly/flink,apache/flink,zentol/flink,tillrohrmann/flink,apache/flink,zentol/flink,tillrohrmann/flink,lincoln-lil/flink,DieBauer/flink,clarkyzl/flink,PangZhi/flink,darionyaphet/flink,godfreyhe/flink,xccui/flink,bowenli86/flink,StephanEwen/incubator-flink,gyfora/flink,yew1eb/flink,hongyuhong/flink,tony810430/flink,GJL/flink,greghogan/flink,zhangminglei/flink,greghogan/flink,tony810430/flink,lincoln-lil/flink,PangZhi/flink,bowenli86/flink,mbode/flink,xccui/flink,shaoxuan-wang/flink,PangZhi/flink,sunjincheng121/flink,Xpray/flink,twalthr/flink,fhueske/flink,wwjiang007/flink,mylog00/flink,fhueske/flink,apache/flink,Xpray/flink,sunjincheng121/flink,tzulitai/flink,mtunique/flink,aljoscha/flink,darionyaphet/flink,darionyaphet/flink,tzulitai/flink,rmetzger/flink,tillrohrmann/flink,twalthr/flink,godfreyhe/flink,zimmermatt/flink,kl0u/flink,gyfora/flink,haohui/flink,kl0u/flink,ueshin/apache-flink,twalthr/flink,GJL/flink,fhueske/flink,mtunique/flink,tony810430/flink,fanzhidongyzby/flink,apache/flink,yew1eb/flink,hequn8128/flink,hequn8128/flink,wwjiang007/flink,zjureel/flink,godfreyhe/flink,gyfora/flink,zohar-mizrahi/flink,zhangminglei/flink,kl0u/flink,GJL/flink,ueshin/apache-flink,jinglining/flink,fanzhidongyzby/flink,fhueske/flink,clarkyzl/flink,zentol/flink,shaoxuan-wang/flink,hwstreaming/flink,WangTaoTheTonic/flink,godfreyhe/flink,DieBauer/flink,zentol/flink,kaibozhou/flink,hequn8128/flink,hwstreaming/flink,jinglining/flink,tzulitai/flink,gustavoanatoly/flink,apache/flink,zentol/flink,aljoscha/flink,bowenli86/flink,greghogan/flink,rmetzger/flink,rmetzger/flink,fanyon/flink,zimmermatt/flink,yew1eb/flink,kl0u/flink,hongyuhong/flink,xccui/flink,gyfora/flink,GJL/flink,godfreyhe/flink,sunjincheng121/flink,wwjiang007/flink,fanzhidongyzby/flink,lincoln-lil/flink,zimmermatt/flink,gyfora/flink,zjureel/flink,kaibozhou/flink,kl0u/flink,StephanEwen/incubator-flink,DieBauer/flink,gyfora/flink,StephanEwen/incubator-flink,DieBauer/flink,zhangminglei/flink,zentol/flink,tony810430/flink,tillrohrmann/flink,hwstreaming/flink,wwjiang007/flink,tzulitai/flink,fanzhidongyzby/flink,hongyuhong/flink,twalthr/flink,zjureel/flink,mylog00/flink,zjureel/flink,zimmermatt/flink,apache/flink,sunjincheng121/flink,zhangminglei/flink,aljoscha/flink,zohar-mizrahi/flink,tillrohrmann/flink,tony810430/flink,hongyuhong/flink,tillrohrmann/flink,aljoscha/flink,fanyon/flink,mtunique/flink,fanzhidongyzby/flink,haohui/flink,ueshin/apache-flink,aljoscha/flink,WangTaoTheTonic/flink,hongyuhong/flink,fanyon/flink,PangZhi/flink,WangTaoTheTonic/flink,kaibozhou/flink,ueshin/apache-flink,mbode/flink,hequn8128/flink,zohar-mizrahi/flink,StephanEwen/incubator-flink,bowenli86/flink,lincoln-lil/flink,clarkyzl/flink,shaoxuan-wang/flink,Xpray/flink,darionyaphet/flink,jinglining/flink,twalthr/flink,darionyaphet/flink,jinglining/flink,lincoln-lil/flink,wwjiang007/flink,Xpray/flink,godfreyhe/flink,hwstreaming/flink,rmetzger/flink,shaoxuan-wang/flink,sunjincheng121/flink,ueshin/apache-flink,GJL/flink,clarkyzl/flink,zhangminglei/flink,fanyon/flink,jinglining/flink,mylog00/flink,haohui/flink,twalthr/flink,apache/flink,kaibozhou/flink,wwjiang007/flink,yew1eb/flink
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.graph.test; import com.google.common.base.Charsets; import com.google.common.io.Files; import org.apache.flink.graph.example.LabelPropagationExample; import org.apache.flink.test.util.MultipleProgramsTestBase; import org.junit.After; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.io.File; @RunWith(Parameterized.class) public class LabelPropagationExampleITCase extends MultipleProgramsTestBase { public LabelPropagationExampleITCase(ExecutionMode mode){ super(mode); } private String resultPath; private String expectedResult; @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); @Before public void before() throws Exception{ resultPath = tempFolder.newFile().toURI().toString(); } @After public void after() throws Exception{ compareResultsByLinesInMemory(expectedResult, resultPath); } @Test public void testSingleIteration() throws Exception { /* * Test one iteration of label propagation example with a simple graph */ final String vertices = "1 10\n" + "2 10\n" + "3 30\n" + "4 40\n" + "5 40\n" + "6 40\n" + "7 70\n"; final String edges = "1 3\n" + "2 3\n" + "4 7\n" + "5 7\n" + "6 7\n" + "7 3\n"; String verticesPath = createTempFile(vertices); String edgesPath = createTempFile(edges); LabelPropagationExample.main(new String[] {verticesPath, edgesPath, resultPath, "7", "1"}); expectedResult = "1,10\n" + "2,10\n" + "3,10\n" + "4,40\n" + "5,40\n" + "6,40\n" + "7,40\n"; } @Test public void testTieBreaker() throws Exception { /* * Test the label propagation example where a tie must be broken */ final String vertices = "1 10\n" + "2 10\n" + "3 10\n" + "4 10\n" + "5 0\n" + "6 20\n" + "7 20\n" + "8 20\n" + "9 20\n"; final String edges = "1 5\n" + "2 5\n" + "3 5\n" + "4 5\n" + "6 5\n" + "7 5\n" + "8 5\n" + "9 5\n"; String verticesPath = createTempFile(vertices); String edgesPath = createTempFile(edges); LabelPropagationExample.main(new String[] {verticesPath, edgesPath, resultPath, "9", "1"}); expectedResult = "1,10\n" + "2,10\n" + "3,10\n" + "4,10\n" + "5,20\n" + "6,20\n" + "7,20\n" + "8,20\n" + "9,20\n"; } @Test public void testTermination() throws Exception { /* * Test the label propagation example where the algorithm terminates on the first iteration */ final String vertices = "1 10\n" + "2 10\n" + "3 10\n" + "4 40\n" + "5 40\n" + "6 40\n"; final String edges = "1 2\n" + "2 3\n" + "3 1\n" + "4 5\n" + "5 6\n" + "6 4\n"; String verticesPath = createTempFile(vertices); String edgesPath = createTempFile(edges); LabelPropagationExample.main(new String[]{verticesPath, edgesPath, resultPath, "6", "2"}); expectedResult = "1,10\n" + "2,10\n" + "3,10\n" + "4,40\n" + "5,40\n" + "6,40\n"; } // ------------------------------------------------------------------------- // Util methods // ------------------------------------------------------------------------- private String createTempFile(final String rows) throws Exception { File tempFile = tempFolder.newFile(); Files.write(rows, tempFile, Charsets.UTF_8); return tempFile.toURI().toString(); } }
flink-staging/flink-gelly/src/test/java/org/apache/flink/graph/test/LabelPropagationExampleITCase.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.graph.test; import com.google.common.base.Charsets; import com.google.common.io.Files; import org.apache.flink.graph.example.LabelPropagationExample; import org.apache.flink.test.util.MultipleProgramsTestBase; import org.junit.After; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.io.File; @RunWith(Parameterized.class) public class LabelPropagationExampleITCase extends MultipleProgramsTestBase { public LabelPropagationExampleITCase(ExecutionMode mode){ super(mode); } private String resultPath; private String expectedResult; private String verticesPath; private String edgesPath; @Rule public TemporaryFolder tempFolder = new TemporaryFolder(); @Before public void before() throws Exception{ resultPath = tempFolder.newFile().toURI().toString(); final String vertices = "1 1\n" + "2 2\n" + "3 3\n" + "4 4\n" + "5 5\n"; final String edges = "1 2\n" + "1 3\n" + "2 3\n" + "3 4\n" + "3 5\n" + "4 5\n" + "5 1\n"; File verticesFile = tempFolder.newFile(); Files.write(vertices, verticesFile, Charsets.UTF_8); File edgesFile = tempFolder.newFile(); Files.write(edges, edgesFile, Charsets.UTF_8); verticesPath = verticesFile.toURI().toString(); edgesPath = edgesFile.toURI().toString(); } @After public void after() throws Exception{ compareResultsByLinesInMemory(expectedResult, resultPath); } @Test public void testLabelPropagation() throws Exception { /* * Test the label propagation example */ LabelPropagationExample.main(new String[] {verticesPath, edgesPath, resultPath, "5", "16"}); expectedResult = "1,5\n" + "2,5\n" + "3,5\n" + "4,5\n" + "5,5\n"; } }
[FLINK-1522][FLINK-1576][gelly] Added more test cases for Label Propagation This closes #441
flink-staging/flink-gelly/src/test/java/org/apache/flink/graph/test/LabelPropagationExampleITCase.java
[FLINK-1522][FLINK-1576][gelly] Added more test cases for Label Propagation
Java
apache-2.0
45d142db694bf21be60fbb6841b1c578f9c3064b
0
vipshop/Saturn,vipshop/Saturn,vipshop/Saturn,vipshop/Saturn,vipshop/Saturn
/** * Copyright 2016 vip.com. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * </p> */ package com.vip.saturn.job.console.service.impl; import com.google.common.base.Strings; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.vip.saturn.job.console.SaturnEnvProperties; import com.vip.saturn.job.console.domain.NamespaceDomainInfo; import com.vip.saturn.job.console.domain.RegistryCenterClient; import com.vip.saturn.job.console.domain.RegistryCenterConfiguration; import com.vip.saturn.job.console.domain.ZkCluster; import com.vip.saturn.job.console.exception.SaturnJobConsoleException; import com.vip.saturn.job.console.exception.SaturnJobConsoleHttpException; import com.vip.saturn.job.console.mybatis.entity.NamespaceInfo; import com.vip.saturn.job.console.mybatis.entity.NamespaceZkClusterMapping; import com.vip.saturn.job.console.mybatis.entity.SystemConfig; import com.vip.saturn.job.console.mybatis.entity.ZkClusterInfo; import com.vip.saturn.job.console.mybatis.service.NamespaceInfoService; import com.vip.saturn.job.console.mybatis.service.NamespaceZkClusterMapping4SqlService; import com.vip.saturn.job.console.mybatis.service.ZkClusterInfoService; import com.vip.saturn.job.console.repository.zookeeper.CuratorRepository; import com.vip.saturn.job.console.service.RegistryCenterService; import com.vip.saturn.job.console.service.SystemConfigService; import com.vip.saturn.job.console.service.cache.DashboardLeaderHandler; import com.vip.saturn.job.console.service.helper.SystemConfigProperties; import com.vip.saturn.job.console.service.helper.ZkClusterMappingUtils; import com.vip.saturn.job.console.utils.*; import com.vip.saturn.job.integrate.service.ReportAlarmService; import com.vip.saturn.job.integrate.service.UpdateJobConfigService; import com.vip.saturn.job.sharding.NamespaceShardingManager; import com.vip.saturn.job.sharding.listener.AbstractConnectionListener; import jxl.Workbook; import jxl.write.Label; import jxl.write.WritableSheet; import jxl.write.WritableWorkbook; import org.apache.commons.lang3.StringUtils; import org.apache.curator.framework.CuratorFramework; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpStatus; import org.springframework.transaction.annotation.Transactional; import org.springframework.util.CollectionUtils; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import javax.annotation.Resource; import java.io.File; import java.util.*; import java.util.Map.Entry; import java.util.concurrent.*; public class RegistryCenterServiceImpl implements RegistryCenterService { protected static final String DEFAULT_CONSOLE_CLUSTER_ID = "default"; protected static final String NAMESPACE_CREATOR_NAME = "REST_API"; protected static final String ERR_MSG_TEMPLATE_FAIL_TO_CREATE = "Fail to create new namespace {%s} for reason {%s}"; protected static final String ERR_MSG_NS_NOT_FOUND = "The namespace does not exists."; protected static final String ERR_MSG_NS_ALREADY_EXIST = "Invalid request. Namespace: {%s} already existed"; private static final Logger log = LoggerFactory.getLogger(RegistryCenterServiceImpl.class); @Resource private CuratorRepository curatorRepository; @Resource private ReportAlarmService reportAlarmService; @Resource private UpdateJobConfigService updateJobConfigService; @Resource private ZkClusterInfoService zkClusterInfoService; @Resource private SystemConfigService systemConfigService; @Resource private NamespaceZkClusterMapping4SqlService namespaceZkClusterMapping4SqlService; @Resource private NamespaceInfoService namespaceInfoService; /** * 为保证values有序 **/ private LinkedHashMap<String, ZkCluster> zkClusterMap = new LinkedHashMap<>(); private ConcurrentHashMap<String, DashboardLeaderHandler> dashboardLeaderTreeCacheMap = new ConcurrentHashMap<>(); // namespace is unique in all zkClusters private ConcurrentHashMap<String /** nns */ , RegistryCenterClient> registryCenterClientMap = new ConcurrentHashMap<>(); private ConcurrentHashMap<String, Object> nnsLock = new ConcurrentHashMap<>(); // maybe could remove in right time // namespace is unique in all zkClusters private ConcurrentHashMap<String /** nns **/ , NamespaceShardingManager> namespaceShardingListenerManagerMap = new ConcurrentHashMap<>(); private List<String> allOnlineNamespaces = new ArrayList<>(); private String consoleClusterId; private Set<String> restrictComputeZkClusterKeys = Sets.newHashSet(); private Timer localRefreshTimer = null; private Timer localRefreshIfNecessaryTimer = null; private ExecutorService localRefreshThreadPool = null; @PostConstruct public void init() { getConsoleClusterId(); localRefresh(); initLocalRefreshThreadPool(); startLocalRefreshTimer(); startLocalRefreshIfNecessaryTimer(); } private void getConsoleClusterId() { if (StringUtils.isBlank(SaturnEnvProperties.VIP_SATURN_CONSOLE_CLUSTER_ID)) { log.info( "No environment variable or system property of [VIP_SATURN_CONSOLE_CLUSTER] is set. Use the default Id"); consoleClusterId = DEFAULT_CONSOLE_CLUSTER_ID; } else { consoleClusterId = SaturnEnvProperties.VIP_SATURN_CONSOLE_CLUSTER_ID; } } @PreDestroy public void destroy() { Iterator<Entry<String, ZkCluster>> iterator = zkClusterMap.entrySet().iterator(); while (iterator.hasNext()) { closeZkCluster(iterator.next().getValue()); } if (localRefreshTimer != null) { localRefreshTimer.cancel(); } if (localRefreshIfNecessaryTimer != null) { localRefreshIfNecessaryTimer.cancel(); } if (localRefreshThreadPool != null) { localRefreshThreadPool.shutdownNow(); } } private void initLocalRefreshThreadPool() { localRefreshThreadPool = Executors .newSingleThreadExecutor(new ConsoleThreadFactory("refresh-RegCenter-thread", false)); } private void startLocalRefreshTimer() { localRefreshTimer = new Timer("refresh-RegCenter-timer", true); // 每隔5分钟刷新一次 localRefreshTimer.scheduleAtFixedRate(new TimerTask() { @Override public void run() { try { localRefreshThreadPool.submit(new Runnable() { @Override public void run() { try { localRefresh(); } catch (Exception e) { log.error(e.getMessage(), e); } } }); } catch (Exception e) { log.error(e.getMessage(), e); } } }, 1000L * 60 * 5, 1000L * 60 * 5); } private void startLocalRefreshIfNecessaryTimer() { localRefreshIfNecessaryTimer = new Timer("refresh-RegCenter-if-necessary-timer", true); localRefreshIfNecessaryTimer.schedule(new TimerTask() { private String lastUuid = null; @Override public void run() { try { String uuid = systemConfigService .getValueDirectly(SystemConfigProperties.REFRESH_REGISTRY_CENTER_UUID); if (StringUtils.isBlank(uuid)) { notifyRefreshRegCenter(); } else if (!uuid.equals(lastUuid)) { lastUuid = uuid; localRefreshThreadPool.submit(new Runnable() { @Override public void run() { try { localRefresh(); } catch (Exception e) { log.error(e.getMessage(), e); } } }); } } catch (Exception e) { log.error(e.getMessage(), e); } } }, 1000, 1000); } private synchronized void localRefresh() { try { log.info("Start refresh RegCenter"); long startTime = System.currentTimeMillis(); refreshRestrictComputeZkClusters(); if (restrictComputeZkClusterKeys.isEmpty()) { log.warn("根据Console的集群ID:" + consoleClusterId + ",找不到配置可以参与Sharding和Dashboard计算的zk集群"); return; } refreshRegistryCenter(); refreshDashboardLeaderTreeCache(); refreshNamespaceShardingListenerManagerMap(); log.info("End refresh RegCenter, cost {}ms", System.currentTimeMillis() - startTime); } catch (Exception e) { log.error("refresh RegCenter error", e); } } /** * 解析Console集群和zk的映射关系 数据库中配置的例子如下: CONSOLE-1:/saturn,/forVdos;CONSOLE-2:/zk3; 如果不存在此配置项,则可以计算所有zk集群; */ private void refreshRestrictComputeZkClusters() throws SaturnJobConsoleException { // clear 当前可计算的zkCluster集群列表 restrictComputeZkClusterKeys.clear(); String allMappingStr = systemConfigService.getValueDirectly(SystemConfigProperties.CONSOLE_ZK_CLUSTER_MAPPING); if (StringUtils.isBlank(allMappingStr)) { log.info( "CONSOLE_ZK_CLUSTER_MAPPING is not configured in sys_config, so all zk clusters can be computed by this console"); restrictComputeZkClusterKeys.addAll(getZkClusterKeys()); return; } allMappingStr = StringUtils.deleteWhitespace(allMappingStr); String[] singleConsoleMappingArray = allMappingStr.split(";"); for (String singleConsoleMappingStr : singleConsoleMappingArray) { String[] consoleAndClusterKeyArray = singleConsoleMappingStr.split(":"); if (consoleAndClusterKeyArray.length != 2) { throw new SaturnJobConsoleException("the CONSOLE_ZK_CLUSTER_MAPPING(" + Arrays.toString(consoleAndClusterKeyArray) + ") format is not correct, should be like console_cluster_id:zk_cluster_id"); } String tempConsoleClusterId = consoleAndClusterKeyArray[0]; String zkClusterKeyStr = consoleAndClusterKeyArray[1]; if (consoleClusterId.equals(tempConsoleClusterId)) { String[] zkClusterKeyArray = zkClusterKeyStr.trim().split(","); restrictComputeZkClusterKeys.addAll(Arrays.asList(zkClusterKeyArray)); log.info("the current console cluster:{} can do sharding and dashboard to zk clusters:{}", consoleClusterId, restrictComputeZkClusterKeys); return; } } } /** * 判断该集群是否能被本Console计算 */ private boolean isZKClusterCanBeComputed(String clusterKey) { if (CollectionUtils.isEmpty(restrictComputeZkClusterKeys)) { return false; } return restrictComputeZkClusterKeys.contains(clusterKey); } /** * 判断是否同机房 */ private boolean isCurrentConsoleInTheSameIdc(String clusterKey) { return ZkClusterMappingUtils.isCurrentConsoleInTheSameIdc(systemConfigService, clusterKey); } private String generateShardingLeadershipHostValue() { return LocalHostService.cachedIpAddress + "-" + UUID.randomUUID().toString(); } /** * 创建或者移除namespaceShardingManager. */ private void refreshNamespaceShardingListenerManagerMap() { Iterator<Entry<String, ZkCluster>> iterator = zkClusterMap.entrySet().iterator(); while (iterator.hasNext()) { Entry<String, ZkCluster> next = iterator.next(); ZkCluster zkCluster = next.getValue(); ArrayList<RegistryCenterConfiguration> regCenterConfList = zkCluster.getRegCenterConfList(); if (regCenterConfList == null) { continue; } for (RegistryCenterConfiguration conf : regCenterConfList) { String nns = conf.getNameAndNamespace(); if (!namespaceShardingListenerManagerMap.containsKey(nns)) { if (isZKClusterCanBeComputed(conf.getZkClusterKey())) { createNamespaceShardingManager(conf, nns); } } else { NamespaceShardingManager namespaceShardingManager = namespaceShardingListenerManagerMap.get(nns); if (!isZKClusterCanBeComputed(conf.getZkClusterKey())) { namespaceShardingManager.stopWithCurator(); namespaceShardingListenerManagerMap.remove(nns); } } } } } private void createNamespaceShardingManager(RegistryCenterConfiguration conf, String nns) { try { log.info("Start NamespaceShardingManager {}", nns); String namespace = conf.getNamespace(); String digest = conf.getDigest(); CuratorFramework client = curatorRepository.connect(conf.getZkAddressList(), namespace, digest); if (client == null) { log.warn("fail to connect to zk during create NamespaceShardingManager"); return; } NamespaceShardingManager namespaceShardingManager = null; try { namespaceShardingManager = new NamespaceShardingManager(client, namespace, generateShardingLeadershipHostValue(), reportAlarmService, updateJobConfigService); namespaceShardingManager.start(); if (namespaceShardingListenerManagerMap.putIfAbsent(nns, namespaceShardingManager) != null) { // 已经存在,则关闭当前的client try { namespaceShardingManager.stopWithCurator(); } catch (Exception e) { log.error(e.getMessage(), e); } } else { log.info("Done starting NamespaceShardingManager {}", nns); } } catch (Exception e) { log.error(e.getMessage(), e); if (namespaceShardingManager != null) { try { namespaceShardingManager.stop(); } catch (Exception e2) { log.error(e2.getMessage(), e2); } } client.close(); } } catch (Exception e) { log.error(e.getMessage(), e); } } private void refreshRegistryCenter() { List<String> allOnlineNamespacesTemp = new ArrayList<>(); // 获取新的zkClusters Map<String, ZkCluster> newClusterMap = getZkClusterInfo(); // 对比旧的。不包含的,关闭操作;包含的,检查属性是否相同,如果相同,则直接赋值,否则,关闭旧的 closeInvalidZkClient(newClusterMap); // 完善curatorFramework。如果没有,则新建 connectToZkClusterIfPossible(newClusterMap); // 完善ZkCluster中的注册中心信息,关闭迁移了的域,新建迁移过来的域 for (Map.Entry<String, ZkCluster> zkClusterEntry : newClusterMap.entrySet()) { ZkCluster zkCluster = zkClusterEntry.getValue(); List<NamespaceZkClusterMapping> nsZkClusterMappingList = namespaceZkClusterMapping4SqlService .getAllMappingsOfCluster(zkClusterEntry.getKey()); // zkCluster对应的namespace列表 List<RegistryCenterConfiguration> regCenterConfList = zkCluster.getRegCenterConfList(); closeMoveOutNamespace(zkClusterEntry.getKey(), nsZkClusterMappingList, regCenterConfList); initOrUpdateNamespace(allOnlineNamespacesTemp, zkCluster, nsZkClusterMappingList, regCenterConfList); } // 直接赋值新的 zkClusterMap = (LinkedHashMap<String, ZkCluster>) newClusterMap; allOnlineNamespaces = allOnlineNamespacesTemp; } private void initOrUpdateNamespace(List<String> allOnlineNamespacesTemp, ZkCluster zkCluster, List<NamespaceZkClusterMapping> nsZkClusterMappingList, List<RegistryCenterConfiguration> regCenterConfList) { if (nsZkClusterMappingList == null || zkCluster.isOffline()) { return; } CuratorFramework curatorFramework = zkCluster.getCuratorFramework(); ArrayList<RegistryCenterConfiguration> newRegCenterConfList = new ArrayList<>(); try { for (NamespaceZkClusterMapping mapping : nsZkClusterMappingList) { String namespace = mapping.getNamespace(); // 过滤$SaturnSelf if (SaturnSelfNodePath.ROOT_NAME.equals(namespace)) { continue; } // 如果这个标记为true,意味是新域,或者是迁移过来的域 boolean isNamespaceNotIncludeInOriginRegCenerConfList = false; if (isNamespaceNotIncludeInRegCenterConfList(namespace, regCenterConfList)) { // 对于新添加的域,需要初始化一些znode initNamespaceZkNodeIfNecessary(namespace, curatorFramework); isNamespaceNotIncludeInOriginRegCenerConfList = true; } try { if (isNamespaceNotIncludeInOriginRegCenerConfList || isNewerVersionSaturnNamespace(namespace, curatorFramework)) { NamespaceInfo namespaceInfo = getNamespaceInfo(namespace); if (namespaceInfo == null && !isNamespaceInfoNotFoundCanBeCalculate()) { log.warn("No info about namespace {}, just skip it.", namespace); continue; } RegistryCenterConfiguration conf = new RegistryCenterConfiguration(mapping.getName(), namespace, zkCluster.getZkAddr()); conf.setZkClusterKey(zkCluster.getZkClusterKey()); conf.setVersion(getVersion(namespace, curatorFramework)); conf.setZkAlias(zkCluster.getZkAlias()); if (namespaceInfo != null) { postConstructRegistryCenterConfiguration(conf, namespaceInfo.getContent()); } newRegCenterConfList.add(conf); if (!allOnlineNamespacesTemp.contains(namespace)) { allOnlineNamespacesTemp.add(namespace); } } } catch (Exception e) { log.error(e.getMessage(), e); } } } catch (Exception e) { log.error(e.getMessage(), e); } // 如果nns有改变,则需要关闭旧的 if (regCenterConfList != null) { for (RegistryCenterConfiguration conf : regCenterConfList) { String namespace = conf.getNamespace(); String nns = conf.getNameAndNamespace(); for (RegistryCenterConfiguration confNew : newRegCenterConfList) { String namespaceNew = confNew.getNamespace(); if (namespace.equals(namespaceNew)) { String nnsNew = confNew.getNameAndNamespace(); if (!nns.equals(nnsNew)) { synchronized (getNnsLock(nns)) { closeNamespace(nns); log.info("closed the namespace info because it's nns is changed, namespace is {}", namespace); } } break; } } } } int oldSize = regCenterConfList != null ? regCenterConfList.size() : 0; if (oldSize != newRegCenterConfList.size()) { log.info("Zkcluster [{}] namespace size change from {} to {}", zkCluster.getZkClusterKey(), oldSize, newRegCenterConfList.size()); } zkCluster.setRegCenterConfList(newRegCenterConfList); } protected boolean isNamespaceInfoNotFoundCanBeCalculate() { return true; } private boolean isNamespaceNotIncludeInRegCenterConfList(String namespace, List<RegistryCenterConfiguration> regCenterConfList) { if (regCenterConfList == null || regCenterConfList.isEmpty()) { return true; } for (RegistryCenterConfiguration conf : regCenterConfList) { if (namespace.equals(conf.getNamespace())) { return false; } } return true; } /** * 确保namespace的作业版本>1.0.9 * @return true namespace里面的作业均在>1.0.9以后创建; false, 反之亦然 */ private boolean isNewerVersionSaturnNamespace(String namespace, CuratorFramework curatorFramework) { try { // chcek if /$Job exists if (null != curatorFramework.checkExists().forPath("/" + namespace + JobNodePath.get$JobsNodePath())) { return true; } String executorsPath = "/" + namespace + ExecutorNodePath.getExecutorNodePath(); if (null != curatorFramework.checkExists().forPath(executorsPath)) { List<String> executors = curatorFramework.getChildren().forPath(executorsPath); if (executors != null && !executors.isEmpty()) { for (String exe : executors) { String lastBeginTimePath = executorsPath + "/" + exe + "/lastBeginTime"; if (null != curatorFramework.checkExists().forPath(lastBeginTimePath)) { return true; } } } } return false; } catch (Exception e) { log.error(e.getMessage(), e); return false; } } private NamespaceInfo getNamespaceInfo(String namespace) { try { return namespaceInfoService.selectByNamespace(namespace); } catch (Exception e) { log.error("fail to get namespaceInfo:{} from DB", namespace); return null; } } // For subclass override protected void postConstructRegistryCenterConfiguration(RegistryCenterConfiguration conf, String content) { // do nothing here } private void closeMoveOutNamespace(String zkClusterKey, List<NamespaceZkClusterMapping> nsZkClusterMappingList, List<RegistryCenterConfiguration> regCenterConfList) { if (regCenterConfList == null) { return; } Iterator<RegistryCenterConfiguration> regIter = regCenterConfList.iterator(); while (regIter.hasNext()) { RegistryCenterConfiguration conf = regIter.next(); String namespace = conf.getNamespace(); String nns = conf.getNameAndNamespace(); boolean include = false; if (nsZkClusterMappingList != null) { for (NamespaceZkClusterMapping mapping : nsZkClusterMappingList) { if (namespace.equals(mapping.getNamespace())) { include = true; break; } } } if (!include) { synchronized (getNnsLock(nns)) { regIter.remove(); closeNamespace(nns); log.info("closed the moved namespace info, namespace is {}, old zkClusterKey is {}", namespace, zkClusterKey); } } } } private void closeInvalidZkClient(Map<String, ZkCluster> newClusterMap) { Iterator<Entry<String, ZkCluster>> iterator = zkClusterMap.entrySet().iterator(); while (iterator.hasNext()) { Entry<String, ZkCluster> next = iterator.next(); String zkClusterKey = next.getKey(); ZkCluster zkCluster = next.getValue(); if (!newClusterMap.containsKey(zkClusterKey)) { iterator.remove(); closeZkCluster(zkCluster); } else { ZkCluster newZkCluster = newClusterMap.get(zkClusterKey); if (zkCluster.equals(newZkCluster)) { newClusterMap.put(zkClusterKey, zkCluster); } else { iterator.remove(); closeZkCluster(zkCluster); } } } } private Map<String, ZkCluster> getZkClusterInfo() { LinkedHashMap<String, ZkCluster> newClusterMap = new LinkedHashMap<>(); List<ZkClusterInfo> allZkClusterInfoList = zkClusterInfoService.getAllZkClusterInfo(); if (allZkClusterInfoList != null) { for (ZkClusterInfo zkClusterInfo : allZkClusterInfoList) { ZkCluster zkCluster = new ZkCluster(); zkCluster.setZkClusterKey(zkClusterInfo.getZkClusterKey()); zkCluster.setZkAlias(zkClusterInfo.getAlias()); zkCluster.setZkAddr(zkClusterInfo.getConnectString()); newClusterMap.put(zkClusterInfo.getZkClusterKey(), zkCluster); } } return newClusterMap; } private void connectToZkClusterIfPossible(Map<String, ZkCluster> newClusterMap) { Iterator<Entry<String, ZkCluster>> iterator = newClusterMap.entrySet().iterator(); while (iterator.hasNext()) { Entry<String, ZkCluster> next = iterator.next(); ZkCluster zkCluster = next.getValue(); CuratorFramework curatorFramework = zkCluster.getCuratorFramework(); if (curatorFramework == null) { createNewConnect(zkCluster); } } } private Object getNnsLock(String nns) { Object lock = nnsLock.get(nns); if (lock == null) { lock = new Object(); Object pre = nnsLock.putIfAbsent(nns, lock); if (pre != null) { lock = pre; } } return lock; } private void closeNamespace(String nns) { try { RegistryCenterClient registryCenterClient = registryCenterClientMap.remove(nns); if (registryCenterClient != null) { registryCenterClient.close(); } } catch (Exception e) { log.error(e.getMessage(), e); } try { NamespaceShardingManager namespaceShardingManager = namespaceShardingListenerManagerMap.remove(nns); if (namespaceShardingManager != null) { namespaceShardingManager.stopWithCurator(); } } catch (Exception e) { log.error(e.getMessage(), e); } } private void refreshDashboardLeaderTreeCache() { closeDeprecatedDashboardLeaderTreeCache(); Iterator<Entry<String, ZkCluster>> iterator = zkClusterMap.entrySet().iterator(); while (iterator.hasNext()) { Entry<String, ZkCluster> next = iterator.next(); String zkClusterKey = next.getKey(); ZkCluster zkCluster = next.getValue(); if (needToRefreshDashboardTreeCache(zkCluster, zkClusterKey)) { DashboardLeaderHandler dashboardLeaderHandler = null; try { dashboardLeaderHandler = new DashboardLeaderHandler(zkCluster.getZkAlias(), zkCluster.getCuratorFramework()); dashboardLeaderHandler.start(); dashboardLeaderTreeCacheMap.put(zkClusterKey, dashboardLeaderHandler); } catch (Exception e) { log.error(e.getMessage(), e); if (dashboardLeaderHandler != null) { dashboardLeaderHandler.shutdown(); } } } } } private boolean needToRefreshDashboardTreeCache(ZkCluster zkCluster, String zkClusterKey) { if (zkCluster.isOffline()) { return false; } if (dashboardLeaderTreeCacheMap.containsKey(zkClusterKey)) { return false; } return isZKClusterCanBeComputed(zkClusterKey) && isCurrentConsoleInTheSameIdc(zkClusterKey); } /** * 将不在本console服务器中进行Dashboard计算的DashboardLeaderTreeCache关闭 */ private void closeDeprecatedDashboardLeaderTreeCache() { if (dashboardLeaderTreeCacheMap == null || dashboardLeaderTreeCacheMap.isEmpty()) { return; } for (String zkClusterKey : dashboardLeaderTreeCacheMap.keySet()) { if (!isZKClusterCanBeComputed(zkClusterKey) || !isCurrentConsoleInTheSameIdc(zkClusterKey)) { log.info("close the deprecated dashboard leader tree Cache, {}", zkClusterKey); DashboardLeaderHandler oldDashboardLeaderHandler = dashboardLeaderTreeCacheMap.remove(zkClusterKey); if (oldDashboardLeaderHandler != null) { oldDashboardLeaderHandler.shutdown(); } } } } /** * Close dashboardLeaderTreeCache, registryCenterClient, namespaceShardingListenerManager with this zkCluster */ private void closeZkCluster(ZkCluster zkCluster) { try { try { DashboardLeaderHandler dashboardLeaderHandler = dashboardLeaderTreeCacheMap .remove(zkCluster.getZkClusterKey()); if (dashboardLeaderHandler != null) { dashboardLeaderHandler.shutdown(); } } catch (Exception e) { log.error(e.getMessage(), e); } ArrayList<RegistryCenterConfiguration> regCenterConfList = zkCluster.getRegCenterConfList(); if (regCenterConfList != null) { for (RegistryCenterConfiguration conf : regCenterConfList) { String nns = conf.getNameAndNamespace(); synchronized (getNnsLock(nns)) { closeNamespace(nns); } } } if (zkCluster.getConnectionListener() != null) { zkCluster.getConnectionListener().shutdownNowUntilTerminated(); zkCluster.setConnectionListener(null); } if (zkCluster.getCuratorFramework() != null) { zkCluster.getCuratorFramework().close(); } } catch (Exception e) { log.error(e.getMessage(), e); } } private void createNewConnect(final ZkCluster zkCluster) { String zkAddr = zkCluster.getZkAddr(); try { final CuratorFramework curatorFramework = curatorRepository.connect(zkAddr, null, zkCluster.getDigest()); if (curatorFramework == null) { log.error("found an offline zkCluster, zkAddr is {}", zkAddr); zkCluster.setCuratorFramework(null); zkCluster.setConnectionListener(null); zkCluster.setOffline(true); } else { AbstractConnectionListener connectionListener = new AbstractConnectionListener( "zk-connectionListener-thread-for-zkCluster-" + zkCluster.getZkAlias()) { @Override public void stop() { zkCluster.setOffline(true); } @Override public void restart() { try { zkCluster.setOffline(false); } catch (Exception e) { log.error(e.getMessage(), e); } } }; zkCluster.setCuratorFramework(curatorFramework); zkCluster.setConnectionListener(connectionListener); zkCluster.setOffline(false); curatorFramework.getConnectionStateListenable().addListener(connectionListener); } } catch (Exception e) { log.error("found an offline zkCluster, zkAddr is {}", zkAddr); log.error(e.getMessage(), e); zkCluster.setCuratorFramework(null); zkCluster.setConnectionListener(null); zkCluster.setOffline(true); } } protected void initNamespaceZkNodeIfNecessary(String namespace, CuratorFramework curatorFramework) { try { String executorsNodePath = "/" + namespace + ExecutorNodePath.get$ExecutorNodePath(); if (curatorFramework.checkExists().forPath(executorsNodePath) == null) { curatorFramework.create().creatingParentsIfNeeded().forPath(executorsNodePath); } String jobsNodePath = "/" + namespace + JobNodePath.get$JobsNodePath(); if (curatorFramework.checkExists().forPath(jobsNodePath) == null) { curatorFramework.create().creatingParentsIfNeeded().forPath(jobsNodePath); } } catch (Exception e) { log.error(e.getMessage(), e); } } private String getVersion(String namespace, CuratorFramework curatorFramework) { try { List<String> versionList = new ArrayList<>(); String executorsPath = "/" + namespace + ExecutorNodePath.getExecutorNodePath(); if (curatorFramework.checkExists().forPath(executorsPath) == null) { return getVersionStrFromList(versionList); } List<String> executors = curatorFramework.getChildren().forPath(executorsPath); if (executors == null || executors.isEmpty()) { return getVersionStrFromList(versionList); } for (String exe : executors) { String versionPath = executorsPath + "/" + exe + "/version"; if (curatorFramework.checkExists().forPath(versionPath) == null) { continue; } byte[] bs = curatorFramework.getData().forPath(versionPath); if (bs == null) { continue; } String version = new String(bs, "UTF-8"); if (!version.trim().isEmpty()) { String tmp = version.trim(); if (!versionList.contains(tmp)) { versionList.add(tmp); } } } return getVersionStrFromList(versionList); } catch (Exception e) { log.error(e.getMessage(), e); return ""; } } private String getVersionStrFromList(List<String> versionList) { Collections.sort(versionList); StringBuilder versionSb = new StringBuilder(); versionSb.append(""); for (int i = 0; i < versionList.size(); i++) { versionSb.append(versionList.get(i)); if (i < versionList.size() - 1) { versionSb.append(", "); } } return versionSb.toString(); } @Override public RegistryCenterClient connect(final String nameAndNameSpace) { final RegistryCenterClient registryCenterClient = new RegistryCenterClient(); registryCenterClient.setNameAndNamespace(nameAndNameSpace); if (nameAndNameSpace == null) { return registryCenterClient; } synchronized (getNnsLock(nameAndNameSpace)) { if (!registryCenterClientMap.containsKey(nameAndNameSpace)) { RegistryCenterConfiguration registryCenterConfiguration = findConfig(nameAndNameSpace); if (registryCenterConfiguration == null) { return registryCenterClient; } String zkAddressList = registryCenterConfiguration.getZkAddressList(); String namespace = registryCenterConfiguration.getNamespace(); String digest = registryCenterConfiguration.getDigest(); registryCenterClient.setZkAddr(zkAddressList); CuratorFramework client = curatorRepository.connect(zkAddressList, namespace, digest); if (client == null) { return registryCenterClient; } registryCenterClient.setConnected(client.getZookeeperClient().isConnected()); registryCenterClient.setCuratorClient(client); registryCenterClientMap.put(nameAndNameSpace, registryCenterClient); return registryCenterClient; } else { RegistryCenterClient registryCenterClient2 = registryCenterClientMap.get(nameAndNameSpace); if (registryCenterClient2 != null) { if (registryCenterClient2.getCuratorClient() != null) { registryCenterClient2.setConnected( registryCenterClient2.getCuratorClient().getZookeeperClient().isConnected()); } else { registryCenterClient2.setConnected(false); } return registryCenterClient2; } return registryCenterClient; } } } @Override public RegistryCenterClient connectByNamespace(String namespace) { RegistryCenterConfiguration registryCenterConfiguration = findConfigByNamespace(namespace); if (registryCenterConfiguration == null) { return new RegistryCenterClient(); } String nns = registryCenterConfiguration.getNameAndNamespace(); if (nns == null) { return new RegistryCenterClient(); } String zkAddressList = registryCenterConfiguration.getZkAddressList(); String digest = registryCenterConfiguration.getDigest(); synchronized (getNnsLock(nns)) { if (!registryCenterClientMap.containsKey(nns)) { final RegistryCenterClient registryCenterClient = new RegistryCenterClient(); registryCenterClient.setNameAndNamespace(nns); registryCenterClient.setZkAddr(zkAddressList); CuratorFramework client = curatorRepository.connect(zkAddressList, namespace, digest); if (client == null) { return registryCenterClient; } registryCenterClient.setConnected(client.getZookeeperClient().isConnected()); registryCenterClient.setCuratorClient(client); registryCenterClientMap.put(nns, registryCenterClient); return registryCenterClient; } else { RegistryCenterClient registryCenterClient = registryCenterClientMap.get(nns); if (registryCenterClient == null) { registryCenterClient = new RegistryCenterClient(); registryCenterClient.setNameAndNamespace(namespace); registryCenterClient.setZkAddr(zkAddressList); } else { if (registryCenterClient.getCuratorClient() != null) { registryCenterClient.setConnected( registryCenterClient.getCuratorClient().getZookeeperClient().isConnected()); } else { registryCenterClient.setConnected(false); } } return registryCenterClient; } } } @Override public RegistryCenterConfiguration findConfig(String nameAndNamespace) { if (Strings.isNullOrEmpty(nameAndNamespace)) { return null; } Collection<ZkCluster> zkClusters = zkClusterMap.values(); for (ZkCluster zkCluster : zkClusters) { for (RegistryCenterConfiguration each : zkCluster.getRegCenterConfList()) { if (each != null && nameAndNamespace.equals(each.getNameAndNamespace())) { return each; } } } return null; } @Override public RegistryCenterConfiguration findConfigByNamespace(String namespace) { if (Strings.isNullOrEmpty(namespace)) { return null; } Collection<ZkCluster> zkClusters = zkClusterMap.values(); for (ZkCluster zkCluster : zkClusters) { for (RegistryCenterConfiguration each : zkCluster.getRegCenterConfList()) { if (each != null && namespace.equals(each.getNamespace())) { return each; } } } return null; } @Override public CuratorRepository.CuratorFrameworkOp connectOnly(String zkAddr, String namespace) throws SaturnJobConsoleException { CuratorFramework curatorFramework = curatorRepository.connect(zkAddr, namespace, null); if (curatorFramework != null) { return curatorRepository.newCuratorFrameworkOp(curatorFramework); } return null; } @Override public void notifyRefreshRegCenter() throws SaturnJobConsoleException { SystemConfig systemConfig = new SystemConfig(); systemConfig.setProperty(SystemConfigProperties.REFRESH_REGISTRY_CENTER_UUID); systemConfig.setValue(UUID.randomUUID().toString()); systemConfigService.insertOrUpdate(systemConfig); } @Override public RegistryCenterClient getCuratorByNameAndNamespace(String nameAndNamespace) { return registryCenterClientMap.get(nameAndNamespace); } @Override public boolean isDashboardLeader(String zkClusterKey) { DashboardLeaderHandler dashboardLeaderHandler = dashboardLeaderTreeCacheMap.get(zkClusterKey); if (dashboardLeaderHandler != null) { return dashboardLeaderHandler.isLeader(); } return false; } @Override public ZkCluster getZkCluster(String zkClusterKey) { return zkClusterMap.get(zkClusterKey); } @Transactional(rollbackFor = Exception.class) @Override public void createZkCluster(String zkClusterKey, String alias, String connectString) throws SaturnJobConsoleException { zkClusterInfoService.createZkCluster(zkClusterKey, alias, connectString, ""); notifyRefreshRegCenter(); } @Override public Collection<ZkCluster> getZkClusterList() { return zkClusterMap.values(); } @Override public List<ZkCluster> getOnlineZkClusterList() { List<ZkCluster> onlineClusters = Lists.newArrayList(); for (ZkCluster zkCluster : zkClusterMap.values()) { if (!zkCluster.isOffline()) { onlineClusters.add(zkCluster); } } return onlineClusters; } private List<String> getZkClusterKeys() { Collection<ZkCluster> zkClusters = getZkClusterList(); List<String> zkClusterKeys = Lists.newArrayList(); for (ZkCluster zkCluster : zkClusters) { zkClusterKeys.add(zkCluster.getZkClusterKey()); } return zkClusterKeys; } @Override public int domainCount(String zkClusterKey) { ZkCluster zkCluster = zkClusterMap.get(zkClusterKey); if (zkCluster != null) { ArrayList<RegistryCenterConfiguration> regList = zkCluster.getRegCenterConfList(); if (regList != null) { return regList.size(); } } return 0; } @Override public boolean namespaceIsCorrect(String namespace, CuratorFramework curatorFramework) throws SaturnJobConsoleException { if (SaturnSelfNodePath.ROOT_NAME.equals(namespace)) { return false; } try { String executorsPath = "/" + namespace + ExecutorNodePath.getExecutorNodePath(); if (curatorFramework.checkExists().forPath(executorsPath) != null) { return true; } String jobsPath = "/" + namespace + JobNodePath.get$JobsNodePath(); return curatorFramework.checkExists().forPath(jobsPath) != null ? true : false; } catch (Exception e) { log.error(e.getMessage(), e); return false; } } @Override public List<String> getNamespaces() throws SaturnJobConsoleException { return allOnlineNamespaces; } @Override public File exportNamespaceInfo(List<String> namespaceList) throws SaturnJobConsoleException { Set<String> targetNamespaceSet = null; if (namespaceList != null) { targetNamespaceSet = Sets.newHashSet(namespaceList); } List<RegistryCenterConfiguration> namespaceInfoList = Lists.newLinkedList(); Collection<ZkCluster> zkClusterList = getZkClusterList(); for (ZkCluster zkCluster : zkClusterList) { List<RegistryCenterConfiguration> namespacesOfZkCluster = zkCluster.getRegCenterConfList(); for (RegistryCenterConfiguration ns : namespacesOfZkCluster) { if (targetNamespaceSet == null || targetNamespaceSet.contains(ns.getNamespace())) { namespaceInfoList.add(ns); } } } return exportNamespaceInfo2Excel(namespaceInfoList); } /** * Export namespac */ private File exportNamespaceInfo2Excel(List<RegistryCenterConfiguration> namespaceInfoList) throws SaturnJobConsoleException { try { File tmpFile = SaturnConsoleUtils.createTmpFile(); WritableWorkbook writableWorkbook = Workbook.createWorkbook(tmpFile); WritableSheet sheet1 = writableWorkbook.createSheet("ns", 0); sheet1.addCell(new Label(0, 0, "域名")); sheet1.addCell(new Label(1, 0, "描述")); sheet1.addCell(new Label(2, 0, "重要等级")); sheet1.addCell(new Label(3, 0, "Executor版本")); sheet1.addCell(new Label(4, 0, "ZK集群")); for (int i = 0; i < namespaceInfoList.size(); i++) { RegistryCenterConfiguration namespaceInfo = namespaceInfoList.get(i); sheet1.addCell(new Label(0, i + 1, namespaceInfo.getNamespace())); sheet1.addCell(new Label(1, i + 1, namespaceInfo.getName())); sheet1.addCell(new Label(2, i + 1, namespaceInfo.getDegree())); sheet1.addCell(new Label(3, i + 1, namespaceInfo.getVersion())); sheet1.addCell(new Label(4, i + 1, namespaceInfo.getZkAlias())); } writableWorkbook.write(); writableWorkbook.close(); return tmpFile; } catch (Exception e) { throw new SaturnJobConsoleException(e); } } @Transactional(rollbackFor = { Exception.class }) @Override public void createNamespace(NamespaceDomainInfo namespaceDomainInfo) throws SaturnJobConsoleException { String namespace = namespaceDomainInfo.getNamespace(); String zkClusterKey = namespaceDomainInfo.getZkCluster(); ZkCluster currentCluster = getZkCluster(zkClusterKey); if (currentCluster == null) { throw new SaturnJobConsoleHttpException(HttpStatus.BAD_REQUEST.value(), String.format(ERR_MSG_TEMPLATE_FAIL_TO_CREATE, namespace, "not found zkcluster" + zkClusterKey)); } if (checkNamespaceExists(namespace)) { throw new SaturnJobConsoleHttpException(HttpStatus.BAD_REQUEST.value(), String.format(ERR_MSG_NS_ALREADY_EXIST, namespace)); } try { // 创建 namespaceInfo NamespaceInfo namespaceInfo = constructNamespaceInfo(namespaceDomainInfo); namespaceInfoService.create(namespaceInfo); // 创建 zkcluster 和 namespaceInfo 关系 namespaceZkClusterMapping4SqlService.insert(namespace, "", zkClusterKey, NAMESPACE_CREATOR_NAME); // refresh notifyRefreshRegCenter(); } catch (Exception e) { log.error(e.getMessage(), e); throw new SaturnJobConsoleHttpException(HttpStatus.INTERNAL_SERVER_ERROR.value(), String.format(ERR_MSG_TEMPLATE_FAIL_TO_CREATE, namespace, e.getMessage())); } } @Override public void updateNamespace(NamespaceDomainInfo namespaceDomainInfo) throws SaturnJobConsoleException { String namespace = namespaceDomainInfo.getNamespace(); if (!checkNamespaceExists(namespace)) { throw new SaturnJobConsoleHttpException(HttpStatus.BAD_REQUEST.value(), ERR_MSG_NS_NOT_FOUND); } try { // 创建 namespaceInfo NamespaceInfo namespaceInfo = constructNamespaceInfo(namespaceDomainInfo); namespaceInfoService.update(namespaceInfo); // refresh notifyRefreshRegCenter(); } catch (Exception e) { log.error(e.getMessage(), e); throw new SaturnJobConsoleHttpException(HttpStatus.INTERNAL_SERVER_ERROR.value(), String.format(ERR_MSG_TEMPLATE_FAIL_TO_CREATE, namespace, e.getMessage())); } } @Override public NamespaceDomainInfo getNamespace(String namespace) throws SaturnJobConsoleException { if (namespaceInfoService.selectByNamespace(namespace) == null) { throw new SaturnJobConsoleHttpException(HttpStatus.NOT_FOUND.value(), ERR_MSG_NS_NOT_FOUND); } String zkClusterKey = namespaceZkClusterMapping4SqlService.getZkClusterKey(namespace); if (StringUtils.isBlank(zkClusterKey)) { throw new SaturnJobConsoleHttpException(HttpStatus.NOT_FOUND.value(), ERR_MSG_NS_NOT_FOUND); } NamespaceDomainInfo namespaceDomainInfo = new NamespaceDomainInfo(); namespaceDomainInfo.setNamespace(namespace); namespaceDomainInfo.setZkCluster(zkClusterKey); return namespaceDomainInfo; } @Transactional(rollbackFor = { Exception.class }) @Override public void bindNamespaceAndZkCluster(String namespace, String zkClusterKey, String updatedBy) throws SaturnJobConsoleException { ZkCluster currentCluster = getZkCluster(zkClusterKey); if (currentCluster == null) { throw new SaturnJobConsoleHttpException(HttpStatus.BAD_REQUEST.value(), String.format(ERR_MSG_TEMPLATE_FAIL_TO_CREATE, namespace, "not found zkcluster" + zkClusterKey)); } // namespace必须要存在 if (!checkNamespaceExists(namespace)) { throw new SaturnJobConsoleHttpException(HttpStatus.BAD_REQUEST.value(), ERR_MSG_NS_NOT_FOUND); } // 判断其它集群是否有该域 String zkClusterKeyOther = namespaceZkClusterMapping4SqlService.getZkClusterKey(namespace); if (zkClusterKeyOther != null) { ZkCluster zkClusterOther = getZkCluster(zkClusterKeyOther); if (zkClusterOther == null) { throw new SaturnJobConsoleException("zk cluster 不存在:" + zkClusterKeyOther); } if (zkClusterOther.getZkClusterKey().equals(zkClusterKey)) { throw new SaturnJobConsoleException("Namespace已经存在于此zk集群,不能重复添加"); } else { throw new SaturnJobConsoleException( "Namespace存在于另外的zk集群:" + zkClusterOther.getZkClusterKey() + ",不能重复添加"); } } try { namespaceZkClusterMapping4SqlService.insert(namespace, "", zkClusterKey, updatedBy); postBindNamespaceAndZkCluster(namespace, currentCluster); // refresh notifyRefreshRegCenter(); } catch (Exception e) { namespaceZkClusterMapping4SqlService.remove(namespace, updatedBy); throw new SaturnJobConsoleException(e.getMessage()); } } protected void postBindNamespaceAndZkCluster(String namespace, ZkCluster currentCluster) throws SaturnJobConsoleException { // for subclass implement } private boolean checkNamespaceExists(String namespace) { if (namespaceInfoService.selectByNamespace(namespace) != null) { return true; } // 判断其它集群是否有该域 String zkClusterKeyOther = namespaceZkClusterMapping4SqlService.getZkClusterKey(namespace); return zkClusterKeyOther != null ? true : false; } private NamespaceInfo constructNamespaceInfo(NamespaceDomainInfo namespaceDomainInfo) { NamespaceInfo namespaceInfo = new NamespaceInfo(); namespaceInfo.setCreatedBy(NAMESPACE_CREATOR_NAME); namespaceInfo.setCreateTime(new Date()); namespaceInfo.setIsDeleted(0); namespaceInfo.setLastUpdatedBy(NAMESPACE_CREATOR_NAME); namespaceInfo.setLastUpdateTime(new Date()); namespaceInfo.setNamespace(namespaceDomainInfo.getNamespace()); namespaceInfo.setContent(namespaceDomainInfo.getContent()); return namespaceInfo; } @Override public CuratorRepository.CuratorFrameworkOp getCuratorFrameworkOp(String namespace) throws SaturnJobConsoleException { CuratorRepository.CuratorFrameworkOp curatorFrameworkOp = null; try { RegistryCenterConfiguration registryCenterConfiguration = findConfigByNamespace(namespace); if (registryCenterConfiguration == null) { throw new SaturnJobConsoleException("Connect zookeeper failed"); } String nns = registryCenterConfiguration.getNameAndNamespace(); if (nns == null) { throw new SaturnJobConsoleException("Connect zookeeper failed"); } String zkAddressList = registryCenterConfiguration.getZkAddressList(); String digest = registryCenterConfiguration.getDigest(); synchronized (getNnsLock(nns)) { if (!registryCenterClientMap.containsKey(nns)) { final RegistryCenterClient registryCenterClient = new RegistryCenterClient(); registryCenterClient.setNameAndNamespace(nns); registryCenterClient.setZkAddr(zkAddressList); CuratorFramework curatorFramework = curatorRepository.connect(zkAddressList, namespace, digest); if (curatorFramework != null) { registryCenterClient.setConnected(curatorFramework.getZookeeperClient().isConnected()); registryCenterClient.setCuratorClient(curatorFramework); registryCenterClientMap.put(nns, registryCenterClient); curatorFrameworkOp = curatorRepository.newCuratorFrameworkOp(curatorFramework); } } else { RegistryCenterClient registryCenterClient = registryCenterClientMap.get(nns); if (registryCenterClient != null) { CuratorFramework curatorFramework = registryCenterClient.getCuratorClient(); if (curatorFramework != null) { registryCenterClient.setConnected(curatorFramework.getZookeeperClient().isConnected()); curatorFrameworkOp = curatorRepository.newCuratorFrameworkOp(curatorFramework); } } } } } catch (SaturnJobConsoleException e) { throw e; } catch (Exception e) { throw new SaturnJobConsoleException(e); } if (curatorFrameworkOp == null) { throw new SaturnJobConsoleException("Connect zookeeper failed"); } return curatorFrameworkOp; } }
saturn-console-api/src/main/java/com/vip/saturn/job/console/service/impl/RegistryCenterServiceImpl.java
/** * Copyright 2016 vip.com. * <p> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * </p> */ package com.vip.saturn.job.console.service.impl; import com.google.common.base.Strings; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.vip.saturn.job.console.SaturnEnvProperties; import com.vip.saturn.job.console.domain.NamespaceDomainInfo; import com.vip.saturn.job.console.domain.RegistryCenterClient; import com.vip.saturn.job.console.domain.RegistryCenterConfiguration; import com.vip.saturn.job.console.domain.ZkCluster; import com.vip.saturn.job.console.exception.SaturnJobConsoleException; import com.vip.saturn.job.console.exception.SaturnJobConsoleHttpException; import com.vip.saturn.job.console.mybatis.entity.NamespaceInfo; import com.vip.saturn.job.console.mybatis.entity.NamespaceZkClusterMapping; import com.vip.saturn.job.console.mybatis.entity.SystemConfig; import com.vip.saturn.job.console.mybatis.entity.ZkClusterInfo; import com.vip.saturn.job.console.mybatis.service.NamespaceInfoService; import com.vip.saturn.job.console.mybatis.service.NamespaceZkClusterMapping4SqlService; import com.vip.saturn.job.console.mybatis.service.ZkClusterInfoService; import com.vip.saturn.job.console.repository.zookeeper.CuratorRepository; import com.vip.saturn.job.console.service.RegistryCenterService; import com.vip.saturn.job.console.service.SystemConfigService; import com.vip.saturn.job.console.service.cache.DashboardLeaderHandler; import com.vip.saturn.job.console.service.helper.SystemConfigProperties; import com.vip.saturn.job.console.service.helper.ZkClusterMappingUtils; import com.vip.saturn.job.console.utils.*; import com.vip.saturn.job.integrate.service.ReportAlarmService; import com.vip.saturn.job.integrate.service.UpdateJobConfigService; import com.vip.saturn.job.sharding.NamespaceShardingManager; import com.vip.saturn.job.sharding.listener.AbstractConnectionListener; import jxl.Workbook; import jxl.write.Label; import jxl.write.WritableSheet; import jxl.write.WritableWorkbook; import org.apache.commons.lang3.StringUtils; import org.apache.curator.framework.CuratorFramework; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.http.HttpStatus; import org.springframework.transaction.annotation.Transactional; import org.springframework.util.CollectionUtils; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import javax.annotation.Resource; import java.io.File; import java.util.*; import java.util.Map.Entry; import java.util.concurrent.*; import java.util.concurrent.atomic.AtomicBoolean; public class RegistryCenterServiceImpl implements RegistryCenterService { protected static final String DEFAULT_CONSOLE_CLUSTER_ID = "default"; protected static final String NAMESPACE_CREATOR_NAME = "REST_API"; protected static final String ERR_MSG_TEMPLATE_FAIL_TO_CREATE = "Fail to create new namespace {%s} for reason {%s}"; protected static final String ERR_MSG_NS_NOT_FOUND = "The namespace does not exists."; protected static final String ERR_MSG_NS_ALREADY_EXIST = "Invalid request. Namespace: {%s} already existed"; private static final Logger log = LoggerFactory.getLogger(RegistryCenterServiceImpl.class); @Resource private CuratorRepository curatorRepository; @Resource private ReportAlarmService reportAlarmService; @Resource private UpdateJobConfigService updateJobConfigService; @Resource private ZkClusterInfoService zkClusterInfoService; @Resource private SystemConfigService systemConfigService; @Resource private NamespaceZkClusterMapping4SqlService namespaceZkClusterMapping4SqlService; @Resource private NamespaceInfoService namespaceInfoService; /** * 为保证values有序 **/ private LinkedHashMap<String, ZkCluster> zkClusterMap = new LinkedHashMap<>(); private ConcurrentHashMap<String, DashboardLeaderHandler> dashboardLeaderTreeCacheMap = new ConcurrentHashMap<>(); // namespace is unique in all zkClusters private ConcurrentHashMap<String /** nns */ , RegistryCenterClient> registryCenterClientMap = new ConcurrentHashMap<>(); private ConcurrentHashMap<String, Object> nnsLock = new ConcurrentHashMap<>(); // maybe could remove in right time // namespace is unique in all zkClusters private ConcurrentHashMap<String /** nns **/ , NamespaceShardingManager> namespaceShardingListenerManagerMap = new ConcurrentHashMap<>(); private List<String> allOnlineNamespaces = new ArrayList<>(); private String consoleClusterId; private Set<String> restrictComputeZkClusterKeys = Sets.newHashSet(); private ScheduledThreadPoolExecutor localRefreshTimer; private ScheduledThreadPoolExecutor localRefreshIfNecessaryTimer; private AtomicBoolean isOnRefreshingFlag = new AtomicBoolean(false); @PostConstruct public void init() { getConsoleClusterId(); localRefresh(); startLocalRefreshTimer(); startLocalRefreshIfNecessaryTimer(); } private void getConsoleClusterId() { if (StringUtils.isBlank(SaturnEnvProperties.VIP_SATURN_CONSOLE_CLUSTER_ID)) { log.info( "No environment variable or system property of [VIP_SATURN_CONSOLE_CLUSTER] is set. Use the default Id"); consoleClusterId = DEFAULT_CONSOLE_CLUSTER_ID; } else { consoleClusterId = SaturnEnvProperties.VIP_SATURN_CONSOLE_CLUSTER_ID; } } @PreDestroy public void destroy() { Iterator<Entry<String, ZkCluster>> iterator = zkClusterMap.entrySet().iterator(); while (iterator.hasNext()) { closeZkCluster(iterator.next().getValue()); } if (localRefreshTimer != null) { localRefreshTimer.shutdownNow(); } if (localRefreshIfNecessaryTimer != null) { localRefreshIfNecessaryTimer.shutdownNow(); } } private void startLocalRefreshTimer() { localRefreshTimer = new ScheduledThreadPoolExecutor(1, new SaturnThreadFactory("refresh-RegCenter-timer")); localRefreshTimer.scheduleWithFixedDelay(new Runnable() { @Override public void run() { if (isOnRefreshingFlag.compareAndSet(false, true)) { try { localRefresh(); } catch (Exception e) { log.error(e.getMessage(), e); } finally { isOnRefreshingFlag.set(false); } } } }, 2L, 5L, TimeUnit.MINUTES); } private void startLocalRefreshIfNecessaryTimer() { localRefreshIfNecessaryTimer = new ScheduledThreadPoolExecutor(1, new SaturnThreadFactory("refresh-RegCenter-if-necessary-timer")); localRefreshIfNecessaryTimer.scheduleWithFixedDelay(new Runnable() { private String lastUuid = null; @Override public void run() { try { String uuid = systemConfigService.getValueDirectly(SystemConfigProperties.REFRESH_REGISTRY_CENTER_UUID); if (StringUtils.isBlank(uuid)) { notifyRefreshRegCenter(); } else if (!uuid.equals(lastUuid)) { try { lastUuid = uuid; localRefresh(); } catch (Exception e) { log.error(e.getMessage(), e); } } } catch (Exception e) { log.error(e.getMessage(), e); } } }, 1L, 1L, TimeUnit.SECONDS); } private synchronized void localRefresh() { try { log.info("Start refresh RegCenter"); long startTime = System.currentTimeMillis(); refreshRestrictComputeZkClusters(); if (restrictComputeZkClusterKeys.isEmpty()) { log.warn("根据Console的集群ID:" + consoleClusterId + ",找不到配置可以参与Sharding和Dashboard计算的zk集群"); return; } refreshRegistryCenter(); refreshDashboardLeaderTreeCache(); refreshNamespaceShardingListenerManagerMap(); log.info("End refresh RegCenter, cost {}ms", System.currentTimeMillis() - startTime); } catch (Exception e) { log.error("refresh RegCenter error", e); } } /** * 解析Console集群和zk的映射关系 数据库中配置的例子如下: CONSOLE-1:/saturn,/forVdos;CONSOLE-2:/zk3; 如果不存在此配置项,则可以计算所有zk集群; */ private void refreshRestrictComputeZkClusters() throws SaturnJobConsoleException { // clear 当前可计算的zkCluster集群列表 restrictComputeZkClusterKeys.clear(); String allMappingStr = systemConfigService.getValueDirectly(SystemConfigProperties.CONSOLE_ZK_CLUSTER_MAPPING); if (StringUtils.isBlank(allMappingStr)) { log.info( "CONSOLE_ZK_CLUSTER_MAPPING is not configured in sys_config, so all zk clusters can be computed by this console"); restrictComputeZkClusterKeys.addAll(getZkClusterKeys()); return; } allMappingStr = StringUtils.deleteWhitespace(allMappingStr); String[] singleConsoleMappingArray = allMappingStr.split(";"); for (String singleConsoleMappingStr : singleConsoleMappingArray) { String[] consoleAndClusterKeyArray = singleConsoleMappingStr.split(":"); if (consoleAndClusterKeyArray.length != 2) { throw new SaturnJobConsoleException("the CONSOLE_ZK_CLUSTER_MAPPING(" + Arrays.toString(consoleAndClusterKeyArray) + ") format is not correct, should be like console_cluster_id:zk_cluster_id"); } String tempConsoleClusterId = consoleAndClusterKeyArray[0]; String zkClusterKeyStr = consoleAndClusterKeyArray[1]; if (consoleClusterId.equals(tempConsoleClusterId)) { String[] zkClusterKeyArray = zkClusterKeyStr.trim().split(","); restrictComputeZkClusterKeys.addAll(Arrays.asList(zkClusterKeyArray)); log.info("the current console cluster:{} can do sharding and dashboard to zk clusters:{}", consoleClusterId, restrictComputeZkClusterKeys); return; } } } /** * 判断该集群是否能被本Console计算 */ private boolean isZKClusterCanBeComputed(String clusterKey) { if (CollectionUtils.isEmpty(restrictComputeZkClusterKeys)) { return false; } return restrictComputeZkClusterKeys.contains(clusterKey); } /** * 判断是否同机房 */ private boolean isCurrentConsoleInTheSameIdc(String clusterKey) { return ZkClusterMappingUtils.isCurrentConsoleInTheSameIdc(systemConfigService, clusterKey); } private String generateShardingLeadershipHostValue() { return LocalHostService.cachedIpAddress + "-" + UUID.randomUUID().toString(); } /** * 创建或者移除namespaceShardingManager. */ private void refreshNamespaceShardingListenerManagerMap() { Iterator<Entry<String, ZkCluster>> iterator = zkClusterMap.entrySet().iterator(); while (iterator.hasNext()) { Entry<String, ZkCluster> next = iterator.next(); ZkCluster zkCluster = next.getValue(); ArrayList<RegistryCenterConfiguration> regCenterConfList = zkCluster.getRegCenterConfList(); if (regCenterConfList == null) { continue; } for (RegistryCenterConfiguration conf : regCenterConfList) { String nns = conf.getNameAndNamespace(); if (!namespaceShardingListenerManagerMap.containsKey(nns)) { if (isZKClusterCanBeComputed(conf.getZkClusterKey())) { createNamespaceShardingManager(conf, nns); } } else { NamespaceShardingManager namespaceShardingManager = namespaceShardingListenerManagerMap.get(nns); if (!isZKClusterCanBeComputed(conf.getZkClusterKey())) { namespaceShardingManager.stopWithCurator(); namespaceShardingListenerManagerMap.remove(nns); } } } } } private void createNamespaceShardingManager(RegistryCenterConfiguration conf, String nns) { try { log.info("Start NamespaceShardingManager {}", nns); String namespace = conf.getNamespace(); String digest = conf.getDigest(); CuratorFramework client = curatorRepository.connect(conf.getZkAddressList(), namespace, digest); if (client == null) { log.warn("fail to connect to zk during create NamespaceShardingManager"); return; } NamespaceShardingManager namespaceShardingManager = null; try { namespaceShardingManager = new NamespaceShardingManager(client, namespace, generateShardingLeadershipHostValue(), reportAlarmService, updateJobConfigService); namespaceShardingManager.start(); if (namespaceShardingListenerManagerMap.putIfAbsent(nns, namespaceShardingManager) != null) { // 已经存在,则关闭当前的client try { namespaceShardingManager.stopWithCurator(); } catch (Exception e) { log.error(e.getMessage(), e); } } else { log.info("Done starting NamespaceShardingManager {}", nns); } } catch (Exception e) { log.error(e.getMessage(), e); if (namespaceShardingManager != null) { try { namespaceShardingManager.stop(); } catch (Exception e2) { log.error(e2.getMessage(), e2); } } client.close(); } } catch (Exception e) { log.error(e.getMessage(), e); } } private void refreshRegistryCenter() { List<String> allOnlineNamespacesTemp = new ArrayList<>(); // 获取新的zkClusters Map<String, ZkCluster> newClusterMap = getZkClusterInfo(); // 对比旧的。不包含的,关闭操作;包含的,检查属性是否相同,如果相同,则直接赋值,否则,关闭旧的 closeInvalidZkClient(newClusterMap); // 完善curatorFramework。如果没有,则新建 connectToZkClusterIfPossible(newClusterMap); // 完善ZkCluster中的注册中心信息,关闭迁移了的域,新建迁移过来的域 for (Map.Entry<String, ZkCluster> zkClusterEntry : newClusterMap.entrySet()) { ZkCluster zkCluster = zkClusterEntry.getValue(); List<NamespaceZkClusterMapping> nsZkClusterMappingList = namespaceZkClusterMapping4SqlService .getAllMappingsOfCluster(zkClusterEntry.getKey()); // zkCluster对应的namespace列表 List<RegistryCenterConfiguration> regCenterConfList = zkCluster.getRegCenterConfList(); closeMoveOutNamespace(zkClusterEntry.getKey(), nsZkClusterMappingList, regCenterConfList); initOrUpdateNamespace(allOnlineNamespacesTemp, zkCluster, nsZkClusterMappingList, regCenterConfList); } // 直接赋值新的 zkClusterMap = (LinkedHashMap<String, ZkCluster>) newClusterMap; allOnlineNamespaces = allOnlineNamespacesTemp; } private void initOrUpdateNamespace(List<String> allOnlineNamespacesTemp, ZkCluster zkCluster, List<NamespaceZkClusterMapping> nsZkClusterMappingList, List<RegistryCenterConfiguration> regCenterConfList) { if (nsZkClusterMappingList == null || zkCluster.isOffline()) { return; } CuratorFramework curatorFramework = zkCluster.getCuratorFramework(); ArrayList<RegistryCenterConfiguration> newRegCenterConfList = new ArrayList<>(); try { for (NamespaceZkClusterMapping mapping : nsZkClusterMappingList) { String namespace = mapping.getNamespace(); // 过滤$SaturnSelf if (SaturnSelfNodePath.ROOT_NAME.equals(namespace)) { continue; } // 如果这个标记为true,意味是新域,或者是迁移过来的域 boolean isNamespaceNotIncludeInOriginRegCenerConfList = false; if (isNamespaceNotIncludeInRegCenterConfList(namespace, regCenterConfList)) { // 对于新添加的域,需要初始化一些znode initNamespaceZkNodeIfNecessary(namespace, curatorFramework); isNamespaceNotIncludeInOriginRegCenerConfList = true; } try { if (isNamespaceNotIncludeInOriginRegCenerConfList || isNewerVersionSaturnNamespace(namespace, curatorFramework)) { NamespaceInfo namespaceInfo = getNamespaceInfo(namespace); if (namespaceInfo == null && !isNamespaceInfoNotFoundCanBeCalculate()) { log.warn("No info about namespace {}, just skip it.", namespace); continue; } RegistryCenterConfiguration conf = new RegistryCenterConfiguration(mapping.getName(), namespace, zkCluster.getZkAddr()); conf.setZkClusterKey(zkCluster.getZkClusterKey()); conf.setVersion(getVersion(namespace, curatorFramework)); conf.setZkAlias(zkCluster.getZkAlias()); if (namespaceInfo != null) { postConstructRegistryCenterConfiguration(conf, namespaceInfo.getContent()); } newRegCenterConfList.add(conf); if (!allOnlineNamespacesTemp.contains(namespace)) { allOnlineNamespacesTemp.add(namespace); } } } catch (Exception e) { log.error(e.getMessage(), e); } } } catch (Exception e) { log.error(e.getMessage(), e); } // 如果nns有改变,则需要关闭旧的 if (regCenterConfList != null) { for (RegistryCenterConfiguration conf : regCenterConfList) { String namespace = conf.getNamespace(); String nns = conf.getNameAndNamespace(); for (RegistryCenterConfiguration confNew : newRegCenterConfList) { String namespaceNew = confNew.getNamespace(); if (namespace.equals(namespaceNew)) { String nnsNew = confNew.getNameAndNamespace(); if (!nns.equals(nnsNew)) { synchronized (getNnsLock(nns)) { closeNamespace(nns); log.info("closed the namespace info because it's nns is changed, namespace is {}", namespace); } } break; } } } } int oldSize = regCenterConfList != null ? regCenterConfList.size() : 0; if (oldSize != newRegCenterConfList.size()) { log.info("Zkcluster [{}] namespace size change from {} to {}", zkCluster.getZkClusterKey(), oldSize, newRegCenterConfList.size()); } zkCluster.setRegCenterConfList(newRegCenterConfList); } protected boolean isNamespaceInfoNotFoundCanBeCalculate() { return true; } private boolean isNamespaceNotIncludeInRegCenterConfList(String namespace, List<RegistryCenterConfiguration> regCenterConfList) { if (regCenterConfList == null || regCenterConfList.isEmpty()) { return true; } for (RegistryCenterConfiguration conf : regCenterConfList) { if (namespace.equals(conf.getNamespace())) { return false; } } return true; } /** * 确保namespace的作业版本>1.0.9 * @return true namespace里面的作业均在>1.0.9以后创建; false, 反之亦然 */ private boolean isNewerVersionSaturnNamespace(String namespace, CuratorFramework curatorFramework) { try { // chcek if /$Job exists if (null != curatorFramework.checkExists().forPath("/" + namespace + JobNodePath.get$JobsNodePath())) { return true; } String executorsPath = "/" + namespace + ExecutorNodePath.getExecutorNodePath(); if (null != curatorFramework.checkExists().forPath(executorsPath)) { List<String> executors = curatorFramework.getChildren().forPath(executorsPath); if (executors != null && !executors.isEmpty()) { for (String exe : executors) { String lastBeginTimePath = executorsPath + "/" + exe + "/lastBeginTime"; if (null != curatorFramework.checkExists().forPath(lastBeginTimePath)) { return true; } } } } return false; } catch (Exception e) { log.error(e.getMessage(), e); return false; } } private NamespaceInfo getNamespaceInfo(String namespace) { try { return namespaceInfoService.selectByNamespace(namespace); } catch (Exception e) { log.error("fail to get namespaceInfo:{} from DB", namespace); return null; } } // For subclass override protected void postConstructRegistryCenterConfiguration(RegistryCenterConfiguration conf, String content) { // do nothing here } private void closeMoveOutNamespace(String zkClusterKey, List<NamespaceZkClusterMapping> nsZkClusterMappingList, List<RegistryCenterConfiguration> regCenterConfList) { if (regCenterConfList == null) { return; } Iterator<RegistryCenterConfiguration> regIter = regCenterConfList.iterator(); while (regIter.hasNext()) { RegistryCenterConfiguration conf = regIter.next(); String namespace = conf.getNamespace(); String nns = conf.getNameAndNamespace(); boolean include = false; if (nsZkClusterMappingList != null) { for (NamespaceZkClusterMapping mapping : nsZkClusterMappingList) { if (namespace.equals(mapping.getNamespace())) { include = true; break; } } } if (!include) { synchronized (getNnsLock(nns)) { regIter.remove(); closeNamespace(nns); log.info("closed the moved namespace info, namespace is {}, old zkClusterKey is {}", namespace, zkClusterKey); } } } } private void closeInvalidZkClient(Map<String, ZkCluster> newClusterMap) { Iterator<Entry<String, ZkCluster>> iterator = zkClusterMap.entrySet().iterator(); while (iterator.hasNext()) { Entry<String, ZkCluster> next = iterator.next(); String zkClusterKey = next.getKey(); ZkCluster zkCluster = next.getValue(); if (!newClusterMap.containsKey(zkClusterKey)) { iterator.remove(); closeZkCluster(zkCluster); } else { ZkCluster newZkCluster = newClusterMap.get(zkClusterKey); if (zkCluster.equals(newZkCluster)) { newClusterMap.put(zkClusterKey, zkCluster); } else { iterator.remove(); closeZkCluster(zkCluster); } } } } private Map<String, ZkCluster> getZkClusterInfo() { LinkedHashMap<String, ZkCluster> newClusterMap = new LinkedHashMap<>(); List<ZkClusterInfo> allZkClusterInfoList = zkClusterInfoService.getAllZkClusterInfo(); if (allZkClusterInfoList != null) { for (ZkClusterInfo zkClusterInfo : allZkClusterInfoList) { ZkCluster zkCluster = new ZkCluster(); zkCluster.setZkClusterKey(zkClusterInfo.getZkClusterKey()); zkCluster.setZkAlias(zkClusterInfo.getAlias()); zkCluster.setZkAddr(zkClusterInfo.getConnectString()); newClusterMap.put(zkClusterInfo.getZkClusterKey(), zkCluster); } } return newClusterMap; } private void connectToZkClusterIfPossible(Map<String, ZkCluster> newClusterMap) { Iterator<Entry<String, ZkCluster>> iterator = newClusterMap.entrySet().iterator(); while (iterator.hasNext()) { Entry<String, ZkCluster> next = iterator.next(); ZkCluster zkCluster = next.getValue(); CuratorFramework curatorFramework = zkCluster.getCuratorFramework(); if (curatorFramework == null) { createNewConnect(zkCluster); } } } private Object getNnsLock(String nns) { Object lock = nnsLock.get(nns); if (lock == null) { lock = new Object(); Object pre = nnsLock.putIfAbsent(nns, lock); if (pre != null) { lock = pre; } } return lock; } private void closeNamespace(String nns) { try { RegistryCenterClient registryCenterClient = registryCenterClientMap.remove(nns); if (registryCenterClient != null) { registryCenterClient.close(); } } catch (Exception e) { log.error(e.getMessage(), e); } try { NamespaceShardingManager namespaceShardingManager = namespaceShardingListenerManagerMap.remove(nns); if (namespaceShardingManager != null) { namespaceShardingManager.stopWithCurator(); } } catch (Exception e) { log.error(e.getMessage(), e); } } private void refreshDashboardLeaderTreeCache() { closeDeprecatedDashboardLeaderTreeCache(); Iterator<Entry<String, ZkCluster>> iterator = zkClusterMap.entrySet().iterator(); while (iterator.hasNext()) { Entry<String, ZkCluster> next = iterator.next(); String zkClusterKey = next.getKey(); ZkCluster zkCluster = next.getValue(); if (needToRefreshDashboardTreeCache(zkCluster, zkClusterKey)) { DashboardLeaderHandler dashboardLeaderHandler = null; try { dashboardLeaderHandler = new DashboardLeaderHandler(zkCluster.getZkAlias(), zkCluster.getCuratorFramework()); dashboardLeaderHandler.start(); dashboardLeaderTreeCacheMap.put(zkClusterKey, dashboardLeaderHandler); } catch (Exception e) { log.error(e.getMessage(), e); if (dashboardLeaderHandler != null) { dashboardLeaderHandler.shutdown(); } } } } } private boolean needToRefreshDashboardTreeCache(ZkCluster zkCluster, String zkClusterKey) { if (zkCluster.isOffline()) { return false; } if (dashboardLeaderTreeCacheMap.containsKey(zkClusterKey)) { return false; } return isZKClusterCanBeComputed(zkClusterKey) && isCurrentConsoleInTheSameIdc(zkClusterKey); } /** * 将不在本console服务器中进行Dashboard计算的DashboardLeaderTreeCache关闭 */ private void closeDeprecatedDashboardLeaderTreeCache() { if (dashboardLeaderTreeCacheMap == null || dashboardLeaderTreeCacheMap.isEmpty()) { return; } for (String zkClusterKey : dashboardLeaderTreeCacheMap.keySet()) { if (!isZKClusterCanBeComputed(zkClusterKey) || !isCurrentConsoleInTheSameIdc(zkClusterKey)) { log.info("close the deprecated dashboard leader tree Cache, {}", zkClusterKey); DashboardLeaderHandler oldDashboardLeaderHandler = dashboardLeaderTreeCacheMap.remove(zkClusterKey); if (oldDashboardLeaderHandler != null) { oldDashboardLeaderHandler.shutdown(); } } } } /** * Close dashboardLeaderTreeCache, registryCenterClient, namespaceShardingListenerManager with this zkCluster */ private void closeZkCluster(ZkCluster zkCluster) { try { try { DashboardLeaderHandler dashboardLeaderHandler = dashboardLeaderTreeCacheMap .remove(zkCluster.getZkClusterKey()); if (dashboardLeaderHandler != null) { dashboardLeaderHandler.shutdown(); } } catch (Exception e) { log.error(e.getMessage(), e); } ArrayList<RegistryCenterConfiguration> regCenterConfList = zkCluster.getRegCenterConfList(); if (regCenterConfList != null) { for (RegistryCenterConfiguration conf : regCenterConfList) { String nns = conf.getNameAndNamespace(); synchronized (getNnsLock(nns)) { closeNamespace(nns); } } } if (zkCluster.getConnectionListener() != null) { zkCluster.getConnectionListener().shutdownNowUntilTerminated(); zkCluster.setConnectionListener(null); } if (zkCluster.getCuratorFramework() != null) { zkCluster.getCuratorFramework().close(); } } catch (Exception e) { log.error(e.getMessage(), e); } } private void createNewConnect(final ZkCluster zkCluster) { String zkAddr = zkCluster.getZkAddr(); try { final CuratorFramework curatorFramework = curatorRepository.connect(zkAddr, null, zkCluster.getDigest()); if (curatorFramework == null) { log.error("found an offline zkCluster, zkAddr is {}", zkAddr); zkCluster.setCuratorFramework(null); zkCluster.setConnectionListener(null); zkCluster.setOffline(true); } else { AbstractConnectionListener connectionListener = new AbstractConnectionListener( "zk-connectionListener-thread-for-zkCluster-" + zkCluster.getZkAlias()) { @Override public void stop() { zkCluster.setOffline(true); } @Override public void restart() { try { zkCluster.setOffline(false); } catch (Exception e) { log.error(e.getMessage(), e); } } }; zkCluster.setCuratorFramework(curatorFramework); zkCluster.setConnectionListener(connectionListener); zkCluster.setOffline(false); curatorFramework.getConnectionStateListenable().addListener(connectionListener); } } catch (Exception e) { log.error("found an offline zkCluster, zkAddr is {}", zkAddr); log.error(e.getMessage(), e); zkCluster.setCuratorFramework(null); zkCluster.setConnectionListener(null); zkCluster.setOffline(true); } } protected void initNamespaceZkNodeIfNecessary(String namespace, CuratorFramework curatorFramework) { try { String executorsNodePath = "/" + namespace + ExecutorNodePath.get$ExecutorNodePath(); if (curatorFramework.checkExists().forPath(executorsNodePath) == null) { curatorFramework.create().creatingParentsIfNeeded().forPath(executorsNodePath); } String jobsNodePath = "/" + namespace + JobNodePath.get$JobsNodePath(); if (curatorFramework.checkExists().forPath(jobsNodePath) == null) { curatorFramework.create().creatingParentsIfNeeded().forPath(jobsNodePath); } } catch (Exception e) { log.error(e.getMessage(), e); } } private String getVersion(String namespace, CuratorFramework curatorFramework) { try { List<String> versionList = new ArrayList<>(); String executorsPath = "/" + namespace + ExecutorNodePath.getExecutorNodePath(); if (curatorFramework.checkExists().forPath(executorsPath) == null) { return getVersionStrFromList(versionList); } List<String> executors = curatorFramework.getChildren().forPath(executorsPath); if (executors == null || executors.isEmpty()) { return getVersionStrFromList(versionList); } for (String exe : executors) { String versionPath = executorsPath + "/" + exe + "/version"; if (curatorFramework.checkExists().forPath(versionPath) == null) { continue; } byte[] bs = curatorFramework.getData().forPath(versionPath); if (bs == null) { continue; } String version = new String(bs, "UTF-8"); if (!version.trim().isEmpty()) { String tmp = version.trim(); if (!versionList.contains(tmp)) { versionList.add(tmp); } } } return getVersionStrFromList(versionList); } catch (Exception e) { log.error(e.getMessage(), e); return ""; } } private String getVersionStrFromList(List<String> versionList) { Collections.sort(versionList); StringBuilder versionSb = new StringBuilder(); versionSb.append(""); for (int i = 0; i < versionList.size(); i++) { versionSb.append(versionList.get(i)); if (i < versionList.size() - 1) { versionSb.append(", "); } } return versionSb.toString(); } @Override public RegistryCenterClient connect(final String nameAndNameSpace) { final RegistryCenterClient registryCenterClient = new RegistryCenterClient(); registryCenterClient.setNameAndNamespace(nameAndNameSpace); if (nameAndNameSpace == null) { return registryCenterClient; } synchronized (getNnsLock(nameAndNameSpace)) { if (!registryCenterClientMap.containsKey(nameAndNameSpace)) { RegistryCenterConfiguration registryCenterConfiguration = findConfig(nameAndNameSpace); if (registryCenterConfiguration == null) { return registryCenterClient; } String zkAddressList = registryCenterConfiguration.getZkAddressList(); String namespace = registryCenterConfiguration.getNamespace(); String digest = registryCenterConfiguration.getDigest(); registryCenterClient.setZkAddr(zkAddressList); CuratorFramework client = curatorRepository.connect(zkAddressList, namespace, digest); if (client == null) { return registryCenterClient; } registryCenterClient.setConnected(client.getZookeeperClient().isConnected()); registryCenterClient.setCuratorClient(client); registryCenterClientMap.put(nameAndNameSpace, registryCenterClient); return registryCenterClient; } else { RegistryCenterClient registryCenterClient2 = registryCenterClientMap.get(nameAndNameSpace); if (registryCenterClient2 != null) { if (registryCenterClient2.getCuratorClient() != null) { registryCenterClient2.setConnected( registryCenterClient2.getCuratorClient().getZookeeperClient().isConnected()); } else { registryCenterClient2.setConnected(false); } return registryCenterClient2; } return registryCenterClient; } } } @Override public RegistryCenterClient connectByNamespace(String namespace) { RegistryCenterConfiguration registryCenterConfiguration = findConfigByNamespace(namespace); if (registryCenterConfiguration == null) { return new RegistryCenterClient(); } String nns = registryCenterConfiguration.getNameAndNamespace(); if (nns == null) { return new RegistryCenterClient(); } String zkAddressList = registryCenterConfiguration.getZkAddressList(); String digest = registryCenterConfiguration.getDigest(); synchronized (getNnsLock(nns)) { if (!registryCenterClientMap.containsKey(nns)) { final RegistryCenterClient registryCenterClient = new RegistryCenterClient(); registryCenterClient.setNameAndNamespace(nns); registryCenterClient.setZkAddr(zkAddressList); CuratorFramework client = curatorRepository.connect(zkAddressList, namespace, digest); if (client == null) { return registryCenterClient; } registryCenterClient.setConnected(client.getZookeeperClient().isConnected()); registryCenterClient.setCuratorClient(client); registryCenterClientMap.put(nns, registryCenterClient); return registryCenterClient; } else { RegistryCenterClient registryCenterClient = registryCenterClientMap.get(nns); if (registryCenterClient == null) { registryCenterClient = new RegistryCenterClient(); registryCenterClient.setNameAndNamespace(namespace); registryCenterClient.setZkAddr(zkAddressList); } else { if (registryCenterClient.getCuratorClient() != null) { registryCenterClient.setConnected( registryCenterClient.getCuratorClient().getZookeeperClient().isConnected()); } else { registryCenterClient.setConnected(false); } } return registryCenterClient; } } } @Override public RegistryCenterConfiguration findConfig(String nameAndNamespace) { if (Strings.isNullOrEmpty(nameAndNamespace)) { return null; } Collection<ZkCluster> zkClusters = zkClusterMap.values(); for (ZkCluster zkCluster : zkClusters) { for (RegistryCenterConfiguration each : zkCluster.getRegCenterConfList()) { if (each != null && nameAndNamespace.equals(each.getNameAndNamespace())) { return each; } } } return null; } @Override public RegistryCenterConfiguration findConfigByNamespace(String namespace) { if (Strings.isNullOrEmpty(namespace)) { return null; } Collection<ZkCluster> zkClusters = zkClusterMap.values(); for (ZkCluster zkCluster : zkClusters) { for (RegistryCenterConfiguration each : zkCluster.getRegCenterConfList()) { if (each != null && namespace.equals(each.getNamespace())) { return each; } } } return null; } @Override public CuratorRepository.CuratorFrameworkOp connectOnly(String zkAddr, String namespace) throws SaturnJobConsoleException { CuratorFramework curatorFramework = curatorRepository.connect(zkAddr, namespace, null); if (curatorFramework != null) { return curatorRepository.newCuratorFrameworkOp(curatorFramework); } return null; } @Override public void notifyRefreshRegCenter() throws SaturnJobConsoleException { SystemConfig systemConfig = new SystemConfig(); systemConfig.setProperty(SystemConfigProperties.REFRESH_REGISTRY_CENTER_UUID); systemConfig.setValue(UUID.randomUUID().toString()); systemConfigService.insertOrUpdate(systemConfig); } @Override public RegistryCenterClient getCuratorByNameAndNamespace(String nameAndNamespace) { return registryCenterClientMap.get(nameAndNamespace); } @Override public boolean isDashboardLeader(String zkClusterKey) { DashboardLeaderHandler dashboardLeaderHandler = dashboardLeaderTreeCacheMap.get(zkClusterKey); if (dashboardLeaderHandler != null) { return dashboardLeaderHandler.isLeader(); } return false; } @Override public ZkCluster getZkCluster(String zkClusterKey) { return zkClusterMap.get(zkClusterKey); } @Transactional(rollbackFor = Exception.class) @Override public void createZkCluster(String zkClusterKey, String alias, String connectString) throws SaturnJobConsoleException { zkClusterInfoService.createZkCluster(zkClusterKey, alias, connectString, ""); notifyRefreshRegCenter(); } @Override public Collection<ZkCluster> getZkClusterList() { return zkClusterMap.values(); } @Override public List<ZkCluster> getOnlineZkClusterList() { List<ZkCluster> onlineClusters = Lists.newArrayList(); for (ZkCluster zkCluster : zkClusterMap.values()) { if (!zkCluster.isOffline()) { onlineClusters.add(zkCluster); } } return onlineClusters; } private List<String> getZkClusterKeys() { Collection<ZkCluster> zkClusters = getZkClusterList(); List<String> zkClusterKeys = Lists.newArrayList(); for (ZkCluster zkCluster : zkClusters) { zkClusterKeys.add(zkCluster.getZkClusterKey()); } return zkClusterKeys; } @Override public int domainCount(String zkClusterKey) { ZkCluster zkCluster = zkClusterMap.get(zkClusterKey); if (zkCluster != null) { ArrayList<RegistryCenterConfiguration> regList = zkCluster.getRegCenterConfList(); if (regList != null) { return regList.size(); } } return 0; } @Override public boolean namespaceIsCorrect(String namespace, CuratorFramework curatorFramework) throws SaturnJobConsoleException { if (SaturnSelfNodePath.ROOT_NAME.equals(namespace)) { return false; } try { String executorsPath = "/" + namespace + ExecutorNodePath.getExecutorNodePath(); if (curatorFramework.checkExists().forPath(executorsPath) != null) { return true; } String jobsPath = "/" + namespace + JobNodePath.get$JobsNodePath(); return curatorFramework.checkExists().forPath(jobsPath) != null ? true : false; } catch (Exception e) { log.error(e.getMessage(), e); return false; } } @Override public List<String> getNamespaces() throws SaturnJobConsoleException { return allOnlineNamespaces; } @Override public File exportNamespaceInfo(List<String> namespaceList) throws SaturnJobConsoleException { Set<String> targetNamespaceSet = null; if (namespaceList != null) { targetNamespaceSet = Sets.newHashSet(namespaceList); } List<RegistryCenterConfiguration> namespaceInfoList = Lists.newLinkedList(); Collection<ZkCluster> zkClusterList = getZkClusterList(); for (ZkCluster zkCluster : zkClusterList) { List<RegistryCenterConfiguration> namespacesOfZkCluster = zkCluster.getRegCenterConfList(); for (RegistryCenterConfiguration ns : namespacesOfZkCluster) { if (targetNamespaceSet == null || targetNamespaceSet.contains(ns.getNamespace())) { namespaceInfoList.add(ns); } } } return exportNamespaceInfo2Excel(namespaceInfoList); } /** * Export namespac */ private File exportNamespaceInfo2Excel(List<RegistryCenterConfiguration> namespaceInfoList) throws SaturnJobConsoleException { try { File tmpFile = SaturnConsoleUtils.createTmpFile(); WritableWorkbook writableWorkbook = Workbook.createWorkbook(tmpFile); WritableSheet sheet1 = writableWorkbook.createSheet("ns", 0); sheet1.addCell(new Label(0, 0, "域名")); sheet1.addCell(new Label(1, 0, "描述")); sheet1.addCell(new Label(2, 0, "重要等级")); sheet1.addCell(new Label(3, 0, "Executor版本")); sheet1.addCell(new Label(4, 0, "ZK集群")); for (int i = 0; i < namespaceInfoList.size(); i++) { RegistryCenterConfiguration namespaceInfo = namespaceInfoList.get(i); sheet1.addCell(new Label(0, i + 1, namespaceInfo.getNamespace())); sheet1.addCell(new Label(1, i + 1, namespaceInfo.getName())); sheet1.addCell(new Label(2, i + 1, namespaceInfo.getDegree())); sheet1.addCell(new Label(3, i + 1, namespaceInfo.getVersion())); sheet1.addCell(new Label(4, i + 1, namespaceInfo.getZkAlias())); } writableWorkbook.write(); writableWorkbook.close(); return tmpFile; } catch (Exception e) { throw new SaturnJobConsoleException(e); } } @Transactional(rollbackFor = { Exception.class }) @Override public void createNamespace(NamespaceDomainInfo namespaceDomainInfo) throws SaturnJobConsoleException { String namespace = namespaceDomainInfo.getNamespace(); String zkClusterKey = namespaceDomainInfo.getZkCluster(); ZkCluster currentCluster = getZkCluster(zkClusterKey); if (currentCluster == null) { throw new SaturnJobConsoleHttpException(HttpStatus.BAD_REQUEST.value(), String.format(ERR_MSG_TEMPLATE_FAIL_TO_CREATE, namespace, "not found zkcluster" + zkClusterKey)); } if (checkNamespaceExists(namespace)) { throw new SaturnJobConsoleHttpException(HttpStatus.BAD_REQUEST.value(), String.format(ERR_MSG_NS_ALREADY_EXIST, namespace)); } try { // 创建 namespaceInfo NamespaceInfo namespaceInfo = constructNamespaceInfo(namespaceDomainInfo); namespaceInfoService.create(namespaceInfo); // 创建 zkcluster 和 namespaceInfo 关系 namespaceZkClusterMapping4SqlService.insert(namespace, "", zkClusterKey, NAMESPACE_CREATOR_NAME); // refresh notifyRefreshRegCenter(); } catch (Exception e) { log.error(e.getMessage(), e); throw new SaturnJobConsoleHttpException(HttpStatus.INTERNAL_SERVER_ERROR.value(), String.format(ERR_MSG_TEMPLATE_FAIL_TO_CREATE, namespace, e.getMessage())); } } @Override public void updateNamespace(NamespaceDomainInfo namespaceDomainInfo) throws SaturnJobConsoleException { String namespace = namespaceDomainInfo.getNamespace(); if (!checkNamespaceExists(namespace)) { throw new SaturnJobConsoleHttpException(HttpStatus.BAD_REQUEST.value(), ERR_MSG_NS_NOT_FOUND); } try { // 创建 namespaceInfo NamespaceInfo namespaceInfo = constructNamespaceInfo(namespaceDomainInfo); namespaceInfoService.update(namespaceInfo); // refresh notifyRefreshRegCenter(); } catch (Exception e) { log.error(e.getMessage(), e); throw new SaturnJobConsoleHttpException(HttpStatus.INTERNAL_SERVER_ERROR.value(), String.format(ERR_MSG_TEMPLATE_FAIL_TO_CREATE, namespace, e.getMessage())); } } @Override public NamespaceDomainInfo getNamespace(String namespace) throws SaturnJobConsoleException { if (namespaceInfoService.selectByNamespace(namespace) == null) { throw new SaturnJobConsoleHttpException(HttpStatus.NOT_FOUND.value(), ERR_MSG_NS_NOT_FOUND); } String zkClusterKey = namespaceZkClusterMapping4SqlService.getZkClusterKey(namespace); if (StringUtils.isBlank(zkClusterKey)) { throw new SaturnJobConsoleHttpException(HttpStatus.NOT_FOUND.value(), ERR_MSG_NS_NOT_FOUND); } NamespaceDomainInfo namespaceDomainInfo = new NamespaceDomainInfo(); namespaceDomainInfo.setNamespace(namespace); namespaceDomainInfo.setZkCluster(zkClusterKey); return namespaceDomainInfo; } @Transactional(rollbackFor = { Exception.class }) @Override public void bindNamespaceAndZkCluster(String namespace, String zkClusterKey, String updatedBy) throws SaturnJobConsoleException { ZkCluster currentCluster = getZkCluster(zkClusterKey); if (currentCluster == null) { throw new SaturnJobConsoleHttpException(HttpStatus.BAD_REQUEST.value(), String.format(ERR_MSG_TEMPLATE_FAIL_TO_CREATE, namespace, "not found zkcluster" + zkClusterKey)); } // namespace必须要存在 if (!checkNamespaceExists(namespace)) { throw new SaturnJobConsoleHttpException(HttpStatus.BAD_REQUEST.value(), ERR_MSG_NS_NOT_FOUND); } // 判断其它集群是否有该域 String zkClusterKeyOther = namespaceZkClusterMapping4SqlService.getZkClusterKey(namespace); if (zkClusterKeyOther != null) { ZkCluster zkClusterOther = getZkCluster(zkClusterKeyOther); if (zkClusterOther == null) { throw new SaturnJobConsoleException("zk cluster 不存在:" + zkClusterKeyOther); } if (zkClusterOther.getZkClusterKey().equals(zkClusterKey)) { throw new SaturnJobConsoleException("Namespace已经存在于此zk集群,不能重复添加"); } else { throw new SaturnJobConsoleException( "Namespace存在于另外的zk集群:" + zkClusterOther.getZkClusterKey() + ",不能重复添加"); } } try { namespaceZkClusterMapping4SqlService.insert(namespace, "", zkClusterKey, updatedBy); postBindNamespaceAndZkCluster(namespace, currentCluster); // refresh notifyRefreshRegCenter(); } catch (Exception e) { namespaceZkClusterMapping4SqlService.remove(namespace, updatedBy); throw new SaturnJobConsoleException(e.getMessage()); } } protected void postBindNamespaceAndZkCluster(String namespace, ZkCluster currentCluster) throws SaturnJobConsoleException { // for subclass implement } private boolean checkNamespaceExists(String namespace) { if (namespaceInfoService.selectByNamespace(namespace) != null) { return true; } // 判断其它集群是否有该域 String zkClusterKeyOther = namespaceZkClusterMapping4SqlService.getZkClusterKey(namespace); return zkClusterKeyOther != null ? true : false; } private NamespaceInfo constructNamespaceInfo(NamespaceDomainInfo namespaceDomainInfo) { NamespaceInfo namespaceInfo = new NamespaceInfo(); namespaceInfo.setCreatedBy(NAMESPACE_CREATOR_NAME); namespaceInfo.setCreateTime(new Date()); namespaceInfo.setIsDeleted(0); namespaceInfo.setLastUpdatedBy(NAMESPACE_CREATOR_NAME); namespaceInfo.setLastUpdateTime(new Date()); namespaceInfo.setNamespace(namespaceDomainInfo.getNamespace()); namespaceInfo.setContent(namespaceDomainInfo.getContent()); return namespaceInfo; } @Override public CuratorRepository.CuratorFrameworkOp getCuratorFrameworkOp(String namespace) throws SaturnJobConsoleException { CuratorRepository.CuratorFrameworkOp curatorFrameworkOp = null; try { RegistryCenterConfiguration registryCenterConfiguration = findConfigByNamespace(namespace); if (registryCenterConfiguration == null) { throw new SaturnJobConsoleException("Connect zookeeper failed"); } String nns = registryCenterConfiguration.getNameAndNamespace(); if (nns == null) { throw new SaturnJobConsoleException("Connect zookeeper failed"); } String zkAddressList = registryCenterConfiguration.getZkAddressList(); String digest = registryCenterConfiguration.getDigest(); synchronized (getNnsLock(nns)) { if (!registryCenterClientMap.containsKey(nns)) { final RegistryCenterClient registryCenterClient = new RegistryCenterClient(); registryCenterClient.setNameAndNamespace(nns); registryCenterClient.setZkAddr(zkAddressList); CuratorFramework curatorFramework = curatorRepository.connect(zkAddressList, namespace, digest); if (curatorFramework != null) { registryCenterClient.setConnected(curatorFramework.getZookeeperClient().isConnected()); registryCenterClient.setCuratorClient(curatorFramework); registryCenterClientMap.put(nns, registryCenterClient); curatorFrameworkOp = curatorRepository.newCuratorFrameworkOp(curatorFramework); } } else { RegistryCenterClient registryCenterClient = registryCenterClientMap.get(nns); if (registryCenterClient != null) { CuratorFramework curatorFramework = registryCenterClient.getCuratorClient(); if (curatorFramework != null) { registryCenterClient.setConnected(curatorFramework.getZookeeperClient().isConnected()); curatorFrameworkOp = curatorRepository.newCuratorFrameworkOp(curatorFramework); } } } } } catch (SaturnJobConsoleException e) { throw e; } catch (Exception e) { throw new SaturnJobConsoleException(e); } if (curatorFrameworkOp == null) { throw new SaturnJobConsoleException("Connect zookeeper failed"); } return curatorFrameworkOp; } }
#384 Revert code, fix it in the next version.
saturn-console-api/src/main/java/com/vip/saturn/job/console/service/impl/RegistryCenterServiceImpl.java
#384 Revert code, fix it in the next version.
Java
apache-2.0
5b8f3f2849bcae474f6698df56562411dc349d59
0
canoo/dolphin-platform,canoo/dolphin-platform,canoo/dolphin-platform
/* * Copyright 2015-2017 Canoo Engineering AG. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.canoo.dolphin.server.controller; import com.canoo.dolphin.impl.ReflectionHelper; import com.canoo.dolphin.server.DolphinAction; import com.canoo.dolphin.server.DolphinModel; import com.canoo.dolphin.server.Param; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import java.lang.annotation.Annotation; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.List; /** * This class validates the DolphinController */ public class ControllerValidator { public void validate(Class<?> clazz) throws ControllerValidationException { if (isInterface(clazz)) { throw new ControllerValidationException("Dolphin Controller must be a class."); } if (isAbstract(clazz)) { throw new ControllerValidationException("Dolphin Controller can't be abstract."); } if (isFinal(clazz)) { throw new ControllerValidationException("Dolphin Controller can't be final."); } // if (!containsDefaultConstructor(clazz)) { // throw new ControllerValidationException("Dolphin Controller must contain a default constructor."); // } if (postConstructContainsParameter(clazz)) { throw new ControllerValidationException("PostConstruct method should not contain parameter."); } if (moreThanOnePostConstruct(clazz)) { throw new ControllerValidationException("Only one PostConstruct method is allowed."); } if (preDestroyContainsParameter(clazz)) { throw new ControllerValidationException("PreDestroy method should not contain parameter."); } if (moreThanOnePreDestroy(clazz)) { throw new ControllerValidationException("Only one PreDestroy method is allowed."); } if (!isDolphinActionVoid(clazz)) { throw new ControllerValidationException("DolphinAction must be void."); } if (!isAnnotatedWithParam(clazz)) { throw new ControllerValidationException("DolphinAction parameters must be annotated with @param."); } if (!dolphinModelPresent(clazz)) { throw new ControllerValidationException("Controller must have a DolphinModel."); } if(moreThanOneDolphinModel(clazz)){ throw new ControllerValidationException("Controller should not contain more than one DolphinModel."); } } private boolean isInterface(Class<?> clazz) { return clazz.isInterface(); } private boolean isAbstract(Class<?> clazz) { return Modifier.isAbstract(clazz.getModifiers()); } private boolean isFinal(Class<?> clazz) { return Modifier.isFinal(clazz.getModifiers()); } // private boolean containsDefaultConstructor(Class<?> clazz) { // for (Constructor<?> constructor : clazz.getConstructors()) { // if (constructor.getParameterTypes().length == 0 || constructor.isAnnotationPresent(Inject.class)) { // return true; // } // } // return false; // } private boolean postConstructContainsParameter(Class<?> clazz) { List<Method> methods = ReflectionHelper.getInheritedDeclaredMethods(clazz); for (Method method : methods) { if (method.isAnnotationPresent(PostConstruct.class)) { return checkParameterLength(method); } } return false; } private boolean preDestroyContainsParameter(Class<?> clazz) { List<Method> methods = ReflectionHelper.getInheritedDeclaredMethods(clazz); for (Method method : methods) { if (method.isAnnotationPresent(PreDestroy.class)) { return checkParameterLength(method); } } return false; } private boolean checkParameterLength(Method method) { if (method.getParameterTypes().length > 0) { return true; } else { return false; } } private boolean isDolphinActionVoid(Class<?> clazz) { List<Method> methods = ReflectionHelper.getInheritedDeclaredMethods(clazz); for (Method method : methods) { if (method.isAnnotationPresent(DolphinAction.class)) { return method.getReturnType().equals(Void.TYPE); } } return true; } private boolean isAnnotatedWithParam(Class<?> clazz) { List<Method> methods = ReflectionHelper.getInheritedDeclaredMethods(clazz); for (Method method : methods) { if (method.isAnnotationPresent(DolphinAction.class)) { Annotation[][] annotations = method.getParameterAnnotations(); for (int i = 0; i < annotations.length; i++) { if (annotations[i].length == 0 || annotations[i][0].annotationType() != Param.class) { return false; } } } } return true; } private boolean dolphinModelPresent(Class<?> clazz) { List<Field> fields = ReflectionHelper.getInheritedDeclaredFields(clazz); for (Field field : fields) { if (field.isAnnotationPresent(DolphinModel.class)) { return true; } } return false; } private boolean moreThanOneDolphinModel(Class<?> clazz) { List<Field> fields = ReflectionHelper.getInheritedDeclaredFields(clazz); int count = 0; for (Field field : fields) { if (field.isAnnotationPresent(DolphinModel.class)) { count++; } } return count > 1; } private boolean moreThanOnePreDestroy(Class<?> clazz) { int count = 0; List<Method> methods = ReflectionHelper.getInheritedDeclaredMethods(clazz); for (Method method : methods) { if (method.isAnnotationPresent(PreDestroy.class)) { count++; } } return count > 1; } private boolean moreThanOnePostConstruct(Class<?> clazz) { int count = 0; List<Method> methods = ReflectionHelper.getInheritedDeclaredMethods(clazz); for (Method method : methods) { if (method.isAnnotationPresent(PostConstruct.class)) { count++; } } return count > 1; } }
platform/dolphin-platform-server/src/main/java/com/canoo/dolphin/server/controller/ControllerValidator.java
/* * Copyright 2015-2017 Canoo Engineering AG. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.canoo.dolphin.server.controller; import com.canoo.dolphin.impl.ReflectionHelper; import com.canoo.dolphin.server.DolphinAction; import com.canoo.dolphin.server.DolphinModel; import com.canoo.dolphin.server.Param; import javax.annotation.PostConstruct; import javax.annotation.PreDestroy; import java.lang.annotation.Annotation; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.lang.reflect.Modifier; import java.util.List; /** * This class validates the DolphinController */ public class ControllerValidator { public void validate(Class<?> clazz) throws ControllerValidationException { if (isInterface(clazz)) { throw new ControllerValidationException("Dolphin Controller must be a class."); } if (isAbstract(clazz)) { throw new ControllerValidationException("Dolphin Controller can't be abstract."); } if (isFinal(clazz)) { throw new ControllerValidationException("Dolphin Controller can't be final."); } // if (!containsDefaultConstructor(clazz)) { // throw new ControllerValidationException("Dolphin Controller must contain a default constructor."); // } if (postConstructContainsParameter(clazz)) { throw new ControllerValidationException("PostConstruct method should not contain parameter."); } if (moreThanOnePostConstruct(clazz)) { throw new ControllerValidationException("Only one PostConstruct method is allowed."); } if (preDestroyContainsParameter(clazz)) { throw new ControllerValidationException("PreDestroy method should not contain parameter."); } if (moreThanOnePreDestroy(clazz)) { throw new ControllerValidationException("Only one PreDestroy method is allowed."); } if (!isDolphinActionVoid(clazz)) { throw new ControllerValidationException("DolphinAction must be void."); } if (!isAnnotatedWithParam(clazz)) { throw new ControllerValidationException("DolphinAction parameters must be annotated with @param."); } if (!dolphinModelPresent(clazz)) { throw new ControllerValidationException("Controller must have a DolphinModel."); } } private boolean isInterface(Class<?> clazz) { return clazz.isInterface(); } private boolean isAbstract(Class<?> clazz) { return Modifier.isAbstract(clazz.getModifiers()); } private boolean isFinal(Class<?> clazz) { return Modifier.isFinal(clazz.getModifiers()); } // private boolean containsDefaultConstructor(Class<?> clazz) { // for (Constructor<?> constructor : clazz.getConstructors()) { // if (constructor.getParameterTypes().length == 0 || constructor.isAnnotationPresent(Inject.class)) { // return true; // } // } // return false; // } private boolean postConstructContainsParameter(Class<?> clazz) { List<Method> methods = ReflectionHelper.getInheritedDeclaredMethods(clazz); for (Method method : methods) { if (method.isAnnotationPresent(PostConstruct.class)) { return checkParameterLength(method); } } return false; } private boolean preDestroyContainsParameter(Class<?> clazz) { List<Method> methods = ReflectionHelper.getInheritedDeclaredMethods(clazz); for (Method method : methods) { if (method.isAnnotationPresent(PreDestroy.class)) { return checkParameterLength(method); } } return false; } private boolean checkParameterLength(Method method) { if (method.getParameterTypes().length > 0) { return true; } else { return false; } } private boolean isDolphinActionVoid(Class<?> clazz) { List<Method> methods = ReflectionHelper.getInheritedDeclaredMethods(clazz); for (Method method : methods) { if (method.isAnnotationPresent(DolphinAction.class)) { return method.getReturnType().equals(Void.TYPE); } } return true; } private boolean isAnnotatedWithParam(Class<?> clazz) { List<Method> methods = ReflectionHelper.getInheritedDeclaredMethods(clazz); for (Method method : methods) { if (method.isAnnotationPresent(DolphinAction.class)) { Annotation[][] annotations = method.getParameterAnnotations(); for (int i = 0; i < annotations.length; i++) { if (annotations[i].length == 0 || annotations[i][0].annotationType() != Param.class) { return false; } } } } return true; } private boolean dolphinModelPresent(Class<?> clazz) { List<Field> fields = ReflectionHelper.getInheritedDeclaredFields(clazz); for (Field field : fields) { return field.isAnnotationPresent(DolphinModel.class); } return false; } private boolean moreThanOnePreDestroy(Class<?> clazz) { int count = 0; List<Method> methods = ReflectionHelper.getInheritedDeclaredMethods(clazz); for (Method method : methods) { if (method.isAnnotationPresent(PreDestroy.class)) { count++; } } return count > 1; } private boolean moreThanOnePostConstruct(Class<?> clazz) { int count = 0; List<Method> methods = ReflectionHelper.getInheritedDeclaredMethods(clazz); for (Method method : methods) { if (method.isAnnotationPresent(PostConstruct.class)) { count++; } } return count > 1; } }
Fix COntrollerValidator for DolphinModel test
platform/dolphin-platform-server/src/main/java/com/canoo/dolphin/server/controller/ControllerValidator.java
Fix COntrollerValidator for DolphinModel test
Java
apache-2.0
2c782b86b54e1731f63923a2adbfbceddc022e7e
0
hhu94/Synapse-Repository-Services,hhu94/Synapse-Repository-Services,Sage-Bionetworks/Synapse-Repository-Services,Sage-Bionetworks/Synapse-Repository-Services,xschildw/Synapse-Repository-Services,hhu94/Synapse-Repository-Services,zimingd/Synapse-Repository-Services,Sage-Bionetworks/Synapse-Repository-Services,xschildw/Synapse-Repository-Services,hhu94/Synapse-Repository-Services,xschildw/Synapse-Repository-Services,Sage-Bionetworks/Synapse-Repository-Services,zimingd/Synapse-Repository-Services,zimingd/Synapse-Repository-Services,zimingd/Synapse-Repository-Services,xschildw/Synapse-Repository-Services
package org.sagebionetworks.repo.web.migration; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.when; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.UUID; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mockito; import org.sagebionetworks.bridge.manager.participantdata.ParticipantDataIdMappingManagerImpl; import org.sagebionetworks.bridge.model.BridgeParticipantDAO; import org.sagebionetworks.bridge.model.BridgeUserParticipantMappingDAO; import org.sagebionetworks.bridge.model.Community; import org.sagebionetworks.bridge.model.CommunityTeamDAO; import org.sagebionetworks.bridge.model.ParticipantDataDAO; import org.sagebionetworks.bridge.model.ParticipantDataDescriptorDAO; import org.sagebionetworks.bridge.model.ParticipantDataId; import org.sagebionetworks.bridge.model.ParticipantDataStatusDAO; import org.sagebionetworks.bridge.model.data.ParticipantDataColumnDescriptor; import org.sagebionetworks.bridge.model.data.ParticipantDataColumnType; import org.sagebionetworks.bridge.model.data.ParticipantDataDescriptor; import org.sagebionetworks.bridge.model.data.ParticipantDataRepeatType; import org.sagebionetworks.bridge.model.data.ParticipantDataRow; import org.sagebionetworks.bridge.model.data.ParticipantDataStatus; import org.sagebionetworks.bridge.model.data.value.ParticipantDataStringValue; import org.sagebionetworks.bridge.model.data.value.ParticipantDataValue; import org.sagebionetworks.evaluation.model.Evaluation; import org.sagebionetworks.evaluation.model.EvaluationStatus; import org.sagebionetworks.evaluation.model.Submission; import org.sagebionetworks.repo.manager.StorageQuotaManager; import org.sagebionetworks.repo.manager.UserManager; import org.sagebionetworks.repo.manager.UserProfileManager; import org.sagebionetworks.repo.manager.migration.MigrationManager; import org.sagebionetworks.repo.model.ACCESS_TYPE; import org.sagebionetworks.repo.model.AccessApproval; import org.sagebionetworks.repo.model.AccessRequirement; import org.sagebionetworks.repo.model.AuthenticationDAO; import org.sagebionetworks.repo.model.AuthorizationConstants.BOOTSTRAP_PRINCIPAL; import org.sagebionetworks.repo.model.CommentDAO; import org.sagebionetworks.repo.model.DatastoreException; import org.sagebionetworks.repo.model.DomainType; import org.sagebionetworks.repo.model.FileEntity; import org.sagebionetworks.repo.model.Folder; import org.sagebionetworks.repo.model.GroupMembersDAO; import org.sagebionetworks.repo.model.MembershipInvtnSubmission; import org.sagebionetworks.repo.model.MembershipInvtnSubmissionDAO; import org.sagebionetworks.repo.model.MembershipRqstSubmission; import org.sagebionetworks.repo.model.MembershipRqstSubmissionDAO; import org.sagebionetworks.repo.model.MessageDAO; import org.sagebionetworks.repo.model.ObjectType; import org.sagebionetworks.repo.model.Project; import org.sagebionetworks.repo.model.RestrictableObjectDescriptor; import org.sagebionetworks.repo.model.RestrictableObjectType; import org.sagebionetworks.repo.model.StorageQuotaAdminDao; import org.sagebionetworks.repo.model.Team; import org.sagebionetworks.repo.model.TeamDAO; import org.sagebionetworks.repo.model.TermsOfUseAccessApproval; import org.sagebionetworks.repo.model.TermsOfUseAccessRequirement; import org.sagebionetworks.repo.model.UserGroup; import org.sagebionetworks.repo.model.UserGroupDAO; import org.sagebionetworks.repo.model.UserInfo; import org.sagebionetworks.repo.model.auth.NewUser; import org.sagebionetworks.repo.model.bootstrap.EntityBootstrapper; import org.sagebionetworks.repo.model.daemon.BackupRestoreStatus; import org.sagebionetworks.repo.model.daemon.DaemonStatus; import org.sagebionetworks.repo.model.daemon.RestoreSubmission; import org.sagebionetworks.repo.model.dao.FileHandleDao; import org.sagebionetworks.repo.model.dao.table.ColumnModelDAO; import org.sagebionetworks.repo.model.dao.table.TableRowTruthDAO; import org.sagebionetworks.repo.model.dbo.DBOBasicDao; import org.sagebionetworks.repo.model.dbo.dao.table.TableModelUtils; import org.sagebionetworks.repo.model.dbo.persistence.DBOSessionToken; import org.sagebionetworks.repo.model.dbo.persistence.DBOTermsOfUseAgreement; import org.sagebionetworks.repo.model.file.FileHandle; import org.sagebionetworks.repo.model.file.PreviewFileHandle; import org.sagebionetworks.repo.model.file.S3FileHandle; import org.sagebionetworks.repo.model.jdo.KeyFactory; import org.sagebionetworks.repo.model.message.Comment; import org.sagebionetworks.repo.model.message.MessageToUser; import org.sagebionetworks.repo.model.IdList; import org.sagebionetworks.repo.model.migration.ListBucketProvider; import org.sagebionetworks.repo.model.migration.MigrationType; import org.sagebionetworks.repo.model.migration.MigrationTypeCount; import org.sagebionetworks.repo.model.migration.MigrationTypeCounts; import org.sagebionetworks.repo.model.migration.MigrationTypeList; import org.sagebionetworks.repo.model.migration.MigrationUtils; import org.sagebionetworks.repo.model.migration.RowMetadata; import org.sagebionetworks.repo.model.migration.RowMetadataResult; import org.sagebionetworks.repo.model.provenance.Activity; import org.sagebionetworks.repo.model.table.ColumnModel; import org.sagebionetworks.repo.model.table.Row; import org.sagebionetworks.repo.model.table.RowSet; import org.sagebionetworks.repo.model.v2.dao.V2WikiPageDao; import org.sagebionetworks.repo.model.v2.wiki.V2WikiPage; import org.sagebionetworks.repo.web.NotFoundException; import org.sagebionetworks.repo.web.controller.DispatchServletSingleton; import org.sagebionetworks.repo.web.controller.EntityServletTestHelper; import org.sagebionetworks.repo.web.controller.ServletTestHelper; import org.sagebionetworks.repo.web.service.ServiceProvider; import org.sagebionetworks.schema.adapter.JSONObjectAdapterException; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; /** * This is an integration test to test the migration of all tables from start to finish. * * The test does the following: 1. the before() method creates at least one object for every type object that must * migrate. 2. Create a backup copy of all data. 3. Delete all data in the system. 4. Restore all data from the backup. * * NOTE: Whenever a new migration type is added this test must be extended to test that objects migration. * * * * @author jmhill * */ @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(locations = { "classpath:test-context.xml" }) public class MigrationIntegrationAutowireTest { public static final long MAX_WAIT_MS = 10 * 1000; // 10 sec. @Autowired private DBOBasicDao basicDao; @Autowired private EntityServletTestHelper entityServletHelper; @Autowired private UserManager userManager; @Autowired private FileHandleDao fileMetadataDao; @Autowired private UserProfileManager userProfileManager; @Autowired private ServiceProvider serviceProvider; @Autowired private EntityBootstrapper entityBootstrapper; @Autowired private MigrationManager migrationManager; @Autowired private StorageQuotaManager storageQuotaManager; @Autowired private StorageQuotaAdminDao storageQuotaAdminDao; @Autowired private UserGroupDAO userGroupDAO; @Autowired private GroupMembersDAO groupMembersDAO; @Autowired private TeamDAO teamDAO; @Autowired private CommunityTeamDAO communityTeamDAO; @Autowired private BridgeParticipantDAO bridgeParticipantDAO; @Autowired private BridgeUserParticipantMappingDAO bridgeUserParticipantMappingDAO; @Autowired private ParticipantDataDAO participantDataDAO; @Autowired private ParticipantDataDescriptorDAO participantDataDescriptorDAO; @Autowired private ParticipantDataStatusDAO participantDataStatusDAO; @Autowired private AuthenticationDAO authDAO; @Autowired private MessageDAO messageDAO; @Autowired private CommentDAO commentDAO; @Autowired private MembershipRqstSubmissionDAO membershipRqstSubmissionDAO; @Autowired private MembershipInvtnSubmissionDAO membershipInvtnSubmissionDAO; @Autowired private ColumnModelDAO columnModelDao; @Autowired private TableRowTruthDAO tableRowTruthDao; @Autowired private V2WikiPageDao v2wikiPageDAO; private Long adminUserId; private String adminUserIdString; private UserInfo adminUserInfo; // Activity private Activity activity; // Entities private Project project; private FileEntity fileEntity; private Community community; private Folder folderToTrash; // requirement private AccessRequirement accessRequirement; // approval private AccessApproval accessApproval; // V2 Wiki page private V2WikiPage v2RootWiki; private V2WikiPage v2SubWiki; // File Handles private S3FileHandle handleOne; private S3FileHandle markdownOne; private PreviewFileHandle preview; // Evaluation private Evaluation evaluation; private Submission submission; private HttpServletRequest mockRequest; private UserInfo newUser; @Before public void before() throws Exception { mockRequest = Mockito.mock(HttpServletRequest.class); when(mockRequest.getServletPath()).thenReturn("/repo/v1"); // get user IDs adminUserId = BOOTSTRAP_PRINCIPAL.THE_ADMIN_USER.getPrincipalId(); adminUserIdString = adminUserId.toString(); adminUserInfo = userManager.getUserInfo(adminUserId); resetDatabase(); createNewUser(); String sampleFileHandleId = createFileHandles(); createActivity(); createEntities(); createFavorite(); createEvaluation(); createAccessRequirement(); createAccessApproval(); createV2WikiPages(); createDoi(); createStorageQuota(); UserGroup sampleGroup = createUserGroups(1); createTeamsRequestsAndInvitations(sampleGroup); createCredentials(sampleGroup); createSessionToken(sampleGroup); createTermsOfUseAgreement(sampleGroup); createMessages(sampleGroup, sampleFileHandleId); createColumnModel(); UserGroup sampleGroup2 = createUserGroups(2); createCommunity(sampleGroup2); createParticipantData(sampleGroup); } private void createColumnModel() throws DatastoreException, NotFoundException, IOException { String tableId = "syn123"; // Create some test column models List<ColumnModel> start = TableModelUtils.createOneOfEachType(); // Create each one List<ColumnModel> models = new LinkedList<ColumnModel>(); for (ColumnModel cm : start) { models.add(columnModelDao.createColumnModel(cm)); } List<String> header = TableModelUtils.getHeaders(models); // bind the columns to the entity columnModelDao.bindColumnToObject(header, tableId); // create some test rows. List<Row> rows = TableModelUtils.createRows(models, 5); RowSet set = new RowSet(); set.setHeaders(TableModelUtils.getHeaders(models)); set.setRows(rows); set.setTableId(tableId); // Append the rows to the table tableRowTruthDao.appendRowSetToTable(adminUserIdString, tableId, models, set); // Append some more rows rows = TableModelUtils.createRows(models, 6); set.setRows(rows); tableRowTruthDao.appendRowSetToTable(adminUserIdString, tableId, models, set); } public void createNewUser() throws NotFoundException { NewUser user = new NewUser(); user.setUserName(UUID.randomUUID().toString()); user.setEmail(user.getUserName() + "@test.com"); Long id = userManager.createUser(user); newUser = userManager.getUserInfo(id); } private void resetDatabase() throws Exception { // This gives us a chance to also delete the S3 for table rows tableRowTruthDao.truncateAllRowData(); // Before we start this test we want to start with a clean database migrationManager.deleteAllData(adminUserInfo); // bootstrap to put back the bootstrap data entityBootstrapper.bootstrapAll(); storageQuotaAdminDao.clear(); } private void createFavorite() { userProfileManager.addFavorite(adminUserInfo, fileEntity.getId()); } private void createDoi() throws Exception { serviceProvider.getDoiService().createDoi(adminUserId, project.getId(), ObjectType.ENTITY, 1L); } private void createActivity() throws Exception { activity = new Activity(); activity.setDescription("some desc"); activity = serviceProvider.getActivityService().createActivity(adminUserId, activity); } private void createEvaluation() throws Exception { // initialize Evaluations evaluation = new Evaluation(); evaluation.setName("name"); evaluation.setDescription("description"); evaluation.setContentSource(project.getId()); evaluation.setStatus(EvaluationStatus.PLANNED); evaluation.setSubmissionInstructionsMessage("instructions"); evaluation.setSubmissionReceiptMessage("receipt"); evaluation = serviceProvider.getEvaluationService().createEvaluation(adminUserId, evaluation); evaluation = new Evaluation(); evaluation.setName("name2"); evaluation.setDescription("description"); evaluation.setContentSource(project.getId()); evaluation.setStatus(EvaluationStatus.OPEN); evaluation.setSubmissionInstructionsMessage("instructions"); evaluation.setSubmissionReceiptMessage("receipt"); evaluation = serviceProvider.getEvaluationService().createEvaluation(adminUserId, evaluation); // initialize Participants serviceProvider.getEvaluationService().addParticipant(adminUserId, evaluation.getId()); // initialize Submissions submission = new Submission(); submission.setName("submission1"); submission.setVersionNumber(1L); submission.setEntityId(fileEntity.getId()); submission.setUserId(adminUserIdString); submission.setEvaluationId(evaluation.getId()); submission = entityServletHelper.createSubmission(submission, adminUserId, fileEntity.getEtag()); } public void createAccessApproval() throws Exception { accessApproval = newToUAccessApproval(accessRequirement.getId(), adminUserIdString); accessApproval = ServletTestHelper.createAccessApproval(DispatchServletSingleton.getInstance(), accessApproval, adminUserId, new HashMap<String, String>()); } public void createAccessRequirement() throws Exception { // Add an access requirement to this entity accessRequirement = newAccessRequirement(); String entityId = project.getId(); RestrictableObjectDescriptor entitySubjectId = new RestrictableObjectDescriptor(); entitySubjectId.setId(entityId); entitySubjectId.setType(RestrictableObjectType.ENTITY); RestrictableObjectDescriptor evaluationSubjectId = new RestrictableObjectDescriptor(); assertNotNull(evaluation); assertNotNull(evaluation.getId()); evaluationSubjectId.setId(evaluation.getId()); evaluationSubjectId.setType(RestrictableObjectType.EVALUATION); accessRequirement.setSubjectIds(Arrays.asList(new RestrictableObjectDescriptor[] { entitySubjectId, evaluationSubjectId })); accessRequirement = ServletTestHelper.createAccessRequirement(DispatchServletSingleton.getInstance(), accessRequirement, adminUserId, new HashMap<String, String>()); } private TermsOfUseAccessApproval newToUAccessApproval(Long requirementId, String accessorId) { TermsOfUseAccessApproval aa = new TermsOfUseAccessApproval(); aa.setAccessorId(accessorId); aa.setEntityType(TermsOfUseAccessApproval.class.getName()); aa.setRequirementId(requirementId); return aa; } public void createV2WikiPages() throws NotFoundException { // Using wikiPageDao until wiki service is created // Create a V2 Wiki page v2RootWiki = new V2WikiPage(); v2RootWiki.setCreatedBy(adminUserIdString); v2RootWiki.setModifiedBy(adminUserIdString); v2RootWiki.setAttachmentFileHandleIds(new LinkedList<String>()); v2RootWiki.getAttachmentFileHandleIds().add(handleOne.getId()); v2RootWiki.setTitle("Root title"); v2RootWiki.setMarkdownFileHandleId(markdownOne.getId()); Map<String, FileHandle> map = new HashMap<String, FileHandle>(); map.put(handleOne.getFileName(), handleOne); List<String> newIds = new ArrayList<String>(); newIds.add(handleOne.getId()); v2RootWiki = v2wikiPageDAO.create(v2RootWiki, map, fileEntity.getId(), ObjectType.ENTITY, newIds); // Create a child v2SubWiki = new V2WikiPage(); v2SubWiki.setCreatedBy(adminUserIdString); v2SubWiki.setModifiedBy(adminUserIdString); v2SubWiki.setParentWikiId(v2RootWiki.getId()); v2SubWiki.setTitle("V2 Sub-wiki-title"); v2SubWiki.setMarkdownFileHandleId(markdownOne.getId()); v2SubWiki = v2wikiPageDAO.create(v2SubWiki, new HashMap<String, FileHandle>(), fileEntity.getId(), ObjectType.ENTITY, new ArrayList<String>()); } /** * Create the entities used by this test. * * @throws JSONObjectAdapterException * @throws ServletException * @throws IOException * @throws NotFoundException */ public void createEntities() throws JSONObjectAdapterException, ServletException, IOException, NotFoundException { // Create a project project = new Project(); project.setName("MigrationIntegrationAutowireTest.Project"); project.setEntityType(Project.class.getName()); project = serviceProvider.getEntityService().createEntity(adminUserId, project, null, mockRequest); // Create a file entity fileEntity = new FileEntity(); fileEntity.setName("MigrationIntegrationAutowireTest.FileEntity"); fileEntity.setEntityType(FileEntity.class.getName()); fileEntity.setParentId(project.getId()); fileEntity.setDataFileHandleId(handleOne.getId()); fileEntity = serviceProvider.getEntityService().createEntity(adminUserId, fileEntity, activity.getId(), mockRequest); // Create a folder to trash folderToTrash = new Folder(); folderToTrash.setName("boundForTheTrashCan"); folderToTrash.setParentId(project.getId()); folderToTrash = serviceProvider.getEntityService().createEntity(adminUserId, folderToTrash, null, mockRequest); // Send it to the trash can serviceProvider.getTrashService().moveToTrash(adminUserId, folderToTrash.getId()); } private AccessRequirement newAccessRequirement() { TermsOfUseAccessRequirement dto = new TermsOfUseAccessRequirement(); dto.setEntityType(dto.getClass().getName()); dto.setAccessType(ACCESS_TYPE.DOWNLOAD); dto.setTermsOfUse("foo"); return dto; } /** * Create the file handles used by this test. * * @throws NotFoundException */ public String createFileHandles() throws NotFoundException { // Create a file handle handleOne = new S3FileHandle(); handleOne.setCreatedBy(adminUserIdString); handleOne.setCreatedOn(new Date()); handleOne.setBucketName("bucket"); handleOne.setKey("mainFileKey"); handleOne.setEtag("etag"); handleOne.setFileName("foo.bar"); handleOne = fileMetadataDao.createFile(handleOne); // Create markdown content markdownOne = new S3FileHandle(); markdownOne.setCreatedBy(adminUserIdString); markdownOne.setCreatedOn(new Date()); markdownOne.setBucketName("bucket"); markdownOne.setKey("markdownFileKey"); markdownOne.setEtag("etag"); markdownOne.setFileName("markdown1"); markdownOne = fileMetadataDao.createFile(markdownOne); // Create a preview preview = new PreviewFileHandle(); preview.setCreatedBy(adminUserIdString); preview.setCreatedOn(new Date()); preview.setBucketName("bucket"); preview.setKey("previewFileKey"); preview.setEtag("etag"); preview.setFileName("bar.txt"); preview = fileMetadataDao.createFile(preview); // Set two as the preview of one fileMetadataDao.setPreviewId(handleOne.getId(), preview.getId()); return handleOne.getId(); } private void createStorageQuota() { storageQuotaManager.setQuotaForUser(adminUserInfo, adminUserInfo, 3000); } // returns a group for use in a team private UserGroup createUserGroups(int index) throws NotFoundException { List<String> adder = new ArrayList<String>(); // Make one group UserGroup parentGroup = new UserGroup(); parentGroup.setIsIndividual(false); parentGroup.setId(userGroupDAO.create(parentGroup).toString()); // Make two users UserGroup parentUser = new UserGroup(); parentUser.setIsIndividual(true); parentUser.setId(userGroupDAO.create(parentUser).toString()); UserGroup siblingUser = new UserGroup(); siblingUser.setIsIndividual(true); siblingUser.setId(userGroupDAO.create(siblingUser).toString()); // Nest one group and two users within the parent group adder.add(parentUser.getId()); adder.add(siblingUser.getId()); groupMembersDAO.addMembers(parentGroup.getId(), adder); return parentGroup; } private void createCredentials(UserGroup group) throws Exception { Long principalId = Long.parseLong(group.getId()); authDAO.changePassword(principalId, "ThisIsMySuperSecurePassword"); authDAO.changeSecretKey(principalId); authDAO.changeSessionToken(principalId, null); } private void createSessionToken(UserGroup group) throws Exception { DBOSessionToken token = new DBOSessionToken(); token.setDomain(DomainType.SYNAPSE.name()); token.setPrincipalId(Long.parseLong(group.getId())); token.setSessionToken(UUID.randomUUID().toString()); token.setValidatedOn(new Date()); basicDao.createNew(token); } private void createTermsOfUseAgreement(UserGroup group) throws Exception { DBOTermsOfUseAgreement tou = new DBOTermsOfUseAgreement(); tou.setPrincipalId(Long.parseLong(group.getId())); tou.setAgreesToTermsOfUse(Boolean.TRUE); tou.setDomain(DomainType.SYNAPSE.name()); basicDao.createNew(tou); } @SuppressWarnings("serial") private void createMessages(final UserGroup group, String fileHandleId) { MessageToUser dto = new MessageToUser(); // Note: ID is auto generated dto.setCreatedBy(group.getId()); dto.setFileHandleId(fileHandleId); // Note: CreatedOn is set by the DAO dto.setSubject("See you on the other side?"); dto.setRecipients(new HashSet<String>() { { add(group.getId()); } }); dto.setInReplyTo(null); // Note: InReplyToRoot is calculated by the DAO dto = messageDAO.createMessage(dto); messageDAO.createMessageStatus_NewTransaction(dto.getId(), group.getId(), null); Comment dto2 = new Comment(); dto2.setCreatedBy(group.getId()); dto2.setFileHandleId(fileHandleId); dto2.setTargetId("1337"); dto2.setTargetType(ObjectType.ENTITY); commentDAO.createComment(dto2); } private void createTeamsRequestsAndInvitations(UserGroup group) { String otherUserId = BOOTSTRAP_PRINCIPAL.ANONYMOUS_USER.getPrincipalId().toString(); Team team = new Team(); team.setId(group.getId()); team.setName(UUID.randomUUID().toString()); team.setDescription("test team"); teamDAO.create(team); // create a MembershipRqstSubmission MembershipRqstSubmission mrs = new MembershipRqstSubmission(); Date createdOn = new Date(); Date expiresOn = new Date(); mrs.setCreatedOn(createdOn); mrs.setExpiresOn(expiresOn); mrs.setMessage("Please let me join the team."); mrs.setTeamId("" + group.getId()); // need another valid user group mrs.setUserId(otherUserId); membershipRqstSubmissionDAO.create(mrs); // create a MembershipInvtnSubmission MembershipInvtnSubmission mis = new MembershipInvtnSubmission(); mis.setCreatedOn(createdOn); mis.setExpiresOn(expiresOn); mis.setMessage("Please join the team."); mis.setTeamId("" + group.getId()); // need another valid user group mis.setInviteeId(otherUserId); membershipInvtnSubmissionDAO.create(mis); } private void createCommunity(UserGroup group) throws Exception { Team team = new Team(); team.setId(group.getId()); team.setName(UUID.randomUUID().toString()); team.setDescription("test team"); team = teamDAO.create(team); // Create a community community = new Community(); community.setName("MigrationIntegrationAutowireTest.Community"); community.setEntityType(Community.class.getName()); community.setTeamId(team.getId()); community = serviceProvider.getEntityService().createEntity(adminUserId, community, null, mockRequest); communityTeamDAO.create(KeyFactory.stringToKey(community.getId()), Long.parseLong(team.getId())); } private void createParticipantData(UserGroup sampleGroup) throws Exception { Long participantId = Long.parseLong(sampleGroup.getId()) ^ -1L; bridgeParticipantDAO.create(participantId); bridgeUserParticipantMappingDAO.setParticipantIdsForUser(Long.parseLong(sampleGroup.getId()), Collections.<ParticipantDataId> singletonList(new ParticipantDataId(participantId))); ParticipantDataDescriptor participantDataDescriptor = new ParticipantDataDescriptor(); participantDataDescriptor.setName(participantId.toString() + "desc"); participantDataDescriptor.setRepeatType(ParticipantDataRepeatType.ALWAYS); participantDataDescriptor.setRepeatFrequency("0 0 4 * * ? *"); participantDataDescriptor = participantDataDescriptorDAO.createParticipantDataDescriptor(participantDataDescriptor); ParticipantDataColumnDescriptor participantDataColumnDescriptor = new ParticipantDataColumnDescriptor(); participantDataColumnDescriptor.setParticipantDataDescriptorId(participantDataDescriptor.getId()); participantDataColumnDescriptor.setName("a"); participantDataColumnDescriptor.setColumnType(ParticipantDataColumnType.STRING); participantDataDescriptorDAO.createParticipantDataColumnDescriptor(participantDataColumnDescriptor); ParticipantDataColumnDescriptor participantDataColumnDescriptor2 = new ParticipantDataColumnDescriptor(); participantDataColumnDescriptor2.setParticipantDataDescriptorId(participantDataDescriptor.getId()); participantDataColumnDescriptor2.setName("b"); participantDataColumnDescriptor2.setColumnType(ParticipantDataColumnType.STRING); participantDataDescriptorDAO.createParticipantDataColumnDescriptor(participantDataColumnDescriptor2); ParticipantDataRow dataRow = new ParticipantDataRow(); ParticipantDataStringValue stringValue1 = new ParticipantDataStringValue(); stringValue1.setValue("1"); ParticipantDataStringValue stringValue2 = new ParticipantDataStringValue(); stringValue2.setValue("2"); dataRow.setData(ImmutableMap.<String, ParticipantDataValue> builder().put("a", stringValue1).put("b", stringValue2).build()); List<ParticipantDataRow> data = Lists.newArrayList(dataRow); participantDataDAO.append(new ParticipantDataId(participantId), participantDataDescriptor.getId(), data, Lists.newArrayList(participantDataColumnDescriptor, participantDataColumnDescriptor2)); ParticipantDataStatus status = new ParticipantDataStatus(); status.setParticipantDataDescriptorId(participantDataDescriptor.getId()); status.setLastEntryComplete(false); status.setLastPrompted(new Date()); status.setLastStarted(new Date()); participantDataStatusDAO.update(Collections.<ParticipantDataStatus> singletonList(status), ImmutableMap .<String, ParticipantDataId> builder().put(participantDataDescriptor.getId(), new ParticipantDataId(participantId)).build()); } @After public void after() throws Exception { // to cleanup for this test we delete all in the database resetDatabase(); } /** * This is the actual test. The rest of the class is setup and tear down. * * @throws Exception */ @Test public void testRoundTrip() throws Exception { // Get the list of primary types MigrationTypeList primaryTypesList = entityServletHelper.getPrimaryMigrationTypes(adminUserId); assertNotNull(primaryTypesList); assertNotNull(primaryTypesList.getList()); assertTrue(primaryTypesList.getList().size() > 0); // Get the counts before we start MigrationTypeCounts startCounts = entityServletHelper.getMigrationTypeCounts(adminUserId); validateStartingCount(startCounts); // This test will backup all data, delete it, then restore it. List<BackupInfo> backupList = new ArrayList<BackupInfo>(); for (MigrationType type : primaryTypesList.getList()) { // Backup each type backupList.addAll(backupAllOfType(type)); } // Now delete all data in reverse order for (int i = primaryTypesList.getList().size() - 1; i >= 0; i--) { MigrationType type = primaryTypesList.getList().get(i); deleteAllOfType(type); } // After deleting, the counts should be 0 except for a few special cases MigrationTypeCounts afterDeleteCounts = entityServletHelper.getMigrationTypeCounts(adminUserId); assertNotNull(afterDeleteCounts); assertNotNull(afterDeleteCounts.getList()); for (int i = 0; i < afterDeleteCounts.getList().size(); i++) { MigrationTypeCount afterDelete = afterDeleteCounts.getList().get(i); // Special cases for the not-deleted migration admin if (afterDelete.getType() == MigrationType.PRINCIPAL) { assertEquals("There should be 4 UserGroups remaining after the delete: " + BOOTSTRAP_PRINCIPAL.THE_ADMIN_USER + ", " + BOOTSTRAP_PRINCIPAL.ADMINISTRATORS_GROUP + ", " + BOOTSTRAP_PRINCIPAL.PUBLIC_GROUP + ", and " + BOOTSTRAP_PRINCIPAL.AUTHENTICATED_USERS_GROUP, new Long(4), afterDelete.getCount()); } else if (afterDelete.getType() == MigrationType.GROUP_MEMBERS || afterDelete.getType() == MigrationType.CREDENTIAL) { assertEquals("Counts do not match for: " + afterDelete.getType().name(), new Long(1), afterDelete.getCount()); } else { assertEquals("Counts are non-zero for: " + afterDelete.getType().name(), new Long(0), afterDelete.getCount()); } } // Now restore all of the data for (BackupInfo info : backupList) { String fileName = info.getFileName(); assertNotNull("Did not find a backup file name for type: " + info.getType(), fileName); restoreFromBackup(info.getType(), fileName); } // The counts should all be back MigrationTypeCounts finalCounts = entityServletHelper.getMigrationTypeCounts(adminUserId); for (int i = 1; i < finalCounts.getList().size(); i++) { MigrationTypeCount startCount = startCounts.getList().get(i); MigrationTypeCount afterRestore = finalCounts.getList().get(i); assertEquals("Count for " + startCount.getType().name() + " does not match", startCount.getCount(), afterRestore.getCount()); } } private static class BackupInfo { MigrationType type; String fileName; public BackupInfo(MigrationType type, String fileName) { super(); this.type = type; this.fileName = fileName; } public MigrationType getType() { return type; } public String getFileName() { return fileName; } } /** * There must be at least one object for every type of migratable object. * * @param startCounts */ private void validateStartingCount(MigrationTypeCounts startCounts) { assertNotNull(startCounts); assertNotNull(startCounts.getList()); List<MigrationType> typesToMigrate = new LinkedList<MigrationType>(); for (MigrationType tm : MigrationType.values()) { if (migrationManager.isMigrationTypeUsed(adminUserInfo, tm)) { typesToMigrate.add(tm); } } assertEquals( "This test requires at least one object to exist for each MigrationType. Please create a new object of the new MigrationType in the before() method of this test.", typesToMigrate.size(), startCounts.getList().size()); for (MigrationTypeCount count : startCounts.getList()) { assertTrue("This test requires at least one object to exist for each MigrationType. Please create a new object of type: " + count.getType() + " in the before() method of this test.", count.getCount() > 0); } } /** * Extract the filename from the full url. * * @param fullUrl * @return */ public String getFileNameFromUrl(String fullUrl) { ; int index = fullUrl.lastIndexOf("/"); return fullUrl.substring(index + 1, fullUrl.length()); } /** * Backup all data * * @param type * @return * @throws Exception */ private List<BackupInfo> backupAllOfType(MigrationType type) throws Exception { RowMetadataResult list = entityServletHelper.getRowMetadata(adminUserId, type, Long.MAX_VALUE, 0); if (list == null) return null; // Backup batches by their level in the tree ListBucketProvider provider = new ListBucketProvider(); MigrationUtils.bucketByTreeLevel(list.getList().iterator(), provider); List<BackupInfo> result = new ArrayList<BackupInfo>(); List<List<Long>> listOfBuckets = provider.getListOfBuckets(); for (List<Long> batch : listOfBuckets) { if (batch.size() > 0) { String fileName = backup(type, batch); result.add(new BackupInfo(type, fileName)); } } return result; } private String backup(MigrationType type, List<Long> tobackup) throws Exception { // Start the backup job IdList ids = new IdList(); ids.setList(tobackup); BackupRestoreStatus status = entityServletHelper.startBackup(adminUserId, type, ids); // wait for it.. waitForDaemon(status); status = entityServletHelper.getBackupRestoreStatus(adminUserId, status.getId()); assertNotNull(status.getBackupUrl()); return getFileNameFromUrl(status.getBackupUrl()); } private void restoreFromBackup(MigrationType type, String fileName) throws Exception { RestoreSubmission sub = new RestoreSubmission(); sub.setFileName(fileName); BackupRestoreStatus status = entityServletHelper.startRestore(adminUserId, type, sub); // wait for it waitForDaemon(status); } /** * Delete all data for a type. * * @param type * @throws ServletException * @throws IOException * @throws JSONObjectAdapterException */ private void deleteAllOfType(MigrationType type) throws Exception { IdList idList = getIdListOfAllOfType(type); if (idList == null) return; MigrationTypeCount result = entityServletHelper.deleteMigrationType(adminUserId, type, idList); System.out.println("Deleted: " + result); } /** * List all of the IDs for a type. * * @param type * @return * @throws ServletException * @throws IOException * @throws JSONObjectAdapterException */ private IdList getIdListOfAllOfType(MigrationType type) throws Exception { RowMetadataResult list = entityServletHelper.getRowMetadata(adminUserId, type, Long.MAX_VALUE, 0); if (list.getTotalCount() < 1) return null; // Create the backup list List<Long> toBackup = new LinkedList<Long>(); for (RowMetadata row : list.getList()) { toBackup.add(row.getId()); } IdList idList = new IdList(); idList.setList(toBackup); return idList; } /** * Wait for a deamon to process a a job. * * @param status * @throws InterruptedException * @throws JSONObjectAdapterException * @throws IOException * @throws ServletException */ private void waitForDaemon(BackupRestoreStatus status) throws Exception { long start = System.currentTimeMillis(); while (DaemonStatus.COMPLETED != status.getStatus()) { assertFalse("Daemon failed " + status.getErrorDetails(), DaemonStatus.FAILED == status.getStatus()); System.out.println("Waiting for backup/restore daemon. Message: " + status.getProgresssMessage()); Thread.sleep(1000); long elapse = System.currentTimeMillis() - start; assertTrue("Timed out waiting for a backup/restore daemon", elapse < MAX_WAIT_MS); status = entityServletHelper.getBackupRestoreStatus(adminUserId, status.getId()); } } }
services/repository/src/test/java/org/sagebionetworks/repo/web/migration/MigrationIntegrationAutowireTest.java
package org.sagebionetworks.repo.web.migration; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertTrue; import static org.mockito.Mockito.when; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.UUID; import javax.servlet.ServletException; import javax.servlet.http.HttpServletRequest; import org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.mockito.Mockito; import org.sagebionetworks.bridge.manager.participantdata.ParticipantDataIdMappingManagerImpl; import org.sagebionetworks.bridge.model.BridgeParticipantDAO; import org.sagebionetworks.bridge.model.BridgeUserParticipantMappingDAO; import org.sagebionetworks.bridge.model.Community; import org.sagebionetworks.bridge.model.CommunityTeamDAO; import org.sagebionetworks.bridge.model.ParticipantDataDAO; import org.sagebionetworks.bridge.model.ParticipantDataDescriptorDAO; import org.sagebionetworks.bridge.model.ParticipantDataId; import org.sagebionetworks.bridge.model.ParticipantDataStatusDAO; import org.sagebionetworks.bridge.model.data.ParticipantDataColumnDescriptor; import org.sagebionetworks.bridge.model.data.ParticipantDataColumnType; import org.sagebionetworks.bridge.model.data.ParticipantDataDescriptor; import org.sagebionetworks.bridge.model.data.ParticipantDataRepeatType; import org.sagebionetworks.bridge.model.data.ParticipantDataRow; import org.sagebionetworks.bridge.model.data.ParticipantDataStatus; import org.sagebionetworks.bridge.model.data.value.ParticipantDataStringValue; import org.sagebionetworks.bridge.model.data.value.ParticipantDataValue; import org.sagebionetworks.evaluation.model.Evaluation; import org.sagebionetworks.evaluation.model.EvaluationStatus; import org.sagebionetworks.evaluation.model.Submission; import org.sagebionetworks.repo.manager.StorageQuotaManager; import org.sagebionetworks.repo.manager.UserManager; import org.sagebionetworks.repo.manager.UserProfileManager; import org.sagebionetworks.repo.manager.migration.MigrationManager; import org.sagebionetworks.repo.model.ACCESS_TYPE; import org.sagebionetworks.repo.model.AccessApproval; import org.sagebionetworks.repo.model.AccessRequirement; import org.sagebionetworks.repo.model.AuthenticationDAO; import org.sagebionetworks.repo.model.AuthorizationConstants.BOOTSTRAP_PRINCIPAL; import org.sagebionetworks.repo.model.CommentDAO; import org.sagebionetworks.repo.model.DatastoreException; import org.sagebionetworks.repo.model.FileEntity; import org.sagebionetworks.repo.model.Folder; import org.sagebionetworks.repo.model.GroupMembersDAO; import org.sagebionetworks.repo.model.MembershipInvtnSubmission; import org.sagebionetworks.repo.model.MembershipInvtnSubmissionDAO; import org.sagebionetworks.repo.model.MembershipRqstSubmission; import org.sagebionetworks.repo.model.MembershipRqstSubmissionDAO; import org.sagebionetworks.repo.model.MessageDAO; import org.sagebionetworks.repo.model.ObjectType; import org.sagebionetworks.repo.model.Project; import org.sagebionetworks.repo.model.RestrictableObjectDescriptor; import org.sagebionetworks.repo.model.RestrictableObjectType; import org.sagebionetworks.repo.model.StorageQuotaAdminDao; import org.sagebionetworks.repo.model.Team; import org.sagebionetworks.repo.model.TeamDAO; import org.sagebionetworks.repo.model.TermsOfUseAccessApproval; import org.sagebionetworks.repo.model.TermsOfUseAccessRequirement; import org.sagebionetworks.repo.model.UserGroup; import org.sagebionetworks.repo.model.UserGroupDAO; import org.sagebionetworks.repo.model.UserInfo; import org.sagebionetworks.repo.model.auth.NewUser; import org.sagebionetworks.repo.model.bootstrap.EntityBootstrapper; import org.sagebionetworks.repo.model.daemon.BackupRestoreStatus; import org.sagebionetworks.repo.model.daemon.DaemonStatus; import org.sagebionetworks.repo.model.daemon.RestoreSubmission; import org.sagebionetworks.repo.model.dao.FileHandleDao; import org.sagebionetworks.repo.model.dao.table.ColumnModelDAO; import org.sagebionetworks.repo.model.dao.table.TableRowTruthDAO; import org.sagebionetworks.repo.model.dbo.dao.table.TableModelUtils; import org.sagebionetworks.repo.model.file.FileHandle; import org.sagebionetworks.repo.model.file.PreviewFileHandle; import org.sagebionetworks.repo.model.file.S3FileHandle; import org.sagebionetworks.repo.model.jdo.KeyFactory; import org.sagebionetworks.repo.model.message.Comment; import org.sagebionetworks.repo.model.message.MessageToUser; import org.sagebionetworks.repo.model.IdList; import org.sagebionetworks.repo.model.migration.ListBucketProvider; import org.sagebionetworks.repo.model.migration.MigrationType; import org.sagebionetworks.repo.model.migration.MigrationTypeCount; import org.sagebionetworks.repo.model.migration.MigrationTypeCounts; import org.sagebionetworks.repo.model.migration.MigrationTypeList; import org.sagebionetworks.repo.model.migration.MigrationUtils; import org.sagebionetworks.repo.model.migration.RowMetadata; import org.sagebionetworks.repo.model.migration.RowMetadataResult; import org.sagebionetworks.repo.model.provenance.Activity; import org.sagebionetworks.repo.model.table.ColumnModel; import org.sagebionetworks.repo.model.table.Row; import org.sagebionetworks.repo.model.table.RowSet; import org.sagebionetworks.repo.model.v2.dao.V2WikiPageDao; import org.sagebionetworks.repo.model.v2.wiki.V2WikiPage; import org.sagebionetworks.repo.web.NotFoundException; import org.sagebionetworks.repo.web.controller.DispatchServletSingleton; import org.sagebionetworks.repo.web.controller.EntityServletTestHelper; import org.sagebionetworks.repo.web.controller.ServletTestHelper; import org.sagebionetworks.repo.web.service.ServiceProvider; import org.sagebionetworks.schema.adapter.JSONObjectAdapterException; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.test.context.ContextConfiguration; import org.springframework.test.context.junit4.SpringJUnit4ClassRunner; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Lists; /** * This is an integration test to test the migration of all tables from start to finish. * * The test does the following: 1. the before() method creates at least one object for every type object that must * migrate. 2. Create a backup copy of all data. 3. Delete all data in the system. 4. Restore all data from the backup. * * NOTE: Whenever a new migration type is added this test must be extended to test that objects migration. * * * * @author jmhill * */ @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration(locations = { "classpath:test-context.xml" }) public class MigrationIntegrationAutowireTest { public static final long MAX_WAIT_MS = 10 * 1000; // 10 sec. @Autowired private EntityServletTestHelper entityServletHelper; @Autowired private UserManager userManager; @Autowired private FileHandleDao fileMetadataDao; @Autowired private UserProfileManager userProfileManager; @Autowired private ServiceProvider serviceProvider; @Autowired private EntityBootstrapper entityBootstrapper; @Autowired private MigrationManager migrationManager; @Autowired private StorageQuotaManager storageQuotaManager; @Autowired private StorageQuotaAdminDao storageQuotaAdminDao; @Autowired private UserGroupDAO userGroupDAO; @Autowired private GroupMembersDAO groupMembersDAO; @Autowired private TeamDAO teamDAO; @Autowired private CommunityTeamDAO communityTeamDAO; @Autowired private BridgeParticipantDAO bridgeParticipantDAO; @Autowired private BridgeUserParticipantMappingDAO bridgeUserParticipantMappingDAO; @Autowired private ParticipantDataDAO participantDataDAO; @Autowired private ParticipantDataDescriptorDAO participantDataDescriptorDAO; @Autowired private ParticipantDataStatusDAO participantDataStatusDAO; @Autowired private AuthenticationDAO authDAO; @Autowired private MessageDAO messageDAO; @Autowired private CommentDAO commentDAO; @Autowired private MembershipRqstSubmissionDAO membershipRqstSubmissionDAO; @Autowired private MembershipInvtnSubmissionDAO membershipInvtnSubmissionDAO; @Autowired private ColumnModelDAO columnModelDao; @Autowired private TableRowTruthDAO tableRowTruthDao; @Autowired private V2WikiPageDao v2wikiPageDAO; private Long adminUserId; private String adminUserIdString; private UserInfo adminUserInfo; // Activity private Activity activity; // Entities private Project project; private FileEntity fileEntity; private Community community; private Folder folderToTrash; // requirement private AccessRequirement accessRequirement; // approval private AccessApproval accessApproval; // V2 Wiki page private V2WikiPage v2RootWiki; private V2WikiPage v2SubWiki; // File Handles private S3FileHandle handleOne; private S3FileHandle markdownOne; private PreviewFileHandle preview; // Evaluation private Evaluation evaluation; private Submission submission; private HttpServletRequest mockRequest; private UserInfo newUser; @Before public void before() throws Exception { mockRequest = Mockito.mock(HttpServletRequest.class); when(mockRequest.getServletPath()).thenReturn("/repo/v1"); // get user IDs adminUserId = BOOTSTRAP_PRINCIPAL.THE_ADMIN_USER.getPrincipalId(); adminUserIdString = adminUserId.toString(); adminUserInfo = userManager.getUserInfo(adminUserId); resetDatabase(); createNewUser(); String sampleFileHandleId = createFileHandles(); createActivity(); createEntities(); createFavorite(); createEvaluation(); createAccessRequirement(); createAccessApproval(); createV2WikiPages(); createDoi(); createStorageQuota(); UserGroup sampleGroup = createUserGroups(1); createTeamsRequestsAndInvitations(sampleGroup); createCredentials(sampleGroup); createMessages(sampleGroup, sampleFileHandleId); createColumnModel(); UserGroup sampleGroup2 = createUserGroups(2); createCommunity(sampleGroup2); createParticipantData(sampleGroup); } private void createColumnModel() throws DatastoreException, NotFoundException, IOException { String tableId = "syn123"; // Create some test column models List<ColumnModel> start = TableModelUtils.createOneOfEachType(); // Create each one List<ColumnModel> models = new LinkedList<ColumnModel>(); for (ColumnModel cm : start) { models.add(columnModelDao.createColumnModel(cm)); } List<String> header = TableModelUtils.getHeaders(models); // bind the columns to the entity columnModelDao.bindColumnToObject(header, tableId); // create some test rows. List<Row> rows = TableModelUtils.createRows(models, 5); RowSet set = new RowSet(); set.setHeaders(TableModelUtils.getHeaders(models)); set.setRows(rows); set.setTableId(tableId); // Append the rows to the table tableRowTruthDao.appendRowSetToTable(adminUserIdString, tableId, models, set); // Append some more rows rows = TableModelUtils.createRows(models, 6); set.setRows(rows); tableRowTruthDao.appendRowSetToTable(adminUserIdString, tableId, models, set); } public void createNewUser() throws NotFoundException { NewUser user = new NewUser(); user.setUserName(UUID.randomUUID().toString()); user.setEmail(user.getUserName() + "@test.com"); Long id = userManager.createUser(user); newUser = userManager.getUserInfo(id); } private void resetDatabase() throws Exception { // This gives us a chance to also delete the S3 for table rows tableRowTruthDao.truncateAllRowData(); // Before we start this test we want to start with a clean database migrationManager.deleteAllData(adminUserInfo); // bootstrap to put back the bootstrap data entityBootstrapper.bootstrapAll(); storageQuotaAdminDao.clear(); } private void createFavorite() { userProfileManager.addFavorite(adminUserInfo, fileEntity.getId()); } private void createDoi() throws Exception { serviceProvider.getDoiService().createDoi(adminUserId, project.getId(), ObjectType.ENTITY, 1L); } private void createActivity() throws Exception { activity = new Activity(); activity.setDescription("some desc"); activity = serviceProvider.getActivityService().createActivity(adminUserId, activity); } private void createEvaluation() throws Exception { // initialize Evaluations evaluation = new Evaluation(); evaluation.setName("name"); evaluation.setDescription("description"); evaluation.setContentSource(project.getId()); evaluation.setStatus(EvaluationStatus.PLANNED); evaluation.setSubmissionInstructionsMessage("instructions"); evaluation.setSubmissionReceiptMessage("receipt"); evaluation = serviceProvider.getEvaluationService().createEvaluation(adminUserId, evaluation); evaluation = new Evaluation(); evaluation.setName("name2"); evaluation.setDescription("description"); evaluation.setContentSource(project.getId()); evaluation.setStatus(EvaluationStatus.OPEN); evaluation.setSubmissionInstructionsMessage("instructions"); evaluation.setSubmissionReceiptMessage("receipt"); evaluation = serviceProvider.getEvaluationService().createEvaluation(adminUserId, evaluation); // initialize Participants serviceProvider.getEvaluationService().addParticipant(adminUserId, evaluation.getId()); // initialize Submissions submission = new Submission(); submission.setName("submission1"); submission.setVersionNumber(1L); submission.setEntityId(fileEntity.getId()); submission.setUserId(adminUserIdString); submission.setEvaluationId(evaluation.getId()); submission = entityServletHelper.createSubmission(submission, adminUserId, fileEntity.getEtag()); } public void createAccessApproval() throws Exception { accessApproval = newToUAccessApproval(accessRequirement.getId(), adminUserIdString); accessApproval = ServletTestHelper.createAccessApproval(DispatchServletSingleton.getInstance(), accessApproval, adminUserId, new HashMap<String, String>()); } public void createAccessRequirement() throws Exception { // Add an access requirement to this entity accessRequirement = newAccessRequirement(); String entityId = project.getId(); RestrictableObjectDescriptor entitySubjectId = new RestrictableObjectDescriptor(); entitySubjectId.setId(entityId); entitySubjectId.setType(RestrictableObjectType.ENTITY); RestrictableObjectDescriptor evaluationSubjectId = new RestrictableObjectDescriptor(); assertNotNull(evaluation); assertNotNull(evaluation.getId()); evaluationSubjectId.setId(evaluation.getId()); evaluationSubjectId.setType(RestrictableObjectType.EVALUATION); accessRequirement.setSubjectIds(Arrays.asList(new RestrictableObjectDescriptor[] { entitySubjectId, evaluationSubjectId })); accessRequirement = ServletTestHelper.createAccessRequirement(DispatchServletSingleton.getInstance(), accessRequirement, adminUserId, new HashMap<String, String>()); } private TermsOfUseAccessApproval newToUAccessApproval(Long requirementId, String accessorId) { TermsOfUseAccessApproval aa = new TermsOfUseAccessApproval(); aa.setAccessorId(accessorId); aa.setEntityType(TermsOfUseAccessApproval.class.getName()); aa.setRequirementId(requirementId); return aa; } public void createV2WikiPages() throws NotFoundException { // Using wikiPageDao until wiki service is created // Create a V2 Wiki page v2RootWiki = new V2WikiPage(); v2RootWiki.setCreatedBy(adminUserIdString); v2RootWiki.setModifiedBy(adminUserIdString); v2RootWiki.setAttachmentFileHandleIds(new LinkedList<String>()); v2RootWiki.getAttachmentFileHandleIds().add(handleOne.getId()); v2RootWiki.setTitle("Root title"); v2RootWiki.setMarkdownFileHandleId(markdownOne.getId()); Map<String, FileHandle> map = new HashMap<String, FileHandle>(); map.put(handleOne.getFileName(), handleOne); List<String> newIds = new ArrayList<String>(); newIds.add(handleOne.getId()); v2RootWiki = v2wikiPageDAO.create(v2RootWiki, map, fileEntity.getId(), ObjectType.ENTITY, newIds); // Create a child v2SubWiki = new V2WikiPage(); v2SubWiki.setCreatedBy(adminUserIdString); v2SubWiki.setModifiedBy(adminUserIdString); v2SubWiki.setParentWikiId(v2RootWiki.getId()); v2SubWiki.setTitle("V2 Sub-wiki-title"); v2SubWiki.setMarkdownFileHandleId(markdownOne.getId()); v2SubWiki = v2wikiPageDAO.create(v2SubWiki, new HashMap<String, FileHandle>(), fileEntity.getId(), ObjectType.ENTITY, new ArrayList<String>()); } /** * Create the entities used by this test. * * @throws JSONObjectAdapterException * @throws ServletException * @throws IOException * @throws NotFoundException */ public void createEntities() throws JSONObjectAdapterException, ServletException, IOException, NotFoundException { // Create a project project = new Project(); project.setName("MigrationIntegrationAutowireTest.Project"); project.setEntityType(Project.class.getName()); project = serviceProvider.getEntityService().createEntity(adminUserId, project, null, mockRequest); // Create a file entity fileEntity = new FileEntity(); fileEntity.setName("MigrationIntegrationAutowireTest.FileEntity"); fileEntity.setEntityType(FileEntity.class.getName()); fileEntity.setParentId(project.getId()); fileEntity.setDataFileHandleId(handleOne.getId()); fileEntity = serviceProvider.getEntityService().createEntity(adminUserId, fileEntity, activity.getId(), mockRequest); // Create a folder to trash folderToTrash = new Folder(); folderToTrash.setName("boundForTheTrashCan"); folderToTrash.setParentId(project.getId()); folderToTrash = serviceProvider.getEntityService().createEntity(adminUserId, folderToTrash, null, mockRequest); // Send it to the trash can serviceProvider.getTrashService().moveToTrash(adminUserId, folderToTrash.getId()); } private AccessRequirement newAccessRequirement() { TermsOfUseAccessRequirement dto = new TermsOfUseAccessRequirement(); dto.setEntityType(dto.getClass().getName()); dto.setAccessType(ACCESS_TYPE.DOWNLOAD); dto.setTermsOfUse("foo"); return dto; } /** * Create the file handles used by this test. * * @throws NotFoundException */ public String createFileHandles() throws NotFoundException { // Create a file handle handleOne = new S3FileHandle(); handleOne.setCreatedBy(adminUserIdString); handleOne.setCreatedOn(new Date()); handleOne.setBucketName("bucket"); handleOne.setKey("mainFileKey"); handleOne.setEtag("etag"); handleOne.setFileName("foo.bar"); handleOne = fileMetadataDao.createFile(handleOne); // Create markdown content markdownOne = new S3FileHandle(); markdownOne.setCreatedBy(adminUserIdString); markdownOne.setCreatedOn(new Date()); markdownOne.setBucketName("bucket"); markdownOne.setKey("markdownFileKey"); markdownOne.setEtag("etag"); markdownOne.setFileName("markdown1"); markdownOne = fileMetadataDao.createFile(markdownOne); // Create a preview preview = new PreviewFileHandle(); preview.setCreatedBy(adminUserIdString); preview.setCreatedOn(new Date()); preview.setBucketName("bucket"); preview.setKey("previewFileKey"); preview.setEtag("etag"); preview.setFileName("bar.txt"); preview = fileMetadataDao.createFile(preview); // Set two as the preview of one fileMetadataDao.setPreviewId(handleOne.getId(), preview.getId()); return handleOne.getId(); } private void createStorageQuota() { storageQuotaManager.setQuotaForUser(adminUserInfo, adminUserInfo, 3000); } // returns a group for use in a team private UserGroup createUserGroups(int index) throws NotFoundException { List<String> adder = new ArrayList<String>(); // Make one group UserGroup parentGroup = new UserGroup(); parentGroup.setIsIndividual(false); parentGroup.setId(userGroupDAO.create(parentGroup).toString()); // Make two users UserGroup parentUser = new UserGroup(); parentUser.setIsIndividual(true); parentUser.setId(userGroupDAO.create(parentUser).toString()); UserGroup siblingUser = new UserGroup(); siblingUser.setIsIndividual(true); siblingUser.setId(userGroupDAO.create(siblingUser).toString()); // Nest one group and two users within the parent group adder.add(parentUser.getId()); adder.add(siblingUser.getId()); groupMembersDAO.addMembers(parentGroup.getId(), adder); return parentGroup; } private void createCredentials(UserGroup group) throws Exception { Long principalId = Long.parseLong(group.getId()); authDAO.changePassword(principalId, "ThisIsMySuperSecurePassword"); authDAO.changeSecretKey(principalId); authDAO.changeSessionToken(principalId, null); } @SuppressWarnings("serial") private void createMessages(final UserGroup group, String fileHandleId) { MessageToUser dto = new MessageToUser(); // Note: ID is auto generated dto.setCreatedBy(group.getId()); dto.setFileHandleId(fileHandleId); // Note: CreatedOn is set by the DAO dto.setSubject("See you on the other side?"); dto.setRecipients(new HashSet<String>() { { add(group.getId()); } }); dto.setInReplyTo(null); // Note: InReplyToRoot is calculated by the DAO dto = messageDAO.createMessage(dto); messageDAO.createMessageStatus_NewTransaction(dto.getId(), group.getId(), null); Comment dto2 = new Comment(); dto2.setCreatedBy(group.getId()); dto2.setFileHandleId(fileHandleId); dto2.setTargetId("1337"); dto2.setTargetType(ObjectType.ENTITY); commentDAO.createComment(dto2); } private void createTeamsRequestsAndInvitations(UserGroup group) { String otherUserId = BOOTSTRAP_PRINCIPAL.ANONYMOUS_USER.getPrincipalId().toString(); Team team = new Team(); team.setId(group.getId()); team.setName(UUID.randomUUID().toString()); team.setDescription("test team"); teamDAO.create(team); // create a MembershipRqstSubmission MembershipRqstSubmission mrs = new MembershipRqstSubmission(); Date createdOn = new Date(); Date expiresOn = new Date(); mrs.setCreatedOn(createdOn); mrs.setExpiresOn(expiresOn); mrs.setMessage("Please let me join the team."); mrs.setTeamId("" + group.getId()); // need another valid user group mrs.setUserId(otherUserId); membershipRqstSubmissionDAO.create(mrs); // create a MembershipInvtnSubmission MembershipInvtnSubmission mis = new MembershipInvtnSubmission(); mis.setCreatedOn(createdOn); mis.setExpiresOn(expiresOn); mis.setMessage("Please join the team."); mis.setTeamId("" + group.getId()); // need another valid user group mis.setInviteeId(otherUserId); membershipInvtnSubmissionDAO.create(mis); } private void createCommunity(UserGroup group) throws Exception { Team team = new Team(); team.setId(group.getId()); team.setName(UUID.randomUUID().toString()); team.setDescription("test team"); team = teamDAO.create(team); // Create a community community = new Community(); community.setName("MigrationIntegrationAutowireTest.Community"); community.setEntityType(Community.class.getName()); community.setTeamId(team.getId()); community = serviceProvider.getEntityService().createEntity(adminUserId, community, null, mockRequest); communityTeamDAO.create(KeyFactory.stringToKey(community.getId()), Long.parseLong(team.getId())); } private void createParticipantData(UserGroup sampleGroup) throws Exception { Long participantId = Long.parseLong(sampleGroup.getId()) ^ -1L; bridgeParticipantDAO.create(participantId); bridgeUserParticipantMappingDAO.setParticipantIdsForUser(Long.parseLong(sampleGroup.getId()), Collections.<ParticipantDataId> singletonList(new ParticipantDataId(participantId))); ParticipantDataDescriptor participantDataDescriptor = new ParticipantDataDescriptor(); participantDataDescriptor.setName(participantId.toString() + "desc"); participantDataDescriptor.setRepeatType(ParticipantDataRepeatType.ALWAYS); participantDataDescriptor.setRepeatFrequency("0 0 4 * * ? *"); participantDataDescriptor = participantDataDescriptorDAO.createParticipantDataDescriptor(participantDataDescriptor); ParticipantDataColumnDescriptor participantDataColumnDescriptor = new ParticipantDataColumnDescriptor(); participantDataColumnDescriptor.setParticipantDataDescriptorId(participantDataDescriptor.getId()); participantDataColumnDescriptor.setName("a"); participantDataColumnDescriptor.setColumnType(ParticipantDataColumnType.STRING); participantDataDescriptorDAO.createParticipantDataColumnDescriptor(participantDataColumnDescriptor); ParticipantDataColumnDescriptor participantDataColumnDescriptor2 = new ParticipantDataColumnDescriptor(); participantDataColumnDescriptor2.setParticipantDataDescriptorId(participantDataDescriptor.getId()); participantDataColumnDescriptor2.setName("b"); participantDataColumnDescriptor2.setColumnType(ParticipantDataColumnType.STRING); participantDataDescriptorDAO.createParticipantDataColumnDescriptor(participantDataColumnDescriptor2); ParticipantDataRow dataRow = new ParticipantDataRow(); ParticipantDataStringValue stringValue1 = new ParticipantDataStringValue(); stringValue1.setValue("1"); ParticipantDataStringValue stringValue2 = new ParticipantDataStringValue(); stringValue2.setValue("2"); dataRow.setData(ImmutableMap.<String, ParticipantDataValue> builder().put("a", stringValue1).put("b", stringValue2).build()); List<ParticipantDataRow> data = Lists.newArrayList(dataRow); participantDataDAO.append(new ParticipantDataId(participantId), participantDataDescriptor.getId(), data, Lists.newArrayList(participantDataColumnDescriptor, participantDataColumnDescriptor2)); ParticipantDataStatus status = new ParticipantDataStatus(); status.setParticipantDataDescriptorId(participantDataDescriptor.getId()); status.setLastEntryComplete(false); status.setLastPrompted(new Date()); status.setLastStarted(new Date()); participantDataStatusDAO.update(Collections.<ParticipantDataStatus> singletonList(status), ImmutableMap .<String, ParticipantDataId> builder().put(participantDataDescriptor.getId(), new ParticipantDataId(participantId)).build()); } @After public void after() throws Exception { // to cleanup for this test we delete all in the database resetDatabase(); } /** * This is the actual test. The rest of the class is setup and tear down. * * @throws Exception */ @Test public void testRoundTrip() throws Exception { // Get the list of primary types MigrationTypeList primaryTypesList = entityServletHelper.getPrimaryMigrationTypes(adminUserId); assertNotNull(primaryTypesList); assertNotNull(primaryTypesList.getList()); assertTrue(primaryTypesList.getList().size() > 0); // Get the counts before we start MigrationTypeCounts startCounts = entityServletHelper.getMigrationTypeCounts(adminUserId); validateStartingCount(startCounts); // This test will backup all data, delete it, then restore it. List<BackupInfo> backupList = new ArrayList<BackupInfo>(); for (MigrationType type : primaryTypesList.getList()) { // Backup each type backupList.addAll(backupAllOfType(type)); } // Now delete all data in reverse order for (int i = primaryTypesList.getList().size() - 1; i >= 0; i--) { MigrationType type = primaryTypesList.getList().get(i); deleteAllOfType(type); } // After deleting, the counts should be 0 except for a few special cases MigrationTypeCounts afterDeleteCounts = entityServletHelper.getMigrationTypeCounts(adminUserId); assertNotNull(afterDeleteCounts); assertNotNull(afterDeleteCounts.getList()); for (int i = 0; i < afterDeleteCounts.getList().size(); i++) { MigrationTypeCount afterDelete = afterDeleteCounts.getList().get(i); // Special cases for the not-deleted migration admin if (afterDelete.getType() == MigrationType.PRINCIPAL) { assertEquals("There should be 4 UserGroups remaining after the delete: " + BOOTSTRAP_PRINCIPAL.THE_ADMIN_USER + ", " + BOOTSTRAP_PRINCIPAL.ADMINISTRATORS_GROUP + ", " + BOOTSTRAP_PRINCIPAL.PUBLIC_GROUP + ", and " + BOOTSTRAP_PRINCIPAL.AUTHENTICATED_USERS_GROUP, new Long(4), afterDelete.getCount()); } else if (afterDelete.getType() == MigrationType.GROUP_MEMBERS || afterDelete.getType() == MigrationType.CREDENTIAL) { assertEquals("Counts do not match for: " + afterDelete.getType().name(), new Long(1), afterDelete.getCount()); } else { assertEquals("Counts are non-zero for: " + afterDelete.getType().name(), new Long(0), afterDelete.getCount()); } } // Now restore all of the data for (BackupInfo info : backupList) { String fileName = info.getFileName(); assertNotNull("Did not find a backup file name for type: " + info.getType(), fileName); restoreFromBackup(info.getType(), fileName); } // The counts should all be back MigrationTypeCounts finalCounts = entityServletHelper.getMigrationTypeCounts(adminUserId); for (int i = 1; i < finalCounts.getList().size(); i++) { MigrationTypeCount startCount = startCounts.getList().get(i); MigrationTypeCount afterRestore = finalCounts.getList().get(i); assertEquals("Count for " + startCount.getType().name() + " does not match", startCount.getCount(), afterRestore.getCount()); } } private static class BackupInfo { MigrationType type; String fileName; public BackupInfo(MigrationType type, String fileName) { super(); this.type = type; this.fileName = fileName; } public MigrationType getType() { return type; } public String getFileName() { return fileName; } } /** * There must be at least one object for every type of migratable object. * * @param startCounts */ private void validateStartingCount(MigrationTypeCounts startCounts) { assertNotNull(startCounts); assertNotNull(startCounts.getList()); List<MigrationType> typesToMigrate = new LinkedList<MigrationType>(); for (MigrationType tm : MigrationType.values()) { if (migrationManager.isMigrationTypeUsed(adminUserInfo, tm)) { typesToMigrate.add(tm); } } assertEquals( "This test requires at least one object to exist for each MigrationType. Please create a new object of the new MigrationType in the before() method of this test.", typesToMigrate.size(), startCounts.getList().size()); for (MigrationTypeCount count : startCounts.getList()) { assertTrue("This test requires at least one object to exist for each MigrationType. Please create a new object of type: " + count.getType() + " in the before() method of this test.", count.getCount() > 0); } } /** * Extract the filename from the full url. * * @param fullUrl * @return */ public String getFileNameFromUrl(String fullUrl) { ; int index = fullUrl.lastIndexOf("/"); return fullUrl.substring(index + 1, fullUrl.length()); } /** * Backup all data * * @param type * @return * @throws Exception */ private List<BackupInfo> backupAllOfType(MigrationType type) throws Exception { RowMetadataResult list = entityServletHelper.getRowMetadata(adminUserId, type, Long.MAX_VALUE, 0); if (list == null) return null; // Backup batches by their level in the tree ListBucketProvider provider = new ListBucketProvider(); MigrationUtils.bucketByTreeLevel(list.getList().iterator(), provider); List<BackupInfo> result = new ArrayList<BackupInfo>(); List<List<Long>> listOfBuckets = provider.getListOfBuckets(); for (List<Long> batch : listOfBuckets) { if (batch.size() > 0) { String fileName = backup(type, batch); result.add(new BackupInfo(type, fileName)); } } return result; } private String backup(MigrationType type, List<Long> tobackup) throws Exception { // Start the backup job IdList ids = new IdList(); ids.setList(tobackup); BackupRestoreStatus status = entityServletHelper.startBackup(adminUserId, type, ids); // wait for it.. waitForDaemon(status); status = entityServletHelper.getBackupRestoreStatus(adminUserId, status.getId()); assertNotNull(status.getBackupUrl()); return getFileNameFromUrl(status.getBackupUrl()); } private void restoreFromBackup(MigrationType type, String fileName) throws Exception { RestoreSubmission sub = new RestoreSubmission(); sub.setFileName(fileName); BackupRestoreStatus status = entityServletHelper.startRestore(adminUserId, type, sub); // wait for it waitForDaemon(status); } /** * Delete all data for a type. * * @param type * @throws ServletException * @throws IOException * @throws JSONObjectAdapterException */ private void deleteAllOfType(MigrationType type) throws Exception { IdList idList = getIdListOfAllOfType(type); if (idList == null) return; MigrationTypeCount result = entityServletHelper.deleteMigrationType(adminUserId, type, idList); System.out.println("Deleted: " + result); } /** * List all of the IDs for a type. * * @param type * @return * @throws ServletException * @throws IOException * @throws JSONObjectAdapterException */ private IdList getIdListOfAllOfType(MigrationType type) throws Exception { RowMetadataResult list = entityServletHelper.getRowMetadata(adminUserId, type, Long.MAX_VALUE, 0); if (list.getTotalCount() < 1) return null; // Create the backup list List<Long> toBackup = new LinkedList<Long>(); for (RowMetadata row : list.getList()) { toBackup.add(row.getId()); } IdList idList = new IdList(); idList.setList(toBackup); return idList; } /** * Wait for a deamon to process a a job. * * @param status * @throws InterruptedException * @throws JSONObjectAdapterException * @throws IOException * @throws ServletException */ private void waitForDaemon(BackupRestoreStatus status) throws Exception { long start = System.currentTimeMillis(); while (DaemonStatus.COMPLETED != status.getStatus()) { assertFalse("Daemon failed " + status.getErrorDetails(), DaemonStatus.FAILED == status.getStatus()); System.out.println("Waiting for backup/restore daemon. Message: " + status.getProgresssMessage()); Thread.sleep(1000); long elapse = System.currentTimeMillis() - start; assertTrue("Timed out waiting for a backup/restore daemon", elapse < MAX_WAIT_MS); status = entityServletHelper.getBackupRestoreStatus(adminUserId, status.getId()); } } }
Migration test.
services/repository/src/test/java/org/sagebionetworks/repo/web/migration/MigrationIntegrationAutowireTest.java
Migration test.
Java
apache-2.0
f686a15e7046a50fe8696aa9f83244c5b3797b43
0
debop/debop4k,debop/debop4k
/* * Copyright (c) 2016. Sunghyouk Bae <[email protected]> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package debop4k.data.orm.hibernate.usertypes.jodatime; import debop4k.core.kodatimes.KodaTimex; import debop4k.data.orm.hibernate.usertypes.BaseCompositeUserType; import debop4k.timeperiod.ITimePeriod; import debop4k.timeperiod.TimePeriod; import debop4k.timeperiod.TimeRange; import lombok.extern.slf4j.Slf4j; import org.hibernate.HibernateException; import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.type.StandardBasicTypes; import org.hibernate.type.TimestampType; import org.hibernate.type.Type; import org.joda.time.DateTime; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Timestamp; /** * {@link ITimePeriod} 정보중 시작일자와 완료일자를 Timestamp 값으로 저장하도록 합니다. * <p> * <pre><code> * @Columns( columns = { @Column(name = "startTimestamp"), @Column(name = "endTimestamp") } ) * @hba.Type( `type` = "debop4s.data.orm.hibernate.usertype.jodatime.TimePeriodAsTimestamp") * private ITimePeriod period; * </code></pre> * * @author [email protected] */ @Slf4j public class TimePeriodAsTimestampUserType extends BaseCompositeUserType { protected ITimePeriod asTimePeriod(Object value) { if (value != null && value instanceof ITimePeriod) { return (ITimePeriod) value; } return null; } @Override public String[] getPropertyNames() { return new String[]{"startTimestamp", "endTimestamp"}; } @Override public Type[] getPropertyTypes() { return new Type[]{TimestampType.INSTANCE, TimestampType.INSTANCE}; } @Override public Object getPropertyValue(Object component, int property) throws HibernateException { ITimePeriod period = asTimePeriod(component); if (period != null) { return (property == 0) ? period.getStart() : period.getEnd(); } return null; } @Override public void setPropertyValue(Object component, int property, Object value) throws HibernateException { ITimePeriod period = asTimePeriod(component); if (period != null) { switch (property) { case 0: period.setup((DateTime) value, period.getEnd()); case 1: period.setup(period.getStart(), (DateTime) value); } } } @Override public Class returnedClass() { return TimeRange.class; } @Override public Object nullSafeGet(ResultSet rs, String[] names, SharedSessionContractImplementor session, Object owner) throws HibernateException, SQLException { Timestamp start = (Timestamp) StandardBasicTypes.TIMESTAMP.nullSafeGet(rs, names[0], session, owner); Timestamp end = (Timestamp) StandardBasicTypes.TIMESTAMP.nullSafeGet(rs, names[1], session, owner); return new TimeRange(KodaTimex.toDateTime(start), KodaTimex.toDateTime(end)); } @Override public void nullSafeSet(PreparedStatement st, Object value, int index, SharedSessionContractImplementor session) throws HibernateException, SQLException { ITimePeriod period = asTimePeriod(value); if (period == null) { StandardBasicTypes.TIMESTAMP.nullSafeSet(st, null, index, session); StandardBasicTypes.TIMESTAMP.nullSafeSet(st, null, index + 1, session); } else { Timestamp start = period.getHasStart() ? new Timestamp(period.getStart().getMillis()) : null; Timestamp end = period.getHasEnd() ? new Timestamp(period.getEnd().getMillis()) : null; StandardBasicTypes.TIMESTAMP.nullSafeSet(st, start, index, session); StandardBasicTypes.TIMESTAMP.nullSafeSet(st, end, index + 1, session); } } @Override public Object deepCopy(Object value) throws HibernateException { return (value != null) ? new TimePeriod(asTimePeriod(value)) : null; } }
debop4k-data-orm/src/main/java/debop4k/data/orm/hibernate/usertypes/jodatime/TimePeriodAsTimestampUserType.java
/* * Copyright (c) 2016. Sunghyouk Bae <[email protected]> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package debop4k.data.orm.hibernate.usertypes.jodatime; import debop4k.core.kodatimes.KodaTimex; import debop4k.data.orm.hibernate.usertypes.BaseCompositeUserType; import debop4k.timeperiod.ITimePeriod; import debop4k.timeperiod.TimePeriod; import debop4k.timeperiod.TimeRange; import lombok.extern.slf4j.Slf4j; import org.hibernate.HibernateException; import org.hibernate.engine.spi.SharedSessionContractImplementor; import org.hibernate.type.StandardBasicTypes; import org.hibernate.type.TimestampType; import org.hibernate.type.Type; import org.joda.time.DateTime; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Timestamp; /** * {@link ITimePeriod} 정보중 시작일자와 완료일자를 Timestamp 값으로 저장하도록 합니다. * <p> * <pre><code> * @Columns( columns = { @Column(name = "startTimestamp"), @Column(name = "endTimestamp") } ) * @hba.Type( `type` = "debop4s.data.orm.hibernate.usertype.jodatime.TimePeriodAsTimestamp") * private ITimePeriod period; * </code></pre> * * @author [email protected] */ @Slf4j public class TimePeriodAsTimestampUserType extends BaseCompositeUserType { protected ITimePeriod asTimePeriod(Object value) { if (value != null && value instanceof ITimePeriod) { return (ITimePeriod) value; } return null; } @Override public String[] getPropertyNames() { return new String[]{"startTimestamp", "endTimestamp"}; } @Override public Type[] getPropertyTypes() { return new Type[]{TimestampType.INSTANCE, TimestampType.INSTANCE}; } @Override public Object getPropertyValue(Object component, int property) throws HibernateException { ITimePeriod period = asTimePeriod(component); if (period != null) { return (property == 0) ? period.getStart() : period.getEnd(); } return null; } @Override public void setPropertyValue(Object component, int property, Object value) throws HibernateException { ITimePeriod period = asTimePeriod(component); if (period != null) { switch (property) { case 0: period.setup((DateTime) value, period.getEnd()); case 1: period.setup(period.getStart(), (DateTime) value); } } } @Override public Class returnedClass() { return TimeRange.class; } @Override public Object nullSafeGet(ResultSet rs, String[] names, SharedSessionContractImplementor session, Object owner) throws HibernateException, SQLException { Timestamp start = (Timestamp) StandardBasicTypes.TIMESTAMP.nullSafeGet(rs, names[0], session, owner); Timestamp end = (Timestamp) StandardBasicTypes.TIMESTAMP.nullSafeGet(rs, names[1], session, owner); return new TimeRange(KodaTimex.toDateTime(start), KodaTimex.toDateTime(end)); } @Override public void nullSafeSet(PreparedStatement st, Object value, int index, SharedSessionContractImplementor session) throws HibernateException, SQLException { ITimePeriod period = asTimePeriod(value); if (period == null) { StandardBasicTypes.TIMESTAMP.nullSafeSet(st, null, index, session); StandardBasicTypes.TIMESTAMP.nullSafeSet(st, null, index + 1, session); } else { Timestamp start = period.hasStart() ? new Timestamp(period.getStart().getMillis()) : null; Timestamp end = period.hasEnd() ? new Timestamp(period.getEnd().getMillis()) : null; StandardBasicTypes.TIMESTAMP.nullSafeSet(st, start, index, session); StandardBasicTypes.TIMESTAMP.nullSafeSet(st, end, index + 1, session); } } @Override public Object deepCopy(Object value) throws HibernateException { return (value != null) ? new TimePeriod(asTimePeriod(value)) : null; } }
-. refactoring
debop4k-data-orm/src/main/java/debop4k/data/orm/hibernate/usertypes/jodatime/TimePeriodAsTimestampUserType.java
-. refactoring
Java
apache-2.0
179654da43f406fe6ad438defe3e176305eccc03
0
openengsb/openengsb,openengsb/openengsb,openengsb/openengsb,openengsb/openengsb,openengsb/openengsb,openengsb/openengsb
/** * Licensed to the Austrian Association for Software Tool Integration (AASTI) * under one or more contributor license agreements. See the NOTICE file * distributed with this work for additional information regarding copyright * ownership. The AASTI licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.openengsb.connector.memoryauditing.internal; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.openengsb.core.api.AliveState; import org.openengsb.core.common.AbstractOpenEngSBService; import org.openengsb.domain.auditing.AuditingDomain; public class MemoryAuditingServiceImpl extends AbstractOpenEngSBService implements AuditingDomain { private final List<String> messages = Collections.synchronizedList(new ArrayList<String>()); public MemoryAuditingServiceImpl() { } public MemoryAuditingServiceImpl(String instanceId) { super(instanceId); } @Override public AliveState getAliveState() { return AliveState.ONLINE; } @Override public void audit(String message) { messages.add(message); } @Override public List<String> getAudits() { return Collections.unmodifiableList(messages); } }
connector/memoryauditing/src/main/java/org/openengsb/connector/memoryauditing/internal/MemoryAuditingServiceImpl.java
/** * Licensed to the Austrian Association for Software Tool Integration (AASTI) * under one or more contributor license agreements. See the NOTICE file * distributed with this work for additional information regarding copyright * ownership. The AASTI licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.openengsb.connector.memoryauditing.internal; import java.util.ArrayList; import java.util.Collections; import java.util.List; import org.openengsb.core.api.AliveState; import org.openengsb.core.common.AbstractOpenEngSBService; import org.openengsb.domain.auditing.AuditingDomain; public class MemoryAuditingServiceImpl extends AbstractOpenEngSBService implements AuditingDomain { private final List<String> messages = Collections.synchronizedList(new ArrayList<String>()); public MemoryAuditingServiceImpl() { } public MemoryAuditingServiceImpl(String instanceId) { super(instanceId); } @Override public AliveState getAliveState() { return AliveState.ONLINE; } @Override public void audit(String message) { messages.add(message); } @Override public List<String> getAudits() { return Collections.unmodifiableList(messages); } @Override public String getInstanceId() { return "auditing"; } }
[OPENENGSB-1419] make sure memory-auditing-connector returns the correct instanceid
connector/memoryauditing/src/main/java/org/openengsb/connector/memoryauditing/internal/MemoryAuditingServiceImpl.java
[OPENENGSB-1419] make sure memory-auditing-connector returns the correct instanceid
Java
apache-2.0
3239c0d486ce43a2af27098a51ad6b15eaea1e1c
0
tadayosi/camel,apache/camel,adessaigne/camel,adessaigne/camel,apache/camel,tdiesler/camel,apache/camel,tadayosi/camel,pax95/camel,cunningt/camel,apache/camel,cunningt/camel,cunningt/camel,cunningt/camel,apache/camel,pax95/camel,christophd/camel,adessaigne/camel,pax95/camel,pax95/camel,christophd/camel,tadayosi/camel,tdiesler/camel,cunningt/camel,tdiesler/camel,tdiesler/camel,tadayosi/camel,christophd/camel,cunningt/camel,adessaigne/camel,tdiesler/camel,pax95/camel,apache/camel,adessaigne/camel,christophd/camel,tadayosi/camel,tdiesler/camel,adessaigne/camel,pax95/camel,tadayosi/camel,christophd/camel,christophd/camel
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.aws2.translate; public enum Translate2LanguageEnum { AFRIKAANS("af"), ALBANIAN("sq"), AMHARIC("am"), ARMENIAN("hy"), ARABIC("ar"), AZERBAIJANI("az"), BENGALI("bn"), BOSNIAN("bs"), BULGARIAN("bg"), CATALAN("ca"), CHINESE_SIMPLIFIED("zh"), CHINESE_TRADITIONAL("zh-TW"), CROATIAN("cr"), CZECH("cs"), DANISH("da"), DUTCH("nl"), ENGLISH("en"), FINNISH("fi"), FRENCH("fr"), GERMAN("de"), HEBREW( "he"), HINDI("hi"), INDONESIAN("id"), ITALIAN("it"), JAPANESE( "ja"), KOREAN("ko"), MALAY("ms"), NORWEGIAN("no"), PERSIAN("fa"), POLISH("pl"), PORTUGUESE("pt"), RUSSIAN("ru"), SPANISH("es"), SWEDISH("sv"), TURKISH("tr"); private final String language; Translate2LanguageEnum(final String language) { this.language = language; } @Override public String toString() { return language; } }
components/camel-aws/camel-aws2-translate/src/main/java/org/apache/camel/component/aws2/translate/Translate2LanguageEnum.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.aws2.translate; public enum Translate2LanguageEnum { AFRIKAANS("af"), ALBANIAN("sq"), AMHARIC("am"), ARMENIAN("hy"), ARABIC("ar"), AZERBAIJANI("az"), BENGALI("bn"), BOSNIAN("bs"), BULGARIAN("bg"), CHINESE_SIMPLIFIED("zh"), CHINESE_TRADITIONAL("zh-TW"), CZECH("cs"), DANISH("da"), DUTCH("nl"), ENGLISH("en"), FINNISH("fi"), FRENCH("fr"), GERMAN("de"), HEBREW( "he"), HINDI("hi"), INDONESIAN("id"), ITALIAN("it"), JAPANESE( "ja"), KOREAN("ko"), MALAY("ms"), NORWEGIAN("no"), PERSIAN("fa"), POLISH("pl"), PORTUGUESE("pt"), RUSSIAN("ru"), SPANISH("es"), SWEDISH("sv"), TURKISH("tr"); private final String language; Translate2LanguageEnum(final String language) { this.language = language; } @Override public String toString() { return language; } }
CAMEL-16641 - AWS-Translate: List all the available languages in the enum - Starting with "C" complete
components/camel-aws/camel-aws2-translate/src/main/java/org/apache/camel/component/aws2/translate/Translate2LanguageEnum.java
CAMEL-16641 - AWS-Translate: List all the available languages in the enum - Starting with "C" complete
Java
apache-2.0
8a2f317b7db79d06274de882e4663b5e84c560a2
0
openwide-java/owsi-core-parent,openwide-java/owsi-core-parent,openwide-java/owsi-core-parent,openwide-java/owsi-core-parent
package fr.openwide.core.commons.util.functional; import static com.google.common.base.Preconditions.checkNotNull; import java.io.Serializable; import java.util.Collection; import java.util.Comparator; import java.util.Map; import java.util.Set; import org.apache.commons.lang3.StringUtils; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; public final class Predicates2 { private Predicates2() { } /** * An identity-like predicate evaluating to true when the input is true, false when it is false, * and <strong>false when the input is null</strong>. * @see Predicates2#isTrueOrNull() */ public static Predicate<Boolean> isTrue() { return Predicates.equalTo(true); } /** * An identity-like predicate evaluating to true when the input is true, false when it is false, * and <strong>true when the input is null</strong>. */ public static Predicate<Boolean> isTrueOrNull() { return Predicates.or(Predicates.equalTo(true), Predicates.isNull()); } /** * A not-like predicate evaluating to false when the input is true, true when it is false, * and <strong>false when the input is null</strong>. * @see Predicates2#isFalseOrNull() */ public static Predicate<Boolean> isFalse() { return Predicates.equalTo(false); } /** * A not-like predicate evaluating to false when the input is true, true when it is false, * and <strong>true when the input is null</strong>. */ public static Predicate<Boolean> isFalseOrNull() { return Predicates.or(Predicates.equalTo(false), Predicates.isNull()); } public static <T extends Collection<?>> Predicate<T> isEmpty() { return CollectionPredicate.IS_EMPTY.withNarrowedType(); } public static <T extends Collection<?>> Predicate<T> notEmpty() { return CollectionPredicate.NOT_EMPTY.withNarrowedType(); } private enum CollectionPredicate implements Predicate<Collection<?>>, Serializable { IS_EMPTY { @Override public boolean apply(Collection<?> input) { return input == null || input.isEmpty(); } }, NOT_EMPTY { @Override public boolean apply(Collection<?> input) { return input != null && !input.isEmpty(); } }; @SuppressWarnings("unchecked") // these Collection predicates work for any T that extends Collection<?> <T extends Collection<?>> Predicate<T> withNarrowedType() { return (Predicate<T>) this; } } public static <T extends Map<?, ?>> Predicate<T> mapIsEmpty() { return MapPredicate.IS_EMPTY.withNarrowedType(); } public static <T extends Map<?, ?>> Predicate<T> mapNotEmpty() { return MapPredicate.NOT_EMPTY.withNarrowedType(); } private enum MapPredicate implements Predicate<Map<?, ?>>, Serializable { IS_EMPTY { @Override public boolean apply(Map<?, ?> input) { return input == null || input.isEmpty(); } }, NOT_EMPTY { @Override public boolean apply(Map<?, ?> input) { return input != null && !input.isEmpty(); } }; @SuppressWarnings("unchecked") // these Map predicates work for any T that extends Map<?, ?> <T extends Map<?, ?>> Predicate<T> withNarrowedType() { return (Predicate<T>) this; } } public static Predicate<Collection<?>> contains(Object referenceValue) { return new ContainsPredicate(referenceValue); } private static class ContainsPredicate implements SerializablePredicate<Collection<?>> { private static final long serialVersionUID = -9193654606378621631L; private final Object referenceValue; public ContainsPredicate(Object referenceValue) { this.referenceValue = referenceValue; } @Override public boolean apply(Collection<?> input) { return input != null && input.contains(referenceValue); } @Override public String toString() { return "contains(" + referenceValue + ")"; } } public static Predicate<Collection<?>> containsAny(Iterable<?> referenceValues) { return new ContainsAnyPredicate(referenceValues); } private static class ContainsAnyPredicate implements SerializablePredicate<Collection<?>> { private static final long serialVersionUID = -9193654606378621631L; private final Set<?> referenceValues; public ContainsAnyPredicate(Iterable<?> referenceValues) { this.referenceValues = Sets.newLinkedHashSet(checkNotNull(referenceValues)); } @Override public boolean apply(Collection<?> input) { if (input == null) { return false; } for (Object value : referenceValues) { if (input.contains(value)) { return true; } } return false; } @Override public String toString() { return "containsAny(" + referenceValues + ")"; } } public static Predicate<String> hasText() { return StringPredicate.HAS_TEXT; } private enum StringPredicate implements Predicate<String>, Serializable { HAS_TEXT { @Override public boolean apply(String input) { return StringUtils.isNotBlank(input); } } } public static <T> Predicate<T> comparesEqualTo(T value, Comparator<? super T> comparator) { return new ComparesEqualToPredicate<T>(value, comparator); } private static class ComparesEqualToPredicate<T> implements SerializablePredicate<T> { private static final long serialVersionUID = -9193654606378621631L; private final T referenceValue; private final Comparator<? super T> comparator; public ComparesEqualToPredicate(T header, Comparator<? super T> comparator) { this.referenceValue = header; this.comparator = comparator; } @Override public boolean apply(T input) { return comparator.compare(referenceValue, input) == 0; } } /** * @return A predicate returning true if the given iterable is non-null, is non-empty and has at least one element * that satisfies {@code itemPredicate}. * @see Iterables#any(Iterable, Predicate) */ public static <T> Predicate<Iterable<? extends T>> any(Predicate<? super T> itemPredicate) { return new IterableAnyPredicate<T>(itemPredicate); } private static class IterableAnyPredicate<T> implements Predicate<Iterable<? extends T>>, Serializable { private static final long serialVersionUID = -359783441767977199L; private final Predicate<? super T> itemPredicate; public IterableAnyPredicate(Predicate<? super T> itemPredicate) { super(); this.itemPredicate = itemPredicate; } @Override public boolean apply(Iterable<? extends T> input) { return input != null && Iterables.any(input, itemPredicate); } @Override public String toString() { return new StringBuilder().append("any(").append(itemPredicate).append(")").toString(); } } /** * @return A predicate returning true if the given iterable is null, is empty or has only elements * that satisfy {@code itemPredicate}. * @see Iterables#all(Iterable, Predicate) */ public static <T> Predicate<Iterable<? extends T>> all(Predicate<? super T> itemPredicate) { return new IterableAllPredicate<T>(itemPredicate); } private static class IterableAllPredicate<T> implements Predicate<Iterable<? extends T>>, Serializable { private static final long serialVersionUID = -359783441767977199L; private final Predicate<? super T> itemPredicate; public IterableAllPredicate(Predicate<? super T> itemPredicate) { super(); this.itemPredicate = itemPredicate; } @Override public boolean apply(Iterable<? extends T> input) { return input == null || Iterables.all(input, itemPredicate); } @Override public String toString() { return new StringBuilder().append("all(").append(itemPredicate).append(")").toString(); } } }
owsi-core/owsi-core-components/owsi-core-component-commons/src/main/java/fr/openwide/core/commons/util/functional/Predicates2.java
package fr.openwide.core.commons.util.functional; import static com.google.common.base.Preconditions.checkNotNull; import java.io.Serializable; import java.util.Collection; import java.util.Comparator; import java.util.Set; import org.apache.commons.lang3.StringUtils; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; public final class Predicates2 { private Predicates2() { } /** * An identity-like predicate evaluating to true when the input is true, false when it is false, * and <strong>false when the input is null</strong>. * @see Predicates2#isTrueOrNull() */ public static Predicate<Boolean> isTrue() { return Predicates.equalTo(true); } /** * An identity-like predicate evaluating to true when the input is true, false when it is false, * and <strong>true when the input is null</strong>. */ public static Predicate<Boolean> isTrueOrNull() { return Predicates.or(Predicates.equalTo(true), Predicates.isNull()); } /** * A not-like predicate evaluating to false when the input is true, true when it is false, * and <strong>false when the input is null</strong>. * @see Predicates2#isFalseOrNull() */ public static Predicate<Boolean> isFalse() { return Predicates.equalTo(false); } /** * A not-like predicate evaluating to false when the input is true, true when it is false, * and <strong>true when the input is null</strong>. */ public static Predicate<Boolean> isFalseOrNull() { return Predicates.or(Predicates.equalTo(false), Predicates.isNull()); } public static <T extends Collection<?>> Predicate<T> isEmpty() { return CollectionPredicate.IS_EMPTY.withNarrowedType(); } public static <T extends Collection<?>> Predicate<T> notEmpty() { return CollectionPredicate.NOT_EMPTY.withNarrowedType(); } private enum CollectionPredicate implements Predicate<Collection<?>>, Serializable { IS_EMPTY { @Override public boolean apply(Collection<?> input) { return input == null || input.isEmpty(); } }, NOT_EMPTY { @Override public boolean apply(Collection<?> input) { return input != null && !input.isEmpty(); } }; @SuppressWarnings("unchecked") // these Collection predicates work for any T that extends Collection<?> <T extends Collection<?>> Predicate<T> withNarrowedType() { return (Predicate<T>) this; } } public static Predicate<Collection<?>> contains(Object referenceValue) { return new ContainsPredicate(referenceValue); } private static class ContainsPredicate implements SerializablePredicate<Collection<?>> { private static final long serialVersionUID = -9193654606378621631L; private final Object referenceValue; public ContainsPredicate(Object referenceValue) { this.referenceValue = referenceValue; } @Override public boolean apply(Collection<?> input) { return input != null && input.contains(referenceValue); } @Override public String toString() { return "contains(" + referenceValue + ")"; } } public static Predicate<Collection<?>> containsAny(Iterable<?> referenceValues) { return new ContainsAnyPredicate(referenceValues); } private static class ContainsAnyPredicate implements SerializablePredicate<Collection<?>> { private static final long serialVersionUID = -9193654606378621631L; private final Set<?> referenceValues; public ContainsAnyPredicate(Iterable<?> referenceValues) { this.referenceValues = Sets.newLinkedHashSet(checkNotNull(referenceValues)); } @Override public boolean apply(Collection<?> input) { if (input == null) { return false; } for (Object value : referenceValues) { if (input.contains(value)) { return true; } } return false; } @Override public String toString() { return "containsAny(" + referenceValues + ")"; } } public static Predicate<String> hasText() { return StringPredicate.HAS_TEXT; } private enum StringPredicate implements Predicate<String>, Serializable { HAS_TEXT { @Override public boolean apply(String input) { return StringUtils.isNotBlank(input); } } } public static <T> Predicate<T> comparesEqualTo(T value, Comparator<? super T> comparator) { return new ComparesEqualToPredicate<T>(value, comparator); } private static class ComparesEqualToPredicate<T> implements SerializablePredicate<T> { private static final long serialVersionUID = -9193654606378621631L; private final T referenceValue; private final Comparator<? super T> comparator; public ComparesEqualToPredicate(T header, Comparator<? super T> comparator) { this.referenceValue = header; this.comparator = comparator; } @Override public boolean apply(T input) { return comparator.compare(referenceValue, input) == 0; } } /** * @return A predicate returning true if the given iterable is non-null, is non-empty and has at least one element * that satisfies {@code itemPredicate}. * @see Iterables#any(Iterable, Predicate) */ public static <T> Predicate<Iterable<? extends T>> any(Predicate<? super T> itemPredicate) { return new IterableAnyPredicate<T>(itemPredicate); } private static class IterableAnyPredicate<T> implements Predicate<Iterable<? extends T>>, Serializable { private static final long serialVersionUID = -359783441767977199L; private final Predicate<? super T> itemPredicate; public IterableAnyPredicate(Predicate<? super T> itemPredicate) { super(); this.itemPredicate = itemPredicate; } @Override public boolean apply(Iterable<? extends T> input) { return input != null && Iterables.any(input, itemPredicate); } @Override public String toString() { return new StringBuilder().append("any(").append(itemPredicate).append(")").toString(); } } /** * @return A predicate returning true if the given iterable is null, is empty or has only elements * that satisfy {@code itemPredicate}. * @see Iterables#all(Iterable, Predicate) */ public static <T> Predicate<Iterable<? extends T>> all(Predicate<? super T> itemPredicate) { return new IterableAllPredicate<T>(itemPredicate); } private static class IterableAllPredicate<T> implements Predicate<Iterable<? extends T>>, Serializable { private static final long serialVersionUID = -359783441767977199L; private final Predicate<? super T> itemPredicate; public IterableAllPredicate(Predicate<? super T> itemPredicate) { super(); this.itemPredicate = itemPredicate; } @Override public boolean apply(Iterable<? extends T> input) { return input == null || Iterables.all(input, itemPredicate); } @Override public String toString() { return new StringBuilder().append("all(").append(itemPredicate).append(")").toString(); } } }
Ajout de predicates pour mapIsEmpty/mapNotEmpty. git-svn-id: fce87830442b47284c88b98f498bb381ff6d45ea@3611 d3474844-eb8e-4abc-b058-2b321fed648b
owsi-core/owsi-core-components/owsi-core-component-commons/src/main/java/fr/openwide/core/commons/util/functional/Predicates2.java
Ajout de predicates pour mapIsEmpty/mapNotEmpty.
Java
apache-2.0
6bf0d27fef0a135e5f158c1b0b5ac0ba3ebc9303
0
Digas29/bazel,asarazan/bazel,aehlig/bazel,kidaa/bazel,JackSullivan/bazel,murugamsm/bazel,JackSullivan/bazel,akira-baruah/bazel,meteorcloudy/bazel,joshua0pang/bazel,Topher-the-Geek/bazel,bitemyapp/bazel,safarmer/bazel,d/bazel,wakashige/bazel,charlieaustin/bazel,rzagabe/bazel,variac/bazel,kidaa/bazel,kchodorow/bazel,damienmg/bazel,hhclam/bazel,wakashige/bazel,hhclam/bazel,Topher-the-Geek/bazel,Krasnyanskiy/bazel,asarazan/bazel,katre/bazel,snnn/bazel,cushon/bazel,wakashige/bazel,Krasnyanskiy/bazel,gavares/bazel,abergmeier-dsfishlabs/bazel,charlieaustin/bazel,whuwxl/bazel,vt09/bazel,gavares/bazel,xindaya/bazel,bazelbuild/bazel,spxtr/bazel,mrdomino/bazel,snnn/bazel,davidzchen/bazel,d/bazel,Digas29/bazel,mikelalcon/bazel,katre/bazel,nkhuyu/bazel,bazelbuild/bazel,bazelbuild/bazel,UrbanCompass/bazel,sicipio/bazel,ulfjack/bazel,mbrukman/bazel,twitter-forks/bazel,dhootha/bazel,d/bazel,spxtr/bazel,davidzchen/bazel,d/bazel,rzagabe/bazel,joshua0pang/bazel,cushon/bazel,d/bazel,werkt/bazel,safarmer/bazel,dslomov/bazel,ulfjack/bazel,mikelikespie/bazel,mikelikespie/bazel,spxtr/bazel,davidzchen/bazel,dropbox/bazel,Asana/bazel,perezd/bazel,aehlig/bazel,spxtr/bazel,xindaya/bazel,dslomov/bazel-windows,variac/bazel,mikelalcon/bazel,meteorcloudy/bazel,gavares/bazel,rhuss/bazel,kchodorow/bazel,werkt/bazel,dinowernli/bazel,cushon/bazel,ulfjack/bazel,whuwxl/bazel,kchodorow/bazel-1,mrdomino/bazel,mbrukman/bazel,variac/bazel,meteorcloudy/bazel,dhootha/bazel,joshua0pang/bazel,mbrukman/bazel,sicipio/bazel,kidaa/bazel,kamalmarhubi/bazel,kamalmarhubi/bazel,juhalindfors/bazel-patches,snnn/bazel,dhootha/bazel,whuwxl/bazel,anupcshan/bazel,ulfjack/bazel,anupcshan/bazel,sicipio/bazel,Ansahmadiba/bazel,katre/bazel,variac/bazel,ButterflyNetwork/bazel,mikelalcon/bazel,anupcshan/bazel,gavares/bazel,abergmeier-dsfishlabs/bazel,Asana/bazel,ruo91/bazel,kchodorow/bazel-1,iamthearm/bazel,iamthearm/bazel,ruo91/bazel,cushon/bazel,ButterflyNetwork/bazel,ButterflyNetwork/bazel,aehlig/bazel,hermione521/bazel,gavares/bazel,dropbox/bazel,wakashige/bazel,asarazan/bazel,mrdomino/bazel,dslomov/bazel,damienmg/bazel,perezd/bazel,twitter-forks/bazel,vt09/bazel,Krasnyanskiy/bazel,damienmg/bazel,mbrukman/bazel,abergmeier-dsfishlabs/bazel,damienmg/bazel,wakashige/bazel,d/bazel,Asana/bazel,JackSullivan/bazel,bitemyapp/bazel,JackSullivan/bazel,vt09/bazel,manashmndl/bazel,zhexuany/bazel,kchodorow/bazel-1,damienmg/bazel,dslomov/bazel-windows,bitemyapp/bazel,aehlig/bazel,damienmg/bazel,joshua0pang/bazel,damienmg/bazel,juhalindfors/bazel-patches,whuwxl/bazel,gavares/bazel,nkhuyu/bazel,dhootha/bazel,charlieaustin/bazel,Ansahmadiba/bazel,kidaa/bazel,bazelbuild/bazel,Topher-the-Geek/bazel,snnn/bazel,vt09/bazel,variac/bazel,dslomov/bazel,rhuss/bazel,rzagabe/bazel,kchodorow/bazel-1,manashmndl/bazel,ulfjack/bazel,Ansahmadiba/bazel,Topher-the-Geek/bazel,abergmeier-dsfishlabs/bazel,UrbanCompass/bazel,dslomov/bazel-windows,Krasnyanskiy/bazel,rohitsaboo/bazel,rhuss/bazel,Digas29/bazel,spxtr/bazel,dhootha/bazel,kchodorow/bazel,vt09/bazel,zhexuany/bazel,bitemyapp/bazel,akira-baruah/bazel,hhclam/bazel,Digas29/bazel,JackSullivan/bazel,abergmeier-dsfishlabs/bazel,zhexuany/bazel,murugamsm/bazel,kchodorow/bazel-1,Digas29/bazel,Ansahmadiba/bazel,rzagabe/bazel,LuminateWireless/bazel,dropbox/bazel,werkt/bazel,xindaya/bazel,ruo91/bazel,rhuss/bazel,whuwxl/bazel,rhuss/bazel,asarazan/bazel,Digas29/bazel,dropbox/bazel,nkhuyu/bazel,dslomov/bazel-windows,akira-baruah/bazel,kchodorow/bazel,rhuss/bazel,mrdomino/bazel,sicipio/bazel,dinowernli/bazel,charlieaustin/bazel,davidzchen/bazel,kidaa/bazel,aehlig/bazel,UrbanCompass/bazel,Krasnyanskiy/bazel,hermione521/bazel,kchodorow/bazel-1,zhexuany/bazel,rhuss/bazel,nkhuyu/bazel,hermione521/bazel,ruo91/bazel,dslomov/bazel,rzagabe/bazel,davidzchen/bazel,mikelalcon/bazel,JackSullivan/bazel,ruo91/bazel,bazelbuild/bazel,d/bazel,kchodorow/bazel,LuminateWireless/bazel,kamalmarhubi/bazel,gavares/bazel,safarmer/bazel,xindaya/bazel,wakashige/bazel,sicipio/bazel,dslomov/bazel,UrbanCompass/bazel,Ansahmadiba/bazel,sicipio/bazel,hhclam/bazel,mikelikespie/bazel,anupcshan/bazel,meteorcloudy/bazel,manashmndl/bazel,twitter-forks/bazel,hhclam/bazel,Asana/bazel,Krasnyanskiy/bazel,kidaa/bazel,rohitsaboo/bazel,kidaa/bazel,joshua0pang/bazel,twitter-forks/bazel,dropbox/bazel,perezd/bazel,meteorcloudy/bazel,bitemyapp/bazel,hermione521/bazel,anupcshan/bazel,werkt/bazel,rzagabe/bazel,Asana/bazel,twitter-forks/bazel,murugamsm/bazel,ruo91/bazel,ulfjack/bazel,kamalmarhubi/bazel,kamalmarhubi/bazel,snnn/bazel,joshua0pang/bazel,mikelikespie/bazel,rzagabe/bazel,cushon/bazel,vt09/bazel,meteorcloudy/bazel,perezd/bazel,juhalindfors/bazel-patches,cushon/bazel,bitemyapp/bazel,hermione521/bazel,xindaya/bazel,murugamsm/bazel,hermione521/bazel,vt09/bazel,LuminateWireless/bazel,manashmndl/bazel,gavares/bazel,xindaya/bazel,ButterflyNetwork/bazel,LuminateWireless/bazel,akira-baruah/bazel,perezd/bazel,juhalindfors/bazel-patches,ButterflyNetwork/bazel,anupcshan/bazel,kamalmarhubi/bazel,abergmeier-dsfishlabs/bazel,Topher-the-Geek/bazel,mbrukman/bazel,manashmndl/bazel,safarmer/bazel,dropbox/bazel,manashmndl/bazel,asarazan/bazel,nkhuyu/bazel,rzagabe/bazel,Asana/bazel,ulfjack/bazel,kchodorow/bazel,perezd/bazel,iamthearm/bazel,dslomov/bazel-windows,charlieaustin/bazel,murugamsm/bazel,mrdomino/bazel,nkhuyu/bazel,Asana/bazel,bazelbuild/bazel,charlieaustin/bazel,meteorcloudy/bazel,bitemyapp/bazel,nkhuyu/bazel,katre/bazel,dinowernli/bazel,charlieaustin/bazel,aehlig/bazel,akira-baruah/bazel,ButterflyNetwork/bazel,safarmer/bazel,katre/bazel,Ansahmadiba/bazel,twitter-forks/bazel,werkt/bazel,whuwxl/bazel,dhootha/bazel,katre/bazel,davidzchen/bazel,bitemyapp/bazel,Krasnyanskiy/bazel,ruo91/bazel,Topher-the-Geek/bazel,manashmndl/bazel,Topher-the-Geek/bazel,dinowernli/bazel,rohitsaboo/bazel,davidzchen/bazel,dslomov/bazel,joshua0pang/bazel,rohitsaboo/bazel,mikelalcon/bazel,juhalindfors/bazel-patches,aehlig/bazel,mikelikespie/bazel,mikelikespie/bazel,dhootha/bazel,twitter-forks/bazel,iamthearm/bazel,dinowernli/bazel,rohitsaboo/bazel,asarazan/bazel,asarazan/bazel,iamthearm/bazel,juhalindfors/bazel-patches,Topher-the-Geek/bazel,rohitsaboo/bazel,perezd/bazel,dinowernli/bazel,akira-baruah/bazel,kchodorow/bazel,iamthearm/bazel,mikelalcon/bazel,variac/bazel,xindaya/bazel,kidaa/bazel,dslomov/bazel,murugamsm/bazel,spxtr/bazel,LuminateWireless/bazel,snnn/bazel,murugamsm/bazel,dslomov/bazel-windows,juhalindfors/bazel-patches,LuminateWireless/bazel,Krasnyanskiy/bazel,spxtr/bazel,hhclam/bazel,UrbanCompass/bazel,safarmer/bazel,snnn/bazel,mrdomino/bazel,variac/bazel,zhexuany/bazel,mbrukman/bazel,werkt/bazel,wakashige/bazel,UrbanCompass/bazel,zhexuany/bazel,JackSullivan/bazel,Ansahmadiba/bazel,sicipio/bazel
// Copyright 2014 Google Inc. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.rules.java; import static java.nio.charset.StandardCharsets.ISO_8859_1; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.devtools.build.lib.actions.AbstractAction; import com.google.devtools.build.lib.actions.Action; import com.google.devtools.build.lib.actions.ActionExecutionContext; import com.google.devtools.build.lib.actions.ActionExecutionException; import com.google.devtools.build.lib.actions.ActionInput; import com.google.devtools.build.lib.actions.ActionInputHelper; import com.google.devtools.build.lib.actions.ActionOwner; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.BaseSpawn; import com.google.devtools.build.lib.actions.EnvironmentalExecException; import com.google.devtools.build.lib.actions.ExecException; import com.google.devtools.build.lib.actions.Executor; import com.google.devtools.build.lib.actions.ParameterFile; import com.google.devtools.build.lib.actions.ResourceSet; import com.google.devtools.build.lib.actions.Spawn; import com.google.devtools.build.lib.actions.SpawnActionContext; import com.google.devtools.build.lib.actions.extra.ExtraActionInfo; import com.google.devtools.build.lib.actions.extra.JavaCompileInfo; import com.google.devtools.build.lib.analysis.AnalysisEnvironment; import com.google.devtools.build.lib.analysis.RuleContext; import com.google.devtools.build.lib.analysis.actions.CommandLine; import com.google.devtools.build.lib.analysis.actions.CustomCommandLine; import com.google.devtools.build.lib.analysis.actions.CustomCommandLine.CustomArgv; import com.google.devtools.build.lib.analysis.actions.CustomCommandLine.CustomMultiArgv; import com.google.devtools.build.lib.analysis.actions.ParameterFileWriteAction; import com.google.devtools.build.lib.analysis.config.BuildConfiguration; import com.google.devtools.build.lib.collect.ImmutableIterable; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; import com.google.devtools.build.lib.collect.nestedset.Order; import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadCompatible; import com.google.devtools.build.lib.rules.java.JavaConfiguration.JavaClasspathMode; import com.google.devtools.build.lib.syntax.Label; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.ShellEscaper; import com.google.devtools.build.lib.util.StringCanonicalizer; import com.google.devtools.build.lib.vfs.FileSystemUtils; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedHashSet; import java.util.List; /** * Action that represents a Java compilation. */ @ThreadCompatible public class JavaCompileAction extends AbstractAction { private static final String GUID = "786e174d-ed97-4e79-9f61-ae74430714cf"; private static final ResourceSet LOCAL_RESOURCES = new ResourceSet(750 /*MB*/, 0.5 /*CPU*/, 0.0 /*IO*/); private final CommandLine javaCompileCommandLine; private final CommandLine commandLine; /** * The directory in which generated classfiles are placed. * May be erased/created by the JavaBuilder. */ private final PathFragment classDirectory; private final Artifact outputJar; /** * The list of classpath entries to specify to javac. */ private final NestedSet<Artifact> classpathEntries; /** * The list of classpath entries to search for annotation processors. */ private final ImmutableList<Artifact> processorPath; /** * The list of annotation processor classes to run. */ private final ImmutableList<String> processorNames; /** * The translation messages. */ private final ImmutableList<Artifact> messages; /** * The set of resources to put into the jar. */ private final ImmutableList<Artifact> resources; /** * The set of classpath resources to put into the jar. */ private final ImmutableList<Artifact> classpathResources; /** * The set of files which contain lists of additional Java source files to * compile. */ private final ImmutableList<Artifact> sourceJars; /** * The set of explicit Java source files to compile. */ private final ImmutableList<Artifact> sourceFiles; /** * The compiler options to pass to javac. */ private final ImmutableList<String> javacOpts; /** * The subset of classpath jars provided by direct dependencies. */ private final ImmutableList<Artifact> directJars; /** * The level of strict dependency checks (off, warnings, or errors). */ private final BuildConfiguration.StrictDepsMode strictJavaDeps; /** * The set of .deps artifacts provided by direct dependencies. */ private final ImmutableList<Artifact> compileTimeDependencyArtifacts; /** * The java semantics to get the list of action outputs. */ private final JavaSemantics semantics; /** * Constructs an action to compile a set of Java source files to class files. * * @param owner the action owner, typically a java_* RuleConfiguredTarget. * @param baseInputs the set of the input artifacts of the compile action * without the parameter file action; * @param outputs the outputs of the action * @param javaCompileCommandLine the command line for the java library * builder - it's actually written to the parameter file, but other * parts (for example, ide_build_info) need access to the data * @param commandLine the actual invocation command line */ private JavaCompileAction(ActionOwner owner, Iterable<Artifact> baseInputs, Collection<Artifact> outputs, CommandLine javaCompileCommandLine, CommandLine commandLine, PathFragment classDirectory, Artifact outputJar, NestedSet<Artifact> classpathEntries, List<Artifact> processorPath, Artifact langtoolsJar, Artifact javaBuilderJar, List<String> processorNames, Collection<Artifact> messages, Collection<Artifact> resources, Collection<Artifact> classpathResources, Collection<Artifact> sourceJars, Collection<Artifact> sourceFiles, List<String> javacOpts, Collection<Artifact> directJars, BuildConfiguration.StrictDepsMode strictJavaDeps, Collection<Artifact> compileTimeDependencyArtifacts, JavaSemantics semantics) { super(owner, NestedSetBuilder.<Artifact>stableOrder() .addTransitive(classpathEntries) .addAll(processorPath) .addAll(messages) .addAll(resources) .addAll(classpathResources) .addAll(sourceJars) .addAll(sourceFiles) .addAll(compileTimeDependencyArtifacts) .addAll(baseInputs) .add(langtoolsJar) .add(javaBuilderJar) .build(), outputs); this.javaCompileCommandLine = javaCompileCommandLine; this.commandLine = commandLine; this.classDirectory = Preconditions.checkNotNull(classDirectory); this.outputJar = outputJar; this.classpathEntries = classpathEntries; this.processorPath = ImmutableList.copyOf(processorPath); this.processorNames = ImmutableList.copyOf(processorNames); this.messages = ImmutableList.copyOf(messages); this.resources = ImmutableList.copyOf(resources); this.classpathResources = ImmutableList.copyOf(classpathResources); this.sourceJars = ImmutableList.copyOf(sourceJars); this.sourceFiles = ImmutableList.copyOf(sourceFiles); this.javacOpts = ImmutableList.copyOf(javacOpts); this.directJars = ImmutableList.copyOf(directJars); this.strictJavaDeps = strictJavaDeps; this.compileTimeDependencyArtifacts = ImmutableList.copyOf(compileTimeDependencyArtifacts); this.semantics = semantics; } /** * Returns the given (passed to constructor) source files. */ @VisibleForTesting public Collection<Artifact> getSourceFiles() { return sourceFiles; } /** * Returns the list of paths that represent the resources to be added to the * jar. */ @VisibleForTesting public Collection<Artifact> getResources() { return resources; } /** * Returns the list of paths that represents the classpath. */ @VisibleForTesting public Iterable<Artifact> getClasspath() { return classpathEntries; } /** * Returns the list of paths that represents the source jars. */ @VisibleForTesting public Collection<Artifact> getSourceJars() { return sourceJars; } /** * Returns the list of paths that represents the processor path. */ @VisibleForTesting public List<Artifact> getProcessorpath() { return processorPath; } @VisibleForTesting public List<String> getJavacOpts() { return javacOpts; } @VisibleForTesting public Collection<Artifact> getDirectJars() { return directJars; } @VisibleForTesting public Collection<Artifact> getCompileTimeDependencyArtifacts() { return compileTimeDependencyArtifacts; } @VisibleForTesting public BuildConfiguration.StrictDepsMode getStrictJavaDepsMode() { return strictJavaDeps; } public PathFragment getClassDirectory() { return classDirectory; } /** * Returns the list of class names of processors that should * be run. */ @VisibleForTesting public List<String> getProcessorNames() { return processorNames; } /** * Returns the output jar artifact that gets generated by archiving the * results of the Java compilation and the declared resources. */ public Artifact getOutputJar() { return outputJar; } @Override public Artifact getPrimaryOutput() { return getOutputJar(); } /** * Constructs a command line that can be used to invoke the * JavaBuilder. * * <p>Do not use this method, except for testing (and for the in-process * strategy). */ @VisibleForTesting public Iterable<String> buildCommandLine() { return javaCompileCommandLine.arguments(); } /** * Returns the command and arguments for a java compile action. */ public List<String> getCommand() { return ImmutableList.copyOf(commandLine.arguments()); } @Override @ThreadCompatible public void execute(ActionExecutionContext actionExecutionContext) throws ActionExecutionException, InterruptedException { Executor executor = actionExecutionContext.getExecutor(); try { List<ActionInput> outputs = new ArrayList<>(); outputs.addAll(getOutputs()); // Add a few useful side-effect output files to the list to retrieve. // TODO(bazel-team): Just make these Artifacts. PathFragment classDirectory = getClassDirectory(); outputs.addAll(semantics.getExtraJavaCompileOutputs(classDirectory)); outputs.add(ActionInputHelper.fromPath(classDirectory.getChild("srclist").getPathString())); try { // Make sure the directories exist, else the distributor will bomb. Path classDirectoryPath = executor.getExecRoot().getRelative(getClassDirectory()); FileSystemUtils.createDirectoryAndParents(classDirectoryPath); } catch (IOException e) { throw new EnvironmentalExecException(e.getMessage()); } final ImmutableList<ActionInput> finalOutputs = ImmutableList.copyOf(outputs); Spawn spawn = new BaseSpawn(getCommand(), ImmutableMap.<String, String>of(), ImmutableMap.<String, String>of(), this, LOCAL_RESOURCES) { @Override public Collection<? extends ActionInput> getOutputFiles() { return finalOutputs; } }; executor.getSpawnActionContext(getMnemonic()).exec(spawn, actionExecutionContext); } catch (ExecException e) { throw e.toActionExecutionException("Java compilation in rule '" + getOwner().getLabel() + "'", executor.getVerboseFailures(), this); } } @Override protected String computeKey() { Fingerprint f = new Fingerprint(); f.addString(GUID); f.addStrings(commandLine.arguments()); return f.hexDigestAndReset(); } @Override public String describeKey() { StringBuilder message = new StringBuilder(); for (String arg : ShellEscaper.escapeAll(commandLine.arguments())) { message.append(" Command-line argument: "); message.append(arg); message.append('\n'); } return message.toString(); } @Override public String getMnemonic() { return "Javac"; } @Override protected String getRawProgressMessage() { int count = sourceFiles.size(); if (count == 0) { // nothing to compile, just bundling resources and messages count = resources.size() + classpathResources.size() + messages.size(); } return "Building " + outputJar.prettyPrint() + " (" + count + " files)"; } @Override public String describeStrategy(Executor executor) { return getContext(executor).strategyLocality(getMnemonic(), true); } @Override public ResourceSet estimateResourceConsumption(Executor executor) { SpawnActionContext context = getContext(executor); if (context.isRemotable(getMnemonic(), true)) { return ResourceSet.ZERO; } return LOCAL_RESOURCES; } protected SpawnActionContext getContext(Executor executor) { return executor.getSpawnActionContext(getMnemonic()); } @Override public String toString() { StringBuilder result = new StringBuilder(); result.append("JavaBuilder "); Joiner.on(' ').appendTo(result, commandLine.arguments()); return result.toString(); } @Override public ExtraActionInfo.Builder getExtraActionInfo() { JavaCompileInfo.Builder info = JavaCompileInfo.newBuilder(); info.addAllSourceFile(Artifact.toExecPaths(getSourceFiles())); info.addAllClasspath(Artifact.toExecPaths(getClasspath())); info.addClasspath(getClassDirectory().getPathString()); info.addAllSourcepath(Artifact.toExecPaths(getSourceJars())); info.addAllJavacOpt(getJavacOpts()); info.addAllProcessor(getProcessorNames()); info.addAllProcessorpath(Artifact.toExecPaths(getProcessorpath())); info.setOutputjar(getOutputJar().getExecPathString()); return super.getExtraActionInfo() .setExtension(JavaCompileInfo.javaCompileInfo, info.build()); } /** * Creates an instance. * * @param configuration the build configuration, which provides the default options and the path * to the compiler, etc. * @param classDirectory the directory in which generated classfiles are placed relative to the * exec root * @param sourceGenDirectory the directory where source files generated by annotation processors * should be stored. * @param tempDirectory a directory in which the library builder can store temporary files * relative to the exec root * @param outputJar output jar * @param compressJar if true compress the output jar * @param outputDepsProto the proto file capturing dependency information * @param classpath the complete classpath, the directory in which generated classfiles are placed * @param processorPath the classpath where javac should search for annotation processors * @param processorNames the classes that javac should use as annotation processors * @param messages the message files for translation * @param resources the set of resources to put into the jar * @param classpathResources the set of classpath resources to put into the jar * @param sourceJars the set of jars containing additional source files to compile * @param sourceFiles the set of explicit Java source files to compile * @param javacOpts the compiler options to pass to javac */ private static CustomCommandLine.Builder javaCompileCommandLine( final JavaSemantics semantics, final BuildConfiguration configuration, final PathFragment classDirectory, final PathFragment sourceGenDirectory, PathFragment tempDirectory, Artifact outputJar, Artifact gensrcOutputJar, boolean compressJar, Artifact outputDepsProto, final NestedSet<Artifact> classpath, List<Artifact> processorPath, Artifact langtoolsJar, Artifact javaBuilderJar, List<String> processorNames, Collection<Artifact> messages, Collection<Artifact> resources, Collection<Artifact> classpathResources, Collection<Artifact> sourceJars, Collection<Artifact> sourceFiles, List<String> javacOpts, final Collection<Artifact> directJars, BuildConfiguration.StrictDepsMode strictJavaDeps, Collection<Artifact> compileTimeDependencyArtifacts, String ruleKind, Label targetLabel) { Preconditions.checkNotNull(classDirectory); Preconditions.checkNotNull(tempDirectory); Preconditions.checkNotNull(langtoolsJar); Preconditions.checkNotNull(javaBuilderJar); CustomCommandLine.Builder result = CustomCommandLine.builder(); result.add("--classdir").addPath(classDirectory); result.add("--tempdir").addPath(tempDirectory); if (outputJar != null) { result.addExecPath("--output", outputJar); } if (gensrcOutputJar != null) { result.add("--sourcegendir").addPath(sourceGenDirectory); result.addExecPath("--generated_sources_output", gensrcOutputJar); } if (compressJar) { result.add("--compress_jar"); } if (outputDepsProto != null) { result.addExecPath("--output_deps_proto", outputDepsProto); } result.add("--classpath").add(new CustomArgv() { @Override public String argv() { List<PathFragment> classpathEntries = new ArrayList<>(); for (Artifact classpathArtifact : classpath) { classpathEntries.add(classpathArtifact.getExecPath()); } classpathEntries.add(classDirectory); return Joiner.on(configuration.getHostPathSeparator()).join(classpathEntries); } }); if (!processorPath.isEmpty()) { result.addJoinExecPaths("--processorpath", configuration.getHostPathSeparator(), processorPath); } if (!processorNames.isEmpty()) { result.add("--processors", processorNames); } if (!messages.isEmpty()) { result.add("--messages"); for (Artifact message : messages) { addAsResourcePrefixedExecPath(semantics, message, result); } } if (!resources.isEmpty()) { result.add("--resources"); for (Artifact resource : resources) { addAsResourcePrefixedExecPath(semantics, resource, result); } } if (!classpathResources.isEmpty()) { result.addExecPaths("--classpath_resources", classpathResources); } if (!sourceJars.isEmpty()) { result.addExecPaths("--source_jars", sourceJars); } result.addExecPaths("--sources", sourceFiles); if (!javacOpts.isEmpty()) { result.add("--javacopts", javacOpts); } // strict_java_deps controls whether the mapping from jars to targets is // written out and whether we try to minimize the compile-time classpath. if (strictJavaDeps != BuildConfiguration.StrictDepsMode.OFF) { result.add("--strict_java_deps"); result.add((semantics.useStrictJavaDeps(configuration) ? strictJavaDeps : BuildConfiguration.StrictDepsMode.OFF).toString()); result.add(new CustomMultiArgv() { @Override public Iterable<String> argv() { return addJarsToTargets(classpath, directJars); } }); if (configuration.getFragment(JavaConfiguration.class).getReduceJavaClasspath() == JavaClasspathMode.JAVABUILDER) { result.add("--reduce_classpath"); if (!compileTimeDependencyArtifacts.isEmpty()) { result.addExecPaths("--deps_artifacts", compileTimeDependencyArtifacts); } } } if (ruleKind != null) { result.add("--rule_kind"); result.add(ruleKind); } if (targetLabel != null) { result.add("--target_label"); if (targetLabel.getPackageIdentifier().getRepository().isDefault()) { result.add(targetLabel.toString()); } else { // @-prefixed strings will be assumed to be filenames and expanded by // {@link JavaLibraryBuildRequest}, so add an extra &at; to escape it. result.add("@" + targetLabel); } } return result; } private static void addAsResourcePrefixedExecPath(JavaSemantics semantics, Artifact artifact, CustomCommandLine.Builder builder) { PathFragment execPath = artifact.getExecPath(); PathFragment resourcePath = semantics.getJavaResourcePath(artifact.getRootRelativePath()); if (execPath.equals(resourcePath)) { builder.addPaths(":%s", resourcePath); } else { // execPath must end with resourcePath in all cases PathFragment rootPrefix = trimTail(execPath, resourcePath); builder.addPaths("%s:%s", rootPrefix, resourcePath); } } /** * Returns the root-part of a given path by trimming off the end specified by * a given tail. Assumes that the tail is known to match, and simply relies on * the segment lengths. */ private static PathFragment trimTail(PathFragment path, PathFragment tail) { return path.subFragment(0, path.segmentCount() - tail.segmentCount()); } /** * Builds the list of mappings between jars on the classpath and their * originating targets names. */ private static ImmutableList<String> addJarsToTargets( NestedSet<Artifact> classpath, Collection<Artifact> directJars) { ImmutableList.Builder<String> builder = ImmutableList.builder(); for (Artifact jar : classpath) { builder.add(directJars.contains(jar) ? "--direct_dependency" : "--indirect_dependency"); builder.add(jar.getExecPathString()); Label label = getTargetName(jar); builder.add(label.getPackageIdentifier().getRepository().isDefault() ? label.toString() : label.toPathFragment().toString()); } return builder.build(); } /** * Gets the name of the target that produced the given jar artifact. * * When specifying jars directly in the "srcs" attribute of a rule (mostly * for third_party libraries), there is no generating action, so we just * return the jar name in label form. */ private static Label getTargetName(Artifact jar) { return Preconditions.checkNotNull(jar.getOwner(), jar); } /** * The actual command line executed for a compile action. */ private static CommandLine spawnCommandLine(PathFragment javaExecutable, Artifact javaBuilderJar, Artifact langtoolsJar, Artifact paramFile, ImmutableList<String> javaBuilderJvmFlags) { Preconditions.checkNotNull(langtoolsJar); Preconditions.checkNotNull(javaBuilderJar); return CustomCommandLine.builder() .addPath(javaExecutable) // Langtools jar is placed on the boot classpath so that it can override classes // in the JRE. Typically this has no effect since langtools.jar does not have // classes in common with rt.jar. However, it is necessary when using a version // of javac.jar generated via ant from the langtools build.xml that is of a // different version than AND has an overlap in contents with the default // run-time (eg while upgrading the Java version). .addPaths("-Xbootclasspath/p:%s", langtoolsJar.getExecPath()) .add(javaBuilderJvmFlags) .addExecPath("-jar", javaBuilderJar) .addPaths("@%s", paramFile.getExecPath()) .build(); } /** * Builder class to construct Java compile actions. */ public static class Builder { private final ActionOwner owner; private final AnalysisEnvironment analysisEnvironment; private final BuildConfiguration configuration; private final JavaSemantics semantics; private PathFragment javaExecutable; private List<Artifact> javabaseInputs = ImmutableList.of(); private Artifact outputJar; private Artifact gensrcOutputJar; private Artifact outputDepsProto; private Artifact paramFile; private Artifact metadata; private final Collection<Artifact> sourceFiles = new ArrayList<>(); private final Collection<Artifact> sourceJars = new ArrayList<>(); private final Collection<Artifact> resources = new ArrayList<>(); private final Collection<Artifact> classpathResources = new ArrayList<>(); private final Collection<Artifact> translations = new LinkedHashSet<>(); private BuildConfiguration.StrictDepsMode strictJavaDeps = BuildConfiguration.StrictDepsMode.OFF; private final Collection<Artifact> directJars = new ArrayList<>(); private final Collection<Artifact> compileTimeDependencyArtifacts = new ArrayList<>(); private List<String> javacOpts = new ArrayList<>(); private boolean compressJar; private NestedSet<Artifact> classpathEntries = NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER); private ImmutableList<Artifact> bootclasspathEntries = ImmutableList.of(); private Artifact javaBuilderJar; private Artifact langtoolsJar; private PathFragment classDirectory; private PathFragment sourceGenDirectory; private PathFragment tempDirectory; private final List<Artifact> processorPath = new ArrayList<>(); private final List<String> processorNames = new ArrayList<>(); private String ruleKind; private Label targetLabel; /** * Creates a Builder from an owner and a build configuration. */ public Builder(ActionOwner owner, AnalysisEnvironment analysisEnvironment, BuildConfiguration configuration, JavaSemantics semantics) { this.owner = owner; this.analysisEnvironment = analysisEnvironment; this.configuration = configuration; this.semantics = semantics; } /** * Creates a Builder from an owner and a build configuration. */ public Builder(RuleContext ruleContext, JavaSemantics semantics) { this(ruleContext.getActionOwner(), ruleContext.getAnalysisEnvironment(), ruleContext.getConfiguration(), semantics); } public JavaCompileAction build() { // TODO(bazel-team): all the params should be calculated before getting here, and the various // aggregation code below should go away. List<String> jcopts = new ArrayList<>(javacOpts); JavaConfiguration javaConfiguration = configuration.getFragment(JavaConfiguration.class); if (javaConfiguration.getJavaWarns().size() > 0) { jcopts.add("-Xlint:" + Joiner.on(',').join(javaConfiguration.getJavaWarns())); } if (!bootclasspathEntries.isEmpty()) { jcopts.add("-bootclasspath"); jcopts.add( Artifact.joinExecPaths(configuration.getHostPathSeparator(), bootclasspathEntries)); } List<String> internedJcopts = new ArrayList<>(); for (String jcopt : jcopts) { internedJcopts.add(StringCanonicalizer.intern(jcopt)); } // Invariant: if strictJavaDeps is OFF, then directJars and // dependencyArtifacts are ignored if (strictJavaDeps == BuildConfiguration.StrictDepsMode.OFF) { directJars.clear(); compileTimeDependencyArtifacts.clear(); } // Invariant: if experimental_java_classpath is not set to 'javabuilder', // dependencyArtifacts are ignored if (javaConfiguration.getReduceJavaClasspath() != JavaClasspathMode.JAVABUILDER) { compileTimeDependencyArtifacts.clear(); } if (paramFile == null) { paramFile = analysisEnvironment.getDerivedArtifact( ParameterFile.derivePath(outputJar.getRootRelativePath()), configuration.getBinDirectory()); } // ImmutableIterable is safe to use here because we know that neither of the components of // the Iterable.concat() will change. Without ImmutableIterable, AbstractAction will // waste memory by making a preventive copy of the iterable. Iterable<Artifact> baseInputs = ImmutableIterable.from(Iterables.concat( javabaseInputs, bootclasspathEntries, ImmutableList.of(paramFile))); Preconditions.checkState(javaExecutable != null, owner); Preconditions.checkState(javaExecutable.isAbsolute() ^ !javabaseInputs.isEmpty(), javaExecutable); Collection<Artifact> outputs; ImmutableList.Builder<Artifact> outputsBuilder = ImmutableList.builder(); outputsBuilder.add(outputJar); if (metadata != null) { outputsBuilder.add(metadata); } if (gensrcOutputJar != null) { outputsBuilder.add(gensrcOutputJar); } if (outputDepsProto != null) { outputsBuilder.add(outputDepsProto); } outputs = outputsBuilder.build(); CustomCommandLine.Builder paramFileContentsBuilder = javaCompileCommandLine( semantics, configuration, classDirectory, sourceGenDirectory, tempDirectory, outputJar, gensrcOutputJar, compressJar, outputDepsProto, classpathEntries, processorPath, langtoolsJar, javaBuilderJar, processorNames, translations, resources, classpathResources, sourceJars, sourceFiles, internedJcopts, directJars, strictJavaDeps, compileTimeDependencyArtifacts, ruleKind, targetLabel); semantics.buildJavaCommandLine(outputs, configuration, paramFileContentsBuilder); CommandLine paramFileContents = paramFileContentsBuilder.build(); Action parameterFileWriteAction = new ParameterFileWriteAction(owner, paramFile, paramFileContents, ParameterFile.ParameterFileType.UNQUOTED, ISO_8859_1); analysisEnvironment.registerAction(parameterFileWriteAction); CommandLine javaBuilderCommandLine = spawnCommandLine( javaExecutable, javaBuilderJar, langtoolsJar, paramFile, javaConfiguration.getDefaultJavaBuilderJvmFlags()); return new JavaCompileAction(owner, baseInputs, outputs, paramFileContents, javaBuilderCommandLine, classDirectory, outputJar, classpathEntries, processorPath, langtoolsJar, javaBuilderJar, processorNames, translations, resources, classpathResources, sourceJars, sourceFiles, internedJcopts, directJars, strictJavaDeps, compileTimeDependencyArtifacts, semantics); } public Builder setParameterFile(Artifact paramFile) { this.paramFile = paramFile; return this; } public Builder setJavaExecutable(PathFragment javaExecutable) { this.javaExecutable = javaExecutable; return this; } public Builder setJavaBaseInputs(Iterable<Artifact> javabaseInputs) { this.javabaseInputs = ImmutableList.copyOf(javabaseInputs); return this; } public Builder setOutputJar(Artifact outputJar) { this.outputJar = outputJar; return this; } public Builder setGensrcOutputJar(Artifact gensrcOutputJar) { this.gensrcOutputJar = gensrcOutputJar; return this; } public Builder setOutputDepsProto(Artifact outputDepsProto) { this.outputDepsProto = outputDepsProto; return this; } public Builder setMetadata(Artifact metadata) { this.metadata = metadata; return this; } public Builder addSourceFile(Artifact sourceFile) { sourceFiles.add(sourceFile); return this; } public Builder addSourceFiles(Collection<Artifact> sourceFiles) { this.sourceFiles.addAll(sourceFiles); return this; } public Builder addSourceJars(Collection<Artifact> sourceJars) { this.sourceJars.addAll(sourceJars); return this; } public Builder addResources(Collection<Artifact> resources) { this.resources.addAll(resources); return this; } public Builder addClasspathResources(Collection<Artifact> classpathResources) { this.classpathResources.addAll(classpathResources); return this; } public Builder addTranslations(Collection<Artifact> translations) { this.translations.addAll(translations); return this; } /** * Sets the strictness of Java dependency checking, see {@link * com.google.devtools.build.lib.analysis.config.BuildConfiguration.StrictDepsMode}. */ public Builder setStrictJavaDeps(BuildConfiguration.StrictDepsMode strictDeps) { strictJavaDeps = strictDeps; return this; } /** * Accumulates the given jar artifacts as being provided by direct dependencies. */ public Builder addDirectJars(Collection<Artifact> directJars) { Iterables.addAll(this.directJars, directJars); return this; } public Builder addCompileTimeDependencyArtifacts(Collection<Artifact> dependencyArtifacts) { Iterables.addAll(this.compileTimeDependencyArtifacts, dependencyArtifacts); return this; } public Builder setJavacOpts(Iterable<String> copts) { this.javacOpts = ImmutableList.copyOf(copts); return this; } public Builder setCompressJar(boolean compressJar) { this.compressJar = compressJar; return this; } public Builder setClasspathEntries(NestedSet<Artifact> classpathEntries) { this.classpathEntries = classpathEntries; return this; } public Builder setBootclasspathEntries(Iterable<Artifact> bootclasspathEntries) { this.bootclasspathEntries = ImmutableList.copyOf(bootclasspathEntries); return this; } public Builder setClassDirectory(PathFragment classDirectory) { this.classDirectory = classDirectory; return this; } /** * Sets the directory where source files generated by annotation processors should be stored. */ public Builder setSourceGenDirectory(PathFragment sourceGenDirectory) { this.sourceGenDirectory = sourceGenDirectory; return this; } public Builder setTempDirectory(PathFragment tempDirectory) { this.tempDirectory = tempDirectory; return this; } public Builder addProcessorPaths(Collection<Artifact> processorPaths) { this.processorPath.addAll(processorPaths); return this; } public Builder addProcessorNames(Collection<String> processorNames) { this.processorNames.addAll(processorNames); return this; } public Builder setLangtoolsJar(Artifact langtoolsJar) { this.langtoolsJar = langtoolsJar; return this; } public Builder setJavaBuilderJar(Artifact javaBuilderJar) { this.javaBuilderJar = javaBuilderJar; return this; } public Builder setRuleKind(String ruleKind) { this.ruleKind = ruleKind; return this; } public Builder setTargetLabel(Label targetLabel) { this.targetLabel = targetLabel; return this; } } }
src/main/java/com/google/devtools/build/lib/rules/java/JavaCompileAction.java
// Copyright 2014 Google Inc. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.devtools.build.lib.rules.java; import static java.nio.charset.StandardCharsets.ISO_8859_1; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Joiner; import com.google.common.base.Preconditions; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.devtools.build.lib.actions.AbstractAction; import com.google.devtools.build.lib.actions.Action; import com.google.devtools.build.lib.actions.ActionExecutionContext; import com.google.devtools.build.lib.actions.ActionExecutionException; import com.google.devtools.build.lib.actions.ActionInput; import com.google.devtools.build.lib.actions.ActionInputHelper; import com.google.devtools.build.lib.actions.ActionOwner; import com.google.devtools.build.lib.actions.Artifact; import com.google.devtools.build.lib.actions.BaseSpawn; import com.google.devtools.build.lib.actions.EnvironmentalExecException; import com.google.devtools.build.lib.actions.ExecException; import com.google.devtools.build.lib.actions.Executor; import com.google.devtools.build.lib.actions.ParameterFile; import com.google.devtools.build.lib.actions.ResourceSet; import com.google.devtools.build.lib.actions.Spawn; import com.google.devtools.build.lib.actions.SpawnActionContext; import com.google.devtools.build.lib.actions.extra.ExtraActionInfo; import com.google.devtools.build.lib.actions.extra.JavaCompileInfo; import com.google.devtools.build.lib.analysis.AnalysisEnvironment; import com.google.devtools.build.lib.analysis.RuleContext; import com.google.devtools.build.lib.analysis.actions.CommandLine; import com.google.devtools.build.lib.analysis.actions.CustomCommandLine; import com.google.devtools.build.lib.analysis.actions.CustomCommandLine.CustomArgv; import com.google.devtools.build.lib.analysis.actions.CustomCommandLine.CustomMultiArgv; import com.google.devtools.build.lib.analysis.actions.ParameterFileWriteAction; import com.google.devtools.build.lib.analysis.config.BuildConfiguration; import com.google.devtools.build.lib.collect.ImmutableIterable; import com.google.devtools.build.lib.collect.nestedset.NestedSet; import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder; import com.google.devtools.build.lib.collect.nestedset.Order; import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadCompatible; import com.google.devtools.build.lib.rules.java.JavaConfiguration.JavaClasspathMode; import com.google.devtools.build.lib.syntax.Label; import com.google.devtools.build.lib.util.Fingerprint; import com.google.devtools.build.lib.util.ShellEscaper; import com.google.devtools.build.lib.util.StringCanonicalizer; import com.google.devtools.build.lib.vfs.FileSystemUtils; import com.google.devtools.build.lib.vfs.Path; import com.google.devtools.build.lib.vfs.PathFragment; import java.io.IOException; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedHashSet; import java.util.List; /** * Action that represents a Java compilation. */ @ThreadCompatible public class JavaCompileAction extends AbstractAction { private static final String GUID = "786e174d-ed97-4e79-9f61-ae74430714cf"; private static final ResourceSet LOCAL_RESOURCES = new ResourceSet(750 /*MB*/, 0.5 /*CPU*/, 0.0 /*IO*/); private final CommandLine javaCompileCommandLine; private final CommandLine commandLine; /** * The directory in which generated classfiles are placed. * May be erased/created by the JavaBuilder. */ private final PathFragment classDirectory; private final Artifact outputJar; /** * The list of classpath entries to specify to javac. */ private final NestedSet<Artifact> classpath; /** * The list of classpath entries to search for annotation processors. */ private final ImmutableList<Artifact> processorPath; /** * The list of annotation processor classes to run. */ private final ImmutableList<String> processorNames; /** * The translation messages. */ private final ImmutableList<Artifact> messages; /** * The set of resources to put into the jar. */ private final ImmutableList<Artifact> resources; /** * The set of classpath resources to put into the jar. */ private final ImmutableList<Artifact> classpathResources; /** * The set of files which contain lists of additional Java source files to * compile. */ private final ImmutableList<Artifact> sourceJars; /** * The set of explicit Java source files to compile. */ private final ImmutableList<Artifact> sourceFiles; /** * The compiler options to pass to javac. */ private final ImmutableList<String> javacOpts; /** * The subset of classpath jars provided by direct dependencies. */ private final ImmutableList<Artifact> directJars; /** * The level of strict dependency checks (off, warnings, or errors). */ private final BuildConfiguration.StrictDepsMode strictJavaDeps; /** * The set of .deps artifacts provided by direct dependencies. */ private final ImmutableList<Artifact> compileTimeDependencyArtifacts; /** * The java semantics to get the list of action outputs. */ private final JavaSemantics semantics; /** * Constructs an action to compile a set of Java source files to class files. * * @param owner the action owner, typically a java_* RuleConfiguredTarget. * @param baseInputs the set of the input artifacts of the compile action * without the parameter file action; * @param outputs the outputs of the action * @param javaCompileCommandLine the command line for the java library * builder - it's actually written to the parameter file, but other * parts (for example, ide_build_info) need access to the data * @param commandLine the actual invocation command line */ private JavaCompileAction(ActionOwner owner, Iterable<Artifact> baseInputs, Collection<Artifact> outputs, CommandLine javaCompileCommandLine, CommandLine commandLine, PathFragment classDirectory, Artifact outputJar, NestedSet<Artifact> classpath, List<Artifact> processorPath, Artifact langtoolsJar, Artifact javaBuilderJar, List<String> processorNames, Collection<Artifact> messages, Collection<Artifact> resources, Collection<Artifact> classpathResources, Collection<Artifact> sourceJars, Collection<Artifact> sourceFiles, List<String> javacOpts, Collection<Artifact> directJars, BuildConfiguration.StrictDepsMode strictJavaDeps, Collection<Artifact> compileTimeDependencyArtifacts, JavaSemantics semantics) { super(owner, Iterables.concat(ImmutableList.of( classpath, processorPath, messages, resources, classpathResources, sourceJars, sourceFiles, compileTimeDependencyArtifacts, ImmutableList.of(langtoolsJar, javaBuilderJar), baseInputs)), outputs); this.javaCompileCommandLine = javaCompileCommandLine; this.commandLine = commandLine; this.classDirectory = Preconditions.checkNotNull(classDirectory); this.outputJar = outputJar; this.classpath = classpath; this.processorPath = ImmutableList.copyOf(processorPath); this.processorNames = ImmutableList.copyOf(processorNames); this.messages = ImmutableList.copyOf(messages); this.resources = ImmutableList.copyOf(resources); this.classpathResources = ImmutableList.copyOf(classpathResources); this.sourceJars = ImmutableList.copyOf(sourceJars); this.sourceFiles = ImmutableList.copyOf(sourceFiles); this.javacOpts = ImmutableList.copyOf(javacOpts); this.directJars = ImmutableList.copyOf(directJars); this.strictJavaDeps = strictJavaDeps; this.compileTimeDependencyArtifacts = ImmutableList.copyOf(compileTimeDependencyArtifacts); this.semantics = semantics; } /** * Returns the given (passed to constructor) source files. */ @VisibleForTesting public Collection<Artifact> getSourceFiles() { return sourceFiles; } /** * Returns the list of paths that represent the resources to be added to the * jar. */ @VisibleForTesting public Collection<Artifact> getResources() { return resources; } /** * Returns the list of paths that represents the classpath. */ @VisibleForTesting public Iterable<Artifact> getClasspath() { return classpath; } /** * Returns the list of paths that represents the source jars. */ @VisibleForTesting public Collection<Artifact> getSourceJars() { return sourceJars; } /** * Returns the list of paths that represents the processor path. */ @VisibleForTesting public List<Artifact> getProcessorpath() { return processorPath; } @VisibleForTesting public List<String> getJavacOpts() { return javacOpts; } @VisibleForTesting public Collection<Artifact> getDirectJars() { return directJars; } @VisibleForTesting public Collection<Artifact> getCompileTimeDependencyArtifacts() { return compileTimeDependencyArtifacts; } @VisibleForTesting public BuildConfiguration.StrictDepsMode getStrictJavaDepsMode() { return strictJavaDeps; } public PathFragment getClassDirectory() { return classDirectory; } /** * Returns the list of class names of processors that should * be run. */ @VisibleForTesting public List<String> getProcessorNames() { return processorNames; } /** * Returns the output jar artifact that gets generated by archiving the * results of the Java compilation and the declared resources. */ public Artifact getOutputJar() { return outputJar; } @Override public Artifact getPrimaryOutput() { return getOutputJar(); } /** * Constructs a command line that can be used to invoke the * JavaBuilder. * * <p>Do not use this method, except for testing (and for the in-process * strategy). */ @VisibleForTesting public Iterable<String> buildCommandLine() { return javaCompileCommandLine.arguments(); } /** * Returns the command and arguments for a java compile action. */ public List<String> getCommand() { return ImmutableList.copyOf(commandLine.arguments()); } @Override @ThreadCompatible public void execute(ActionExecutionContext actionExecutionContext) throws ActionExecutionException, InterruptedException { Executor executor = actionExecutionContext.getExecutor(); try { List<ActionInput> outputs = new ArrayList<>(); outputs.addAll(getOutputs()); // Add a few useful side-effect output files to the list to retrieve. // TODO(bazel-team): Just make these Artifacts. PathFragment classDirectory = getClassDirectory(); outputs.addAll(semantics.getExtraJavaCompileOutputs(classDirectory)); outputs.add(ActionInputHelper.fromPath(classDirectory.getChild("srclist").getPathString())); try { // Make sure the directories exist, else the distributor will bomb. Path classDirectoryPath = executor.getExecRoot().getRelative(getClassDirectory()); FileSystemUtils.createDirectoryAndParents(classDirectoryPath); } catch (IOException e) { throw new EnvironmentalExecException(e.getMessage()); } final ImmutableList<ActionInput> finalOutputs = ImmutableList.copyOf(outputs); Spawn spawn = new BaseSpawn(getCommand(), ImmutableMap.<String, String>of(), ImmutableMap.<String, String>of(), this, LOCAL_RESOURCES) { @Override public Collection<? extends ActionInput> getOutputFiles() { return finalOutputs; } }; executor.getSpawnActionContext(getMnemonic()).exec(spawn, actionExecutionContext); } catch (ExecException e) { throw e.toActionExecutionException("Java compilation in rule '" + getOwner().getLabel() + "'", executor.getVerboseFailures(), this); } } @Override protected String computeKey() { Fingerprint f = new Fingerprint(); f.addString(GUID); f.addStrings(commandLine.arguments()); return f.hexDigestAndReset(); } @Override public String describeKey() { StringBuilder message = new StringBuilder(); for (String arg : ShellEscaper.escapeAll(commandLine.arguments())) { message.append(" Command-line argument: "); message.append(arg); message.append('\n'); } return message.toString(); } @Override public String getMnemonic() { return "Javac"; } @Override protected String getRawProgressMessage() { int count = sourceFiles.size(); if (count == 0) { // nothing to compile, just bundling resources and messages count = resources.size() + classpathResources.size() + messages.size(); } return "Building " + outputJar.prettyPrint() + " (" + count + " files)"; } @Override public String describeStrategy(Executor executor) { return getContext(executor).strategyLocality(getMnemonic(), true); } @Override public ResourceSet estimateResourceConsumption(Executor executor) { SpawnActionContext context = getContext(executor); if (context.isRemotable(getMnemonic(), true)) { return ResourceSet.ZERO; } return LOCAL_RESOURCES; } protected SpawnActionContext getContext(Executor executor) { return executor.getSpawnActionContext(getMnemonic()); } @Override public String toString() { StringBuilder result = new StringBuilder(); result.append("JavaBuilder "); Joiner.on(' ').appendTo(result, commandLine.arguments()); return result.toString(); } @Override public ExtraActionInfo.Builder getExtraActionInfo() { JavaCompileInfo.Builder info = JavaCompileInfo.newBuilder(); info.addAllSourceFile(Artifact.toExecPaths(getSourceFiles())); info.addAllClasspath(Artifact.toExecPaths(getClasspath())); info.addClasspath(getClassDirectory().getPathString()); info.addAllSourcepath(Artifact.toExecPaths(getSourceJars())); info.addAllJavacOpt(getJavacOpts()); info.addAllProcessor(getProcessorNames()); info.addAllProcessorpath(Artifact.toExecPaths(getProcessorpath())); info.setOutputjar(getOutputJar().getExecPathString()); return super.getExtraActionInfo() .setExtension(JavaCompileInfo.javaCompileInfo, info.build()); } /** * Creates an instance. * * @param configuration the build configuration, which provides the default options and the path * to the compiler, etc. * @param classDirectory the directory in which generated classfiles are placed relative to the * exec root * @param sourceGenDirectory the directory where source files generated by annotation processors * should be stored. * @param tempDirectory a directory in which the library builder can store temporary files * relative to the exec root * @param outputJar output jar * @param compressJar if true compress the output jar * @param outputDepsProto the proto file capturing dependency information * @param classpath the complete classpath, the directory in which generated classfiles are placed * @param processorPath the classpath where javac should search for annotation processors * @param processorNames the classes that javac should use as annotation processors * @param messages the message files for translation * @param resources the set of resources to put into the jar * @param classpathResources the set of classpath resources to put into the jar * @param sourceJars the set of jars containing additional source files to compile * @param sourceFiles the set of explicit Java source files to compile * @param javacOpts the compiler options to pass to javac */ private static CustomCommandLine.Builder javaCompileCommandLine( final JavaSemantics semantics, final BuildConfiguration configuration, final PathFragment classDirectory, final PathFragment sourceGenDirectory, PathFragment tempDirectory, Artifact outputJar, Artifact gensrcOutputJar, boolean compressJar, Artifact outputDepsProto, final NestedSet<Artifact> classpath, List<Artifact> processorPath, Artifact langtoolsJar, Artifact javaBuilderJar, List<String> processorNames, Collection<Artifact> messages, Collection<Artifact> resources, Collection<Artifact> classpathResources, Collection<Artifact> sourceJars, Collection<Artifact> sourceFiles, List<String> javacOpts, final Collection<Artifact> directJars, BuildConfiguration.StrictDepsMode strictJavaDeps, Collection<Artifact> compileTimeDependencyArtifacts, String ruleKind, Label targetLabel) { Preconditions.checkNotNull(classDirectory); Preconditions.checkNotNull(tempDirectory); Preconditions.checkNotNull(langtoolsJar); Preconditions.checkNotNull(javaBuilderJar); CustomCommandLine.Builder result = CustomCommandLine.builder(); result.add("--classdir").addPath(classDirectory); result.add("--tempdir").addPath(tempDirectory); if (outputJar != null) { result.addExecPath("--output", outputJar); } if (gensrcOutputJar != null) { result.add("--sourcegendir").addPath(sourceGenDirectory); result.addExecPath("--generated_sources_output", gensrcOutputJar); } if (compressJar) { result.add("--compress_jar"); } if (outputDepsProto != null) { result.addExecPath("--output_deps_proto", outputDepsProto); } result.add("--classpath").add(new CustomArgv() { @Override public String argv() { List<PathFragment> classpathEntries = new ArrayList<>(); for (Artifact classpathArtifact : classpath) { classpathEntries.add(classpathArtifact.getExecPath()); } classpathEntries.add(classDirectory); return Joiner.on(configuration.getHostPathSeparator()).join(classpathEntries); } }); if (!processorPath.isEmpty()) { result.addJoinExecPaths("--processorpath", configuration.getHostPathSeparator(), processorPath); } if (!processorNames.isEmpty()) { result.add("--processors", processorNames); } if (!messages.isEmpty()) { result.add("--messages"); for (Artifact message : messages) { addAsResourcePrefixedExecPath(semantics, message, result); } } if (!resources.isEmpty()) { result.add("--resources"); for (Artifact resource : resources) { addAsResourcePrefixedExecPath(semantics, resource, result); } } if (!classpathResources.isEmpty()) { result.addExecPaths("--classpath_resources", classpathResources); } if (!sourceJars.isEmpty()) { result.addExecPaths("--source_jars", sourceJars); } result.addExecPaths("--sources", sourceFiles); if (!javacOpts.isEmpty()) { result.add("--javacopts", javacOpts); } // strict_java_deps controls whether the mapping from jars to targets is // written out and whether we try to minimize the compile-time classpath. if (strictJavaDeps != BuildConfiguration.StrictDepsMode.OFF) { result.add("--strict_java_deps"); result.add((semantics.useStrictJavaDeps(configuration) ? strictJavaDeps : BuildConfiguration.StrictDepsMode.OFF).toString()); result.add(new CustomMultiArgv() { @Override public Iterable<String> argv() { return addJarsToTargets(classpath, directJars); } }); if (configuration.getFragment(JavaConfiguration.class).getReduceJavaClasspath() == JavaClasspathMode.JAVABUILDER) { result.add("--reduce_classpath"); if (!compileTimeDependencyArtifacts.isEmpty()) { result.addExecPaths("--deps_artifacts", compileTimeDependencyArtifacts); } } } if (ruleKind != null) { result.add("--rule_kind"); result.add(ruleKind); } if (targetLabel != null) { result.add("--target_label"); if (targetLabel.getPackageIdentifier().getRepository().isDefault()) { result.add(targetLabel.toString()); } else { // @-prefixed strings will be assumed to be filenames and expanded by // {@link JavaLibraryBuildRequest}, so add an extra &at; to escape it. result.add("@" + targetLabel); } } return result; } private static void addAsResourcePrefixedExecPath(JavaSemantics semantics, Artifact artifact, CustomCommandLine.Builder builder) { PathFragment execPath = artifact.getExecPath(); PathFragment resourcePath = semantics.getJavaResourcePath(artifact.getRootRelativePath()); if (execPath.equals(resourcePath)) { builder.addPaths(":%s", resourcePath); } else { // execPath must end with resourcePath in all cases PathFragment rootPrefix = trimTail(execPath, resourcePath); builder.addPaths("%s:%s", rootPrefix, resourcePath); } } /** * Returns the root-part of a given path by trimming off the end specified by * a given tail. Assumes that the tail is known to match, and simply relies on * the segment lengths. */ private static PathFragment trimTail(PathFragment path, PathFragment tail) { return path.subFragment(0, path.segmentCount() - tail.segmentCount()); } /** * Builds the list of mappings between jars on the classpath and their * originating targets names. */ private static ImmutableList<String> addJarsToTargets( NestedSet<Artifact> classpath, Collection<Artifact> directJars) { ImmutableList.Builder<String> builder = ImmutableList.builder(); for (Artifact jar : classpath) { builder.add(directJars.contains(jar) ? "--direct_dependency" : "--indirect_dependency"); builder.add(jar.getExecPathString()); Label label = getTargetName(jar); builder.add(label.getPackageIdentifier().getRepository().isDefault() ? label.toString() : label.toPathFragment().toString()); } return builder.build(); } /** * Gets the name of the target that produced the given jar artifact. * * When specifying jars directly in the "srcs" attribute of a rule (mostly * for third_party libraries), there is no generating action, so we just * return the jar name in label form. */ private static Label getTargetName(Artifact jar) { return Preconditions.checkNotNull(jar.getOwner(), jar); } /** * The actual command line executed for a compile action. */ private static CommandLine spawnCommandLine(PathFragment javaExecutable, Artifact javaBuilderJar, Artifact langtoolsJar, Artifact paramFile, ImmutableList<String> javaBuilderJvmFlags) { Preconditions.checkNotNull(langtoolsJar); Preconditions.checkNotNull(javaBuilderJar); return CustomCommandLine.builder() .addPath(javaExecutable) // Langtools jar is placed on the boot classpath so that it can override classes // in the JRE. Typically this has no effect since langtools.jar does not have // classes in common with rt.jar. However, it is necessary when using a version // of javac.jar generated via ant from the langtools build.xml that is of a // different version than AND has an overlap in contents with the default // run-time (eg while upgrading the Java version). .addPaths("-Xbootclasspath/p:%s", langtoolsJar.getExecPath()) .add(javaBuilderJvmFlags) .addExecPath("-jar", javaBuilderJar) .addPaths("@%s", paramFile.getExecPath()) .build(); } /** * Builder class to construct Java compile actions. */ public static class Builder { private final ActionOwner owner; private final AnalysisEnvironment analysisEnvironment; private final BuildConfiguration configuration; private final JavaSemantics semantics; private PathFragment javaExecutable; private List<Artifact> javabaseInputs = ImmutableList.of(); private Artifact outputJar; private Artifact gensrcOutputJar; private Artifact outputDepsProto; private Artifact paramFile; private Artifact metadata; private final Collection<Artifact> sourceFiles = new ArrayList<>(); private final Collection<Artifact> sourceJars = new ArrayList<>(); private final Collection<Artifact> resources = new ArrayList<>(); private final Collection<Artifact> classpathResources = new ArrayList<>(); private final Collection<Artifact> translations = new LinkedHashSet<>(); private BuildConfiguration.StrictDepsMode strictJavaDeps = BuildConfiguration.StrictDepsMode.OFF; private final Collection<Artifact> directJars = new ArrayList<>(); private final Collection<Artifact> compileTimeDependencyArtifacts = new ArrayList<>(); private List<String> javacOpts = new ArrayList<>(); private boolean compressJar; private NestedSet<Artifact> classpathEntries = NestedSetBuilder.emptySet(Order.NAIVE_LINK_ORDER); private ImmutableList<Artifact> bootclasspathEntries = ImmutableList.of(); private Artifact javaBuilderJar; private Artifact langtoolsJar; private PathFragment classDirectory; private PathFragment sourceGenDirectory; private PathFragment tempDirectory; private final List<Artifact> processorPath = new ArrayList<>(); private final List<String> processorNames = new ArrayList<>(); private String ruleKind; private Label targetLabel; /** * Creates a Builder from an owner and a build configuration. */ public Builder(ActionOwner owner, AnalysisEnvironment analysisEnvironment, BuildConfiguration configuration, JavaSemantics semantics) { this.owner = owner; this.analysisEnvironment = analysisEnvironment; this.configuration = configuration; this.semantics = semantics; } /** * Creates a Builder from an owner and a build configuration. */ public Builder(RuleContext ruleContext, JavaSemantics semantics) { this(ruleContext.getActionOwner(), ruleContext.getAnalysisEnvironment(), ruleContext.getConfiguration(), semantics); } public JavaCompileAction build() { // TODO(bazel-team): all the params should be calculated before getting here, and the various // aggregation code below should go away. List<String> jcopts = new ArrayList<>(javacOpts); JavaConfiguration javaConfiguration = configuration.getFragment(JavaConfiguration.class); if (javaConfiguration.getJavaWarns().size() > 0) { jcopts.add("-Xlint:" + Joiner.on(',').join(javaConfiguration.getJavaWarns())); } if (!bootclasspathEntries.isEmpty()) { jcopts.add("-bootclasspath"); jcopts.add( Artifact.joinExecPaths(configuration.getHostPathSeparator(), bootclasspathEntries)); } List<String> internedJcopts = new ArrayList<>(); for (String jcopt : jcopts) { internedJcopts.add(StringCanonicalizer.intern(jcopt)); } // Invariant: if strictJavaDeps is OFF, then directJars and // dependencyArtifacts are ignored if (strictJavaDeps == BuildConfiguration.StrictDepsMode.OFF) { directJars.clear(); compileTimeDependencyArtifacts.clear(); } // Invariant: if experimental_java_classpath is not set to 'javabuilder', // dependencyArtifacts are ignored if (javaConfiguration.getReduceJavaClasspath() != JavaClasspathMode.JAVABUILDER) { compileTimeDependencyArtifacts.clear(); } if (paramFile == null) { paramFile = analysisEnvironment.getDerivedArtifact( ParameterFile.derivePath(outputJar.getRootRelativePath()), configuration.getBinDirectory()); } // ImmutableIterable is safe to use here because we know that neither of the components of // the Iterable.concat() will change. Without ImmutableIterable, AbstractAction will // waste memory by making a preventive copy of the iterable. Iterable<Artifact> baseInputs = ImmutableIterable.from(Iterables.concat( javabaseInputs, bootclasspathEntries, ImmutableList.of(paramFile))); Preconditions.checkState(javaExecutable != null, owner); Preconditions.checkState(javaExecutable.isAbsolute() ^ !javabaseInputs.isEmpty(), javaExecutable); Collection<Artifact> outputs; ImmutableList.Builder<Artifact> outputsBuilder = ImmutableList.builder(); outputsBuilder.add(outputJar); if (metadata != null) { outputsBuilder.add(metadata); } if (gensrcOutputJar != null) { outputsBuilder.add(gensrcOutputJar); } if (outputDepsProto != null) { outputsBuilder.add(outputDepsProto); } outputs = outputsBuilder.build(); CustomCommandLine.Builder paramFileContentsBuilder = javaCompileCommandLine( semantics, configuration, classDirectory, sourceGenDirectory, tempDirectory, outputJar, gensrcOutputJar, compressJar, outputDepsProto, classpathEntries, processorPath, langtoolsJar, javaBuilderJar, processorNames, translations, resources, classpathResources, sourceJars, sourceFiles, internedJcopts, directJars, strictJavaDeps, compileTimeDependencyArtifacts, ruleKind, targetLabel); semantics.buildJavaCommandLine(outputs, configuration, paramFileContentsBuilder); CommandLine paramFileContents = paramFileContentsBuilder.build(); Action parameterFileWriteAction = new ParameterFileWriteAction(owner, paramFile, paramFileContents, ParameterFile.ParameterFileType.UNQUOTED, ISO_8859_1); analysisEnvironment.registerAction(parameterFileWriteAction); CommandLine javaBuilderCommandLine = spawnCommandLine( javaExecutable, javaBuilderJar, langtoolsJar, paramFile, javaConfiguration.getDefaultJavaBuilderJvmFlags()); return new JavaCompileAction(owner, baseInputs, outputs, paramFileContents, javaBuilderCommandLine, classDirectory, outputJar, classpathEntries, processorPath, langtoolsJar, javaBuilderJar, processorNames, translations, resources, classpathResources, sourceJars, sourceFiles, internedJcopts, directJars, strictJavaDeps, compileTimeDependencyArtifacts, semantics); } public Builder setParameterFile(Artifact paramFile) { this.paramFile = paramFile; return this; } public Builder setJavaExecutable(PathFragment javaExecutable) { this.javaExecutable = javaExecutable; return this; } public Builder setJavaBaseInputs(Iterable<Artifact> javabaseInputs) { this.javabaseInputs = ImmutableList.copyOf(javabaseInputs); return this; } public Builder setOutputJar(Artifact outputJar) { this.outputJar = outputJar; return this; } public Builder setGensrcOutputJar(Artifact gensrcOutputJar) { this.gensrcOutputJar = gensrcOutputJar; return this; } public Builder setOutputDepsProto(Artifact outputDepsProto) { this.outputDepsProto = outputDepsProto; return this; } public Builder setMetadata(Artifact metadata) { this.metadata = metadata; return this; } public Builder addSourceFile(Artifact sourceFile) { sourceFiles.add(sourceFile); return this; } public Builder addSourceFiles(Collection<Artifact> sourceFiles) { this.sourceFiles.addAll(sourceFiles); return this; } public Builder addSourceJars(Collection<Artifact> sourceJars) { this.sourceJars.addAll(sourceJars); return this; } public Builder addResources(Collection<Artifact> resources) { this.resources.addAll(resources); return this; } public Builder addClasspathResources(Collection<Artifact> classpathResources) { this.classpathResources.addAll(classpathResources); return this; } public Builder addTranslations(Collection<Artifact> translations) { this.translations.addAll(translations); return this; } /** * Sets the strictness of Java dependency checking, see {@link * com.google.devtools.build.lib.analysis.config.BuildConfiguration.StrictDepsMode}. */ public Builder setStrictJavaDeps(BuildConfiguration.StrictDepsMode strictDeps) { strictJavaDeps = strictDeps; return this; } /** * Accumulates the given jar artifacts as being provided by direct dependencies. */ public Builder addDirectJars(Collection<Artifact> directJars) { Iterables.addAll(this.directJars, directJars); return this; } public Builder addCompileTimeDependencyArtifacts(Collection<Artifact> dependencyArtifacts) { Iterables.addAll(this.compileTimeDependencyArtifacts, dependencyArtifacts); return this; } public Builder setJavacOpts(Iterable<String> copts) { this.javacOpts = ImmutableList.copyOf(copts); return this; } public Builder setCompressJar(boolean compressJar) { this.compressJar = compressJar; return this; } public Builder setClasspathEntries(NestedSet<Artifact> classpathEntries) { this.classpathEntries = classpathEntries; return this; } public Builder setBootclasspathEntries(Iterable<Artifact> bootclasspathEntries) { this.bootclasspathEntries = ImmutableList.copyOf(bootclasspathEntries); return this; } public Builder setClassDirectory(PathFragment classDirectory) { this.classDirectory = classDirectory; return this; } /** * Sets the directory where source files generated by annotation processors should be stored. */ public Builder setSourceGenDirectory(PathFragment sourceGenDirectory) { this.sourceGenDirectory = sourceGenDirectory; return this; } public Builder setTempDirectory(PathFragment tempDirectory) { this.tempDirectory = tempDirectory; return this; } public Builder addProcessorPaths(Collection<Artifact> processorPaths) { this.processorPath.addAll(processorPaths); return this; } public Builder addProcessorNames(Collection<String> processorNames) { this.processorNames.addAll(processorNames); return this; } public Builder setLangtoolsJar(Artifact langtoolsJar) { this.langtoolsJar = langtoolsJar; return this; } public Builder setJavaBuilderJar(Artifact javaBuilderJar) { this.javaBuilderJar = javaBuilderJar; return this; } public Builder setRuleKind(String ruleKind) { this.ruleKind = ruleKind; return this; } public Builder setTargetLabel(Label targetLabel) { this.targetLabel = targetLabel; return this; } } }
JavaCompileAction uses a NestedSet for inputs. This way it doesn't flatten classpathEntries during the analysis phase. Deprecated NestedSetBuilder.addAll(NestedSet) and Runfiles.Builder.addArtifacts(NestedSet) method calls are replaced with the proper methods in []. With the above changes ChainedRuleDependenciesTest passes for []. -- MOS_MIGRATED_REVID=86179486
src/main/java/com/google/devtools/build/lib/rules/java/JavaCompileAction.java
JavaCompileAction uses a NestedSet for inputs. This way it doesn't flatten classpathEntries during the analysis phase.
Java
apache-2.0
6fe26d8db64973f0e06f892dbb66bfffa8c85893
0
quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus,quarkusio/quarkus
package io.quarkus.deployment.builditem.nativeimage; import static java.util.Arrays.stream; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import io.quarkus.builder.item.MultiBuildItem; /** * Used to register a class for reflection in native mode */ public final class ReflectiveClassBuildItem extends MultiBuildItem { private final List<String> className; private final boolean methods; private final boolean fields; private final boolean constructors; private final boolean finalFieldsWritable; private final boolean weak; private final boolean serialization; public ReflectiveClassBuildItem(boolean methods, boolean fields, Class<?>... className) { this(true, methods, fields, className); } public ReflectiveClassBuildItem(boolean constructors, boolean methods, boolean fields, Class<?>... className) { this(constructors, methods, fields, false, false, className); } private ReflectiveClassBuildItem(boolean constructors, boolean methods, boolean fields, boolean finalFieldsWritable, boolean weak, Class<?>... className) { this(constructors, methods, fields, false, false, false, className); } private ReflectiveClassBuildItem(boolean constructors, boolean methods, boolean fields, boolean finalFieldsWritable, boolean weak, boolean serialization, Class<?>... className) { List<String> names = new ArrayList<>(); for (Class<?> i : className) { if (i == null) { throw new NullPointerException(); } names.add(i.getName()); } this.className = names; this.methods = methods; this.fields = fields; this.constructors = constructors; this.finalFieldsWritable = finalFieldsWritable; this.weak = weak; this.serialization = serialization; if (weak) { if (serialization) { throw new RuntimeException("Weak reflection not supported with serialization"); } if (finalFieldsWritable) { throw new RuntimeException("Weak reflection not supported with finalFieldsWritable"); } } } public ReflectiveClassBuildItem(boolean methods, boolean fields, String... className) { this(true, methods, fields, className); } public ReflectiveClassBuildItem(boolean constructors, boolean methods, boolean fields, String... className) { this(constructors, methods, fields, false, false, className); } public ReflectiveClassBuildItem(boolean constructors, boolean methods, boolean fields, boolean serialization, String... className) { this(constructors, methods, fields, false, false, serialization, className); } public static ReflectiveClassBuildItem weakClass(String... className) { return new ReflectiveClassBuildItem(true, true, true, false, true, className); } public static ReflectiveClassBuildItem serializationClass(String... className) { return new ReflectiveClassBuildItem(false, false, false, false, false, true, className); } private ReflectiveClassBuildItem(boolean constructors, boolean methods, boolean fields, boolean finalFieldsWritable, boolean weak, String... className) { this(constructors, methods, fields, finalFieldsWritable, weak, false, className); } private ReflectiveClassBuildItem(boolean constructors, boolean methods, boolean fields, boolean finalFieldsWritable, boolean weak, boolean serialization, String... className) { for (String i : className) { if (i == null) { throw new NullPointerException(); } } this.className = Arrays.asList(className); this.methods = methods; this.fields = fields; this.constructors = constructors; this.finalFieldsWritable = finalFieldsWritable; this.weak = weak; this.serialization = serialization; } public List<String> getClassNames() { return className; } public boolean isMethods() { return methods; } public boolean isFields() { return fields; } public boolean isConstructors() { return constructors; } public boolean areFinalFieldsWritable() { return finalFieldsWritable; } public boolean isWeak() { return weak; } public boolean isSerialization() { return serialization; } public static Builder builder(Class<?>... className) { String[] classNameStrings = stream(className) .map(aClass -> { if (aClass == null) { throw new NullPointerException(); } return aClass.getName(); }) .toArray(String[]::new); return new Builder() .className(classNameStrings); } public static Builder builder(String... className) { return new Builder() .className(className); } public static class Builder { private String[] className; private boolean constructors = true; private boolean methods; private boolean fields; private boolean finalFieldsWritable; private boolean weak; private boolean serialization; private Builder() { } public Builder className(String[] className) { this.className = className; return this; } public Builder constructors(boolean constructors) { this.constructors = constructors; return this; } public Builder methods(boolean methods) { this.methods = methods; return this; } public Builder fields(boolean fields) { this.fields = fields; return this; } public Builder finalFieldsWritable(boolean finalFieldsWritable) { this.finalFieldsWritable = finalFieldsWritable; return this; } public Builder weak(boolean weak) { this.weak = weak; return this; } public Builder serialization(boolean serialize) { this.serialization = serialize; return this; } public ReflectiveClassBuildItem build() { return new ReflectiveClassBuildItem(constructors, methods, fields, finalFieldsWritable, weak, serialization, className); } } }
core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/ReflectiveClassBuildItem.java
package io.quarkus.deployment.builditem.nativeimage; import static java.util.Arrays.stream; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import io.quarkus.builder.item.MultiBuildItem; /** * Used to register a class for reflection in native mode */ public final class ReflectiveClassBuildItem extends MultiBuildItem { private final List<String> className; private final boolean methods; private final boolean fields; private final boolean constructors; private final boolean finalFieldsWritable; private final boolean weak; private final boolean serialization; public ReflectiveClassBuildItem(boolean methods, boolean fields, Class<?>... className) { this(true, methods, fields, className); } public ReflectiveClassBuildItem(boolean constructors, boolean methods, boolean fields, Class<?>... className) { this(constructors, methods, fields, false, false, className); } private ReflectiveClassBuildItem(boolean constructors, boolean methods, boolean fields, boolean finalFieldsWritable, boolean weak, Class<?>... className) { this(constructors, methods, fields, false, false, false, className); } private ReflectiveClassBuildItem(boolean constructors, boolean methods, boolean fields, boolean finalFieldsWritable, boolean weak, boolean serialization, Class<?>... className) { List<String> names = new ArrayList<>(); for (Class<?> i : className) { if (i == null) { throw new NullPointerException(); } names.add(i.getName()); } this.className = names; this.methods = methods; this.fields = fields; this.constructors = constructors; this.finalFieldsWritable = finalFieldsWritable; this.weak = weak; this.serialization = serialization; if (weak) { if (serialization) { throw new RuntimeException("Weak reflection not supported with serialization"); } if (finalFieldsWritable) { throw new RuntimeException("Weak reflection not supported with finalFieldsWritable"); } } } public ReflectiveClassBuildItem(boolean methods, boolean fields, String... className) { this(true, methods, fields, className); } public ReflectiveClassBuildItem(boolean constructors, boolean methods, boolean fields, String... className) { this(constructors, methods, fields, false, false, className); } public ReflectiveClassBuildItem(boolean constructors, boolean methods, boolean fields, boolean serialization, String... className) { this(constructors, methods, fields, false, false, serialization, className); } public static ReflectiveClassBuildItem weakClass(String... className) { return new ReflectiveClassBuildItem(true, true, true, false, true, className); } public static ReflectiveClassBuildItem serializationClass(String... className) { return new ReflectiveClassBuildItem(false, false, false, false, false, true, className); } private ReflectiveClassBuildItem(boolean constructors, boolean methods, boolean fields, boolean finalFieldsWritable, boolean weak, String... className) { this(constructors, methods, fields, finalFieldsWritable, weak, false, className); } private ReflectiveClassBuildItem(boolean constructors, boolean methods, boolean fields, boolean finalFieldsWritable, boolean weak, boolean serialization, String... className) { for (String i : className) { if (i == null) { throw new NullPointerException(); } } this.className = Arrays.asList(className); this.methods = methods; this.fields = fields; this.constructors = constructors; this.finalFieldsWritable = finalFieldsWritable; this.weak = weak; this.serialization = serialization; } public List<String> getClassNames() { return className; } public boolean isMethods() { return methods; } public boolean isFields() { return fields; } public boolean isConstructors() { return constructors; } public boolean areFinalFieldsWritable() { return finalFieldsWritable; } public boolean isWeak() { return weak; } public boolean isSerialization() { return serialization; } public static Builder builder(Class<?>... className) { String[] classNameStrings = stream(className) .map(aClass -> { if (aClass == null) { throw new NullPointerException(); } return aClass.getName(); }) .toArray(String[]::new); return new Builder() .className(classNameStrings); } public static Builder builder(String... className) { return new Builder() .className(className); } public static class Builder { private String[] className; private boolean constructors = true; private boolean methods; private boolean fields; private boolean finalFieldsWritable; private boolean weak; private boolean serialization; private Builder() { } public Builder className(String[] className) { this.className = className; return this; } public Builder constructors(boolean constructors) { this.constructors = constructors; return this; } public Builder methods(boolean methods) { this.methods = methods; return this; } public Builder fields(boolean fields) { this.fields = fields; return this; } public Builder finalFieldsWritable(boolean finalFieldsWritable) { this.finalFieldsWritable = finalFieldsWritable; return this; } public Builder weak(boolean weak) { this.weak = weak; return this; } public Builder serialization(boolean serialize) { this.serialization = serialization; return this; } public ReflectiveClassBuildItem build() { return new ReflectiveClassBuildItem(constructors, methods, fields, finalFieldsWritable, weak, className); } } }
Fix `serialization` flag handling in `ReflectiveClassBuildItem.MultiBuildItem`
core/deployment/src/main/java/io/quarkus/deployment/builditem/nativeimage/ReflectiveClassBuildItem.java
Fix `serialization` flag handling in `ReflectiveClassBuildItem.MultiBuildItem`
Java
apache-2.0
d5c1b5acd63c8447def5672eaf5cd93dd72667f6
0
tbgdn/cloudapp-mp2,tbgdn/cloudapp-mp2,tbgdn/cloudapp-mp2
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import java.io.IOException; import java.util.StringTokenizer; // >>> Don't Change public class OrphanPages extends Configured implements Tool { public static void main(String[] args) throws Exception { int res = ToolRunner.run(new Configuration(), new OrphanPages(), args); System.exit(res); } // <<< Don't Change @Override public int run(String[] args) throws Exception { //TODO Job job = Job.getInstance(this.getConf(), "Orphan Pages"); job.setOutputKeyClass(Integer.class); job.setOutputValueClass(NullWritable.class); job.setMapOutputKeyClass(IntWritable.class); job.setMapOutputValueClass(IntWritable.class); job.setMapperClass(LinkCountMap.class); job.setReducerClass(OrphanPageReduce.class); FileInputFormat.setInputPaths(job, new Path(args[0])); FileOutputFormat.setOutputPath(job, new Path(args[1])); job.setJarByClass(TitleCount.class); return job.waitForCompletion(true) ? 0 : 1; } public static class LinkCountMap extends Mapper<Object, Text, IntWritable, IntWritable> { @Override public void map(Object key, Text value, Context context) throws IOException, InterruptedException { //TODO String[] referralAndPages = value.toString().split(":"); if (referralAndPages.length >= 1){ context.write(new IntWritable(Integer.valueOf(referralAndPages[0].trim())), new IntWritable(0)); } if (referralAndPages.length == 2){ String[] referredPages = referralAndPages[1].split(","); for(String referredPage: referredPages){ context.write(new IntWritable(Integer.valueOf(referredPage.trim())), new IntWritable(1)); } } } } public static class OrphanPageReduce extends Reducer<IntWritable, IntWritable, IntWritable, NullWritable> { @Override public void reduce(IntWritable key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException { //TODO int refLinksNum = 0; for(IntWritable page: values){ refLinksNum += page.get(); } if (refLinksNum == 0){ context.write(key, NullWritable.get()); } } } }
src/main/java/OrphanPages.java
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.util.Tool; import org.apache.hadoop.util.ToolRunner; import java.io.IOException; import java.util.StringTokenizer; // >>> Don't Change public class OrphanPages extends Configured implements Tool { public static void main(String[] args) throws Exception { int res = ToolRunner.run(new Configuration(), new OrphanPages(), args); System.exit(res); } // <<< Don't Change @Override public int run(String[] args) throws Exception { //TODO Job job = Job.getInstance(this.getConf(), "Orphan Pages"); job.setOutputKeyClass(Text.class); job.setOutputValueClass(IntWritable.class); job.setMapOutputKeyClass(Text.class); job.setMapOutputValueClass(IntWritable.class); job.setMapperClass(LinkCountMap.class); job.setReducerClass(OrphanPageReduce.class); FileInputFormat.setInputPaths(job, new Path(args[0])); FileOutputFormat.setOutputPath(job, new Path(args[1])); job.setJarByClass(TitleCount.class); return job.waitForCompletion(true) ? 0 : 1; } public static class LinkCountMap extends Mapper<Object, Text, IntWritable, IntWritable> { @Override public void map(Object key, Text value, Context context) throws IOException, InterruptedException { //TODO String[] referralAndPages = value.toString().split(":"); if (referralAndPages.length >= 1){ context.write(new IntWritable(Integer.valueOf(referralAndPages[0].trim())), new IntWritable(0)); } if (referralAndPages.length == 2){ String[] referredPages = referralAndPages[1].split(","); for(String referredPage: referredPages){ context.write(new IntWritable(Integer.valueOf(referredPage.trim())), new IntWritable(1)); } } } } public static class OrphanPageReduce extends Reducer<IntWritable, IntWritable, IntWritable, NullWritable> { @Override public void reduce(IntWritable key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException { //TODO int refLinksNum = 0; for(IntWritable page: values){ refLinksNum += page.get(); } if (refLinksNum == 0){ context.write(key, NullWritable.get()); } } } }
Set the correct output classes
src/main/java/OrphanPages.java
Set the correct output classes
Java
apache-2.0
acc4fc85b87b171597ea79b2745c8a8a8ae04eea
0
florentw/bench,florentw/bench
server/src/test/java/io/amaze/bench/graph/TopologicalSortTest.java
package io.amaze.bench.graph; import org.junit.Before; import org.junit.Test; import java.util.List; import static io.amaze.bench.graph.DirectedGraphTest.genCyclicGraph; import static io.amaze.bench.graph.DirectedGraphTest.genSimpleDiamondGraph; import static io.amaze.bench.graph.TopologicalSort.isAcyclic; import static junit.framework.TestCase.assertTrue; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertEquals; /** * Created on 2/20/16 * * @author Florent Weber ([email protected]) */ public class TopologicalSortTest { private DirectedGraph<String> graph; @Before public void before() { graph = new DirectedGraph<>(); } @Test(expected = java.lang.NullPointerException.class) public void sort_null_graph() throws CyclicGraphException { TopologicalSort.sort(null); } @Test public void sort_empty_graph() throws CyclicGraphException { List<String> orderedLayers = TopologicalSort.sort(graph); assertTrue(orderedLayers.isEmpty()); } @Test public void single_vertex() throws CyclicGraphException { graph.addNodes("a"); List<String> orderedLayers = TopologicalSort.sort(graph); assertThat(orderedLayers.size(), is(1)); assertTrue(orderedLayers.contains("a")); } @Test public void multiple_vertices() throws CyclicGraphException { graph.addNodes("a"); graph.addNodes("b"); List<String> orderedLayers = TopologicalSort.sort(graph); assertThat(orderedLayers.size(), is(2)); assertTrue(orderedLayers.contains("a")); assertTrue(orderedLayers.contains("b")); } @Test public void simple_directed_acyclic_graph() throws CyclicGraphException { graph.addNodes("b"); graph.addNodes("a"); graph.addEdge("a", "b"); List<String> orderedLayers = TopologicalSort.sort(graph); assertEquals(2, orderedLayers.size()); assertTrue(orderedLayers.get(0).equals("a")); assertTrue(orderedLayers.get(1).equals("b")); } @Test public void complex_graph() throws CyclicGraphException { // node b has two incoming edges graph.addNodes("a"); graph.addNodes("b"); graph.addNodes("c"); graph.addNodes("d"); graph.addNodes("e"); graph.addNodes("f"); graph.addNodes("g"); graph.addNodes("h"); graph.addEdge("a", "b"); graph.addEdge("a", "c"); graph.addEdge("c", "d"); graph.addEdge("d", "b"); graph.addEdge("c", "e"); graph.addEdge("f", "g"); List<String> orderedLayers = TopologicalSort.sort(graph); assertEquals(8, orderedLayers.size()); assertTrue(orderedLayers.indexOf("a") < orderedLayers.indexOf("b")); assertTrue(orderedLayers.indexOf("a") < orderedLayers.indexOf("c")); assertTrue(orderedLayers.indexOf("c") < orderedLayers.indexOf("d")); assertTrue(orderedLayers.indexOf("c") < orderedLayers.indexOf("e")); assertTrue(orderedLayers.indexOf("d") < orderedLayers.indexOf("b")); assertTrue(orderedLayers.indexOf("f") < orderedLayers.indexOf("g")); } @Test(expected = CyclicGraphException.class) public void cyclic_graph_sort_fails() throws CyclicGraphException { genCyclicGraph(graph); TopologicalSort.sort(graph); } @Test public void cyclic_graph_detection() throws CyclicGraphException { genCyclicGraph(graph); assertThat(isAcyclic(graph), is(false)); } @Test public void acyclic_graph_detection() throws CyclicGraphException { genSimpleDiamondGraph(graph); assertThat(isAcyclic(graph), is(true)); } }
Delete orphan file.
server/src/test/java/io/amaze/bench/graph/TopologicalSortTest.java
Delete orphan file.
Java
apache-2.0
7805ac59bd7d79f4165b720cf08ce50178e2dbb8
0
mnki/camel,gautric/camel,davidwilliams1978/camel,nikvaessen/camel,woj-i/camel,kevinearls/camel,noelo/camel,mike-kukla/camel,bhaveshdt/camel,onders86/camel,chanakaudaya/camel,anoordover/camel,bdecoste/camel,NetNow/camel,lasombra/camel,borcsokj/camel,pax95/camel,igarashitm/camel,logzio/camel,koscejev/camel,dvankleef/camel,lasombra/camel,ekprayas/camel,arnaud-deprez/camel,kevinearls/camel,dsimansk/camel,maschmid/camel,allancth/camel,borcsokj/camel,pax95/camel,jarst/camel,mgyongyosi/camel,tdiesler/camel,yury-vashchyla/camel,stravag/camel,atoulme/camel,noelo/camel,yuruki/camel,nikvaessen/camel,zregvart/camel,NetNow/camel,ge0ffrey/camel,mike-kukla/camel,ramonmaruko/camel,manuelh9r/camel,nikhilvibhav/camel,rparree/camel,ekprayas/camel,noelo/camel,pmoerenhout/camel,manuelh9r/camel,bgaudaen/camel,ge0ffrey/camel,duro1/camel,askannon/camel,tdiesler/camel,sirlatrom/camel,curso007/camel,YMartsynkevych/camel,bhaveshdt/camel,iweiss/camel,woj-i/camel,chanakaudaya/camel,rmarting/camel,punkhorn/camel-upstream,Thopap/camel,stravag/camel,engagepoint/camel,MrCoder/camel,atoulme/camel,mzapletal/camel,isururanawaka/camel,jpav/camel,trohovsky/camel,mcollovati/camel,rparree/camel,jlpedrosa/camel,driseley/camel,CodeSmell/camel,tdiesler/camel,gautric/camel,lburgazzoli/apache-camel,pplatek/camel,DariusX/camel,gyc567/camel,CandleCandle/camel,jarst/camel,pplatek/camel,erwelch/camel,ekprayas/camel,dmvolod/camel,bdecoste/camel,gautric/camel,scranton/camel,pmoerenhout/camel,sverkera/camel,grange74/camel,hqstevenson/camel,sirlatrom/camel,NickCis/camel,davidkarlsen/camel,dmvolod/camel,partis/camel,isavin/camel,aaronwalker/camel,dkhanolkar/camel,nicolaferraro/camel,cunningt/camel,Fabryprog/camel,josefkarasek/camel,MrCoder/camel,yogamaha/camel,atoulme/camel,sverkera/camel,dpocock/camel,ramonmaruko/camel,coderczp/camel,driseley/camel,stalet/camel,lburgazzoli/camel,jamesnetherton/camel,NetNow/camel,tadayosi/camel,YMartsynkevych/camel,grange74/camel,chirino/camel,DariusX/camel,engagepoint/camel,koscejev/camel,josefkarasek/camel,snurmine/camel,pmoerenhout/camel,bhaveshdt/camel,rparree/camel,haku/camel,oscerd/camel,yogamaha/camel,yuruki/camel,w4tson/camel,cunningt/camel,satishgummadelli/camel,gnodet/camel,anoordover/camel,partis/camel,edigrid/camel,akhettar/camel,woj-i/camel,apache/camel,lowwool/camel,ramonmaruko/camel,ullgren/camel,koscejev/camel,royopa/camel,johnpoth/camel,trohovsky/camel,allancth/camel,grange74/camel,noelo/camel,isavin/camel,RohanHart/camel,sebi-hgdata/camel,drsquidop/camel,ssharma/camel,JYBESSON/camel,mgyongyosi/camel,mzapletal/camel,yuruki/camel,bgaudaen/camel,jmandawg/camel,snurmine/camel,iweiss/camel,royopa/camel,eformat/camel,cunningt/camel,bfitzpat/camel,isururanawaka/camel,gautric/camel,lburgazzoli/apache-camel,ssharma/camel,onders86/camel,onders86/camel,jmandawg/camel,gilfernandes/camel,jkorab/camel,nicolaferraro/camel,chanakaudaya/camel,erwelch/camel,apache/camel,akhettar/camel,erwelch/camel,yuruki/camel,pkletsko/camel,sverkera/camel,ssharma/camel,duro1/camel,lburgazzoli/apache-camel,iweiss/camel,JYBESSON/camel,kevinearls/camel,tlehoux/camel,scranton/camel,onders86/camel,NickCis/camel,jamesnetherton/camel,alvinkwekel/camel,snurmine/camel,curso007/camel,jmandawg/camel,sabre1041/camel,arnaud-deprez/camel,davidkarlsen/camel,CodeSmell/camel,mnki/camel,dmvolod/camel,veithen/camel,aaronwalker/camel,mohanaraosv/camel,JYBESSON/camel,davidwilliams1978/camel,anoordover/camel,veithen/camel,dkhanolkar/camel,MohammedHammam/camel,mohanaraosv/camel,davidwilliams1978/camel,NetNow/camel,gautric/camel,maschmid/camel,mcollovati/camel,skinzer/camel,tlehoux/camel,yury-vashchyla/camel,isururanawaka/camel,manuelh9r/camel,anton-k11/camel,alvinkwekel/camel,eformat/camel,jarst/camel,pax95/camel,atoulme/camel,acartapanis/camel,qst-jdc-labs/camel,bhaveshdt/camel,christophd/camel,acartapanis/camel,oscerd/camel,pax95/camel,trohovsky/camel,royopa/camel,jkorab/camel,sverkera/camel,bfitzpat/camel,lowwool/camel,salikjan/camel,nboukhed/camel,eformat/camel,gilfernandes/camel,tarilabs/camel,royopa/camel,satishgummadelli/camel,dkhanolkar/camel,FingolfinTEK/camel,MohammedHammam/camel,punkhorn/camel-upstream,yury-vashchyla/camel,stravag/camel,nboukhed/camel,sirlatrom/camel,allancth/camel,engagepoint/camel,w4tson/camel,snadakuduru/camel,skinzer/camel,davidkarlsen/camel,eformat/camel,sabre1041/camel,CandleCandle/camel,grange74/camel,tkopczynski/camel,edigrid/camel,royopa/camel,askannon/camel,oscerd/camel,sebi-hgdata/camel,igarashitm/camel,borcsokj/camel,royopa/camel,lasombra/camel,anton-k11/camel,isururanawaka/camel,arnaud-deprez/camel,skinzer/camel,trohovsky/camel,lowwool/camel,skinzer/camel,lasombra/camel,qst-jdc-labs/camel,jarst/camel,christophd/camel,curso007/camel,pplatek/camel,logzio/camel,christophd/camel,sverkera/camel,dkhanolkar/camel,yogamaha/camel,mnki/camel,chirino/camel,woj-i/camel,tadayosi/camel,stalet/camel,dpocock/camel,sverkera/camel,aaronwalker/camel,yury-vashchyla/camel,tarilabs/camel,noelo/camel,pplatek/camel,erwelch/camel,allancth/camel,jameszkw/camel,chanakaudaya/camel,logzio/camel,partis/camel,isururanawaka/camel,dpocock/camel,tlehoux/camel,stalet/camel,nboukhed/camel,DariusX/camel,tadayosi/camel,atoulme/camel,jollygeorge/camel,dpocock/camel,prashant2402/camel,johnpoth/camel,mgyongyosi/camel,johnpoth/camel,jmandawg/camel,jarst/camel,oscerd/camel,hqstevenson/camel,tdiesler/camel,gilfernandes/camel,RohanHart/camel,sabre1041/camel,drsquidop/camel,oalles/camel,YoshikiHigo/camel,bdecoste/camel,nikhilvibhav/camel,isavin/camel,borcsokj/camel,gilfernandes/camel,nikhilvibhav/camel,skinzer/camel,pax95/camel,coderczp/camel,johnpoth/camel,Thopap/camel,Fabryprog/camel,RohanHart/camel,JYBESSON/camel,lburgazzoli/camel,yuruki/camel,lburgazzoli/apache-camel,jlpedrosa/camel,jameszkw/camel,askannon/camel,dvankleef/camel,jonmcewen/camel,YMartsynkevych/camel,jonmcewen/camel,ekprayas/camel,isavin/camel,anton-k11/camel,pax95/camel,engagepoint/camel,drsquidop/camel,adessaigne/camel,rmarting/camel,cunningt/camel,YMartsynkevych/camel,driseley/camel,grgrzybek/camel,YoshikiHigo/camel,lburgazzoli/camel,yuruki/camel,bfitzpat/camel,josefkarasek/camel,qst-jdc-labs/camel,coderczp/camel,tdiesler/camel,cunningt/camel,nicolaferraro/camel,snadakuduru/camel,jlpedrosa/camel,haku/camel,brreitme/camel,yury-vashchyla/camel,ge0ffrey/camel,mohanaraosv/camel,curso007/camel,pmoerenhout/camel,lasombra/camel,pkletsko/camel,mgyongyosi/camel,nikvaessen/camel,woj-i/camel,YoshikiHigo/camel,anoordover/camel,driseley/camel,jkorab/camel,YMartsynkevych/camel,satishgummadelli/camel,bgaudaen/camel,oalles/camel,jkorab/camel,igarashitm/camel,jpav/camel,objectiser/camel,FingolfinTEK/camel,dvankleef/camel,JYBESSON/camel,alvinkwekel/camel,dkhanolkar/camel,grgrzybek/camel,nicolaferraro/camel,joakibj/camel,prashant2402/camel,rparree/camel,neoramon/camel,mgyongyosi/camel,eformat/camel,sebi-hgdata/camel,gnodet/camel,arnaud-deprez/camel,igarashitm/camel,bdecoste/camel,chirino/camel,hqstevenson/camel,manuelh9r/camel,jarst/camel,mike-kukla/camel,yogamaha/camel,MohammedHammam/camel,jollygeorge/camel,CodeSmell/camel,tarilabs/camel,adessaigne/camel,satishgummadelli/camel,atoulme/camel,duro1/camel,scranton/camel,neoramon/camel,snurmine/camel,mnki/camel,joakibj/camel,manuelh9r/camel,tkopczynski/camel,prashant2402/camel,veithen/camel,bfitzpat/camel,acartapanis/camel,nboukhed/camel,NetNow/camel,salikjan/camel,joakibj/camel,allancth/camel,ullgren/camel,gilfernandes/camel,maschmid/camel,kevinearls/camel,yogamaha/camel,sirlatrom/camel,rparree/camel,MrCoder/camel,lburgazzoli/camel,anoordover/camel,dvankleef/camel,christophd/camel,jpav/camel,erwelch/camel,christophd/camel,mnki/camel,gnodet/camel,gnodet/camel,jlpedrosa/camel,edigrid/camel,engagepoint/camel,qst-jdc-labs/camel,YoshikiHigo/camel,jonmcewen/camel,zregvart/camel,acartapanis/camel,scranton/camel,tarilabs/camel,pmoerenhout/camel,gilfernandes/camel,joakibj/camel,mike-kukla/camel,iweiss/camel,jlpedrosa/camel,jameszkw/camel,sebi-hgdata/camel,tlehoux/camel,sirlatrom/camel,mzapletal/camel,yury-vashchyla/camel,bgaudaen/camel,NickCis/camel,nikhilvibhav/camel,pplatek/camel,sirlatrom/camel,ullgren/camel,mohanaraosv/camel,zregvart/camel,apache/camel,jkorab/camel,nikvaessen/camel,pplatek/camel,coderczp/camel,oalles/camel,tkopczynski/camel,lburgazzoli/apache-camel,trohovsky/camel,anton-k11/camel,dpocock/camel,FingolfinTEK/camel,logzio/camel,manuelh9r/camel,akhettar/camel,ramonmaruko/camel,chirino/camel,ramonmaruko/camel,grange74/camel,partis/camel,drsquidop/camel,pmoerenhout/camel,rmarting/camel,CandleCandle/camel,stravag/camel,bhaveshdt/camel,NickCis/camel,maschmid/camel,skinzer/camel,pkletsko/camel,MrCoder/camel,koscejev/camel,drsquidop/camel,dsimansk/camel,logzio/camel,haku/camel,qst-jdc-labs/camel,dvankleef/camel,objectiser/camel,prashant2402/camel,oscerd/camel,jamesnetherton/camel,gyc567/camel,joakibj/camel,curso007/camel,isavin/camel,grgrzybek/camel,FingolfinTEK/camel,tarilabs/camel,sabre1041/camel,stalet/camel,grgrzybek/camel,logzio/camel,qst-jdc-labs/camel,maschmid/camel,jkorab/camel,haku/camel,oalles/camel,tlehoux/camel,gyc567/camel,sabre1041/camel,hqstevenson/camel,jollygeorge/camel,mohanaraosv/camel,nikvaessen/camel,igarashitm/camel,apache/camel,stalet/camel,partis/camel,dpocock/camel,prashant2402/camel,haku/camel,Thopap/camel,onders86/camel,ssharma/camel,oalles/camel,tkopczynski/camel,lburgazzoli/camel,edigrid/camel,Thopap/camel,gautric/camel,isavin/camel,jmandawg/camel,YMartsynkevych/camel,duro1/camel,anton-k11/camel,koscejev/camel,jamesnetherton/camel,mgyongyosi/camel,anton-k11/camel,snadakuduru/camel,mcollovati/camel,mcollovati/camel,allancth/camel,dmvolod/camel,prashant2402/camel,bdecoste/camel,jonmcewen/camel,lowwool/camel,davidwilliams1978/camel,adessaigne/camel,neoramon/camel,brreitme/camel,ekprayas/camel,nboukhed/camel,chanakaudaya/camel,dsimansk/camel,bdecoste/camel,lasombra/camel,tdiesler/camel,oalles/camel,akhettar/camel,partis/camel,Thopap/camel,davidkarlsen/camel,iweiss/camel,ssharma/camel,RohanHart/camel,w4tson/camel,acartapanis/camel,pkletsko/camel,chanakaudaya/camel,edigrid/camel,johnpoth/camel,jameszkw/camel,brreitme/camel,dvankleef/camel,brreitme/camel,onders86/camel,apache/camel,jpav/camel,punkhorn/camel-upstream,tadayosi/camel,chirino/camel,MohammedHammam/camel,johnpoth/camel,logzio/camel,scranton/camel,lburgazzoli/camel,w4tson/camel,haku/camel,snurmine/camel,gyc567/camel,alvinkwekel/camel,sebi-hgdata/camel,brreitme/camel,sabre1041/camel,gyc567/camel,gnodet/camel,curso007/camel,satishgummadelli/camel,jpav/camel,snadakuduru/camel,satishgummadelli/camel,driseley/camel,w4tson/camel,cunningt/camel,rmarting/camel,NickCis/camel,eformat/camel,bfitzpat/camel,YoshikiHigo/camel,ramonmaruko/camel,mike-kukla/camel,davidwilliams1978/camel,yogamaha/camel,veithen/camel,maschmid/camel,ge0ffrey/camel,hqstevenson/camel,Fabryprog/camel,askannon/camel,sebi-hgdata/camel,borcsokj/camel,neoramon/camel,apache/camel,jamesnetherton/camel,NetNow/camel,YoshikiHigo/camel,dmvolod/camel,hqstevenson/camel,tkopczynski/camel,tkopczynski/camel,objectiser/camel,mohanaraosv/camel,adessaigne/camel,CandleCandle/camel,acartapanis/camel,dsimansk/camel,davidwilliams1978/camel,erwelch/camel,jmandawg/camel,w4tson/camel,mzapletal/camel,grgrzybek/camel,CandleCandle/camel,gyc567/camel,driseley/camel,JYBESSON/camel,kevinearls/camel,pplatek/camel,CandleCandle/camel,christophd/camel,drsquidop/camel,veithen/camel,jollygeorge/camel,mzapletal/camel,lowwool/camel,adessaigne/camel,ekprayas/camel,rparree/camel,MrCoder/camel,bgaudaen/camel,rmarting/camel,coderczp/camel,bgaudaen/camel,DariusX/camel,dsimansk/camel,igarashitm/camel,neoramon/camel,jlpedrosa/camel,NickCis/camel,trohovsky/camel,mzapletal/camel,RohanHart/camel,josefkarasek/camel,tarilabs/camel,dsimansk/camel,borcsokj/camel,dmvolod/camel,koscejev/camel,jonmcewen/camel,Thopap/camel,edigrid/camel,askannon/camel,arnaud-deprez/camel,neoramon/camel,lburgazzoli/apache-camel,coderczp/camel,jameszkw/camel,grgrzybek/camel,CodeSmell/camel,oscerd/camel,isururanawaka/camel,jameszkw/camel,snurmine/camel,josefkarasek/camel,brreitme/camel,mike-kukla/camel,objectiser/camel,noelo/camel,pkletsko/camel,pkletsko/camel,ssharma/camel,duro1/camel,ge0ffrey/camel,snadakuduru/camel,akhettar/camel,aaronwalker/camel,tlehoux/camel,scranton/camel,woj-i/camel,bhaveshdt/camel,josefkarasek/camel,aaronwalker/camel,veithen/camel,dkhanolkar/camel,chirino/camel,FingolfinTEK/camel,jonmcewen/camel,jpav/camel,stravag/camel,zregvart/camel,RohanHart/camel,akhettar/camel,FingolfinTEK/camel,ge0ffrey/camel,askannon/camel,anoordover/camel,jollygeorge/camel,duro1/camel,stalet/camel,rmarting/camel,tadayosi/camel,aaronwalker/camel,nboukhed/camel,joakibj/camel,jollygeorge/camel,nikvaessen/camel,lowwool/camel,snadakuduru/camel,MohammedHammam/camel,tadayosi/camel,kevinearls/camel,stravag/camel,Fabryprog/camel,MohammedHammam/camel,mnki/camel,ullgren/camel,jamesnetherton/camel,bfitzpat/camel,iweiss/camel,adessaigne/camel,arnaud-deprez/camel,grange74/camel,punkhorn/camel-upstream,MrCoder/camel
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.builder; import java.util.ArrayList; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.camel.CamelContext; import org.apache.camel.Endpoint; import org.apache.camel.Predicate; import org.apache.camel.Route; import org.apache.camel.Routes; import org.apache.camel.impl.DefaultCamelContext; import org.apache.camel.model.ChoiceType; import org.apache.camel.model.ExceptionType; import org.apache.camel.model.InterceptType; import org.apache.camel.model.ProcessorType; import org.apache.camel.model.RouteType; import org.apache.camel.model.RoutesType; import org.apache.camel.processor.DelegateProcessor; import org.apache.camel.processor.interceptor.StreamCachingInterceptor; /** * A <a href="http://activemq.apache.org/camel/dsl.html">Java DSL</a> which is * used to build {@link Route} instances in a {@link CamelContext} for smart routing. * * @version $Revision$ */ public abstract class RouteBuilder extends BuilderSupport implements Routes { private AtomicBoolean initialized = new AtomicBoolean(false); private RoutesType routeCollection = new RoutesType(); private List<Route> routes = new ArrayList<Route>(); public RouteBuilder() { this(null); } public RouteBuilder(CamelContext context) { super(context); } @Override public String toString() { return routeCollection.toString(); } /** * Called on initialization to to build the required destinationBuilders */ public abstract void configure() throws Exception; /** * Creates a new route from the given URI input */ public RouteType from(String uri) { RouteType answer = routeCollection.from(uri); configureRoute(answer); return answer; } /** * Creates a new route from the given endpoint */ public RouteType from(Endpoint endpoint) { RouteType answer = routeCollection.from(endpoint); configureRoute(answer); return answer; } /** * Installs the given error handler builder * * @param errorHandlerBuilder the error handler to be used by default for * all child routes * @return the current builder with the error handler configured */ public RouteBuilder errorHandler(ErrorHandlerBuilder errorHandlerBuilder) { setErrorHandlerBuilder(errorHandlerBuilder); return this; } /** * Configures whether or not the error handler is inherited by every * processing node (or just the top most one) * * @param value the flag as to whether error handlers should be inherited or not * @return the current builder */ public RouteBuilder inheritErrorHandler(boolean value) { routeCollection.setInheritErrorHandlerFlag(value); return this; } /** * Adds the given interceptor to this route */ public RouteBuilder intercept(DelegateProcessor interceptor) { routeCollection.intercept(interceptor); return this; } /** * Adds a route for an interceptor; use the {@link ProcessorType#proceed()} method * to continue processing the underlying route being intercepted. */ public InterceptType intercept() { return routeCollection.intercept(); } /** * Applies a route for an interceptor if the given predicate is true * otherwise the interceptor route is not applied */ public ChoiceType intercept(Predicate predicate) { return routeCollection.intercept(predicate); } /** * Adds an exception handler route for the given exception type */ public ExceptionType exception(Class exceptionType) { return routeCollection.exception(exceptionType); } // Properties // ----------------------------------------------------------------------- public CamelContext getContext() { CamelContext context = super.getContext(); if (context == null) { context = createContainer(); setContext(context); } return context; } /** * Uses {@link org.apache.camel.CamelContext#getRoutes()} to return the routes in the context. */ public List<Route> getRouteList() throws Exception { checkInitialized(); return routes; } @Override public void setInheritErrorHandler(boolean inheritErrorHandler) { super.setInheritErrorHandler(inheritErrorHandler); routeCollection.setInheritErrorHandlerFlag(inheritErrorHandler); } @Override public void setErrorHandlerBuilder(ErrorHandlerBuilder errorHandlerBuilder) { super.setErrorHandlerBuilder(errorHandlerBuilder); routeCollection.setErrorHandlerBuilder(getErrorHandlerBuilder()); } // Implementation methods // ----------------------------------------------------------------------- protected void checkInitialized() throws Exception { if (initialized.compareAndSet(false, true)) { // Set the CamelContext ErrorHandler here CamelContext camelContext = getContext(); if (camelContext.getErrorHandlerBuilder() != null) { setErrorHandlerBuilder(camelContext.getErrorHandlerBuilder()); } configure(); populateRoutes(routes); } } protected void populateRoutes(List<Route> routes) throws Exception { CamelContext camelContext = getContext(); if (camelContext == null) { throw new IllegalArgumentException("No CamelContext has been injected!"); } routeCollection.setCamelContext(camelContext); camelContext.addRouteDefinitions(routeCollection.getRoutes()); } public void setRouteCollection(RoutesType routeCollection) { this.routeCollection = routeCollection; } public RoutesType getRouteCollection() { return this.routeCollection; } /** * Completely disable stream caching for all routes being defined in the same RouteBuilder after this. */ public void noStreamCaching() { StreamCachingInterceptor.noStreamCaching(routeCollection.getInterceptors()); } /** * Enable stream caching for all routes being defined in the same RouteBuilder after this call. */ public void streamCaching() { routeCollection.intercept(new StreamCachingInterceptor()); } /** * Factory method */ protected CamelContext createContainer() { return new DefaultCamelContext(); } protected void configureRoute(RouteType route) { route.setGroup(getClass().getName()); } protected void addRoutes(Routes routes) throws Exception { getContext().addRoutes(routes); } }
camel-core/src/main/java/org/apache/camel/builder/RouteBuilder.java
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.builder; import java.util.List; import java.util.concurrent.atomic.AtomicBoolean; import org.apache.camel.CamelContext; import org.apache.camel.Endpoint; import org.apache.camel.Predicate; import org.apache.camel.Route; import org.apache.camel.Routes; import org.apache.camel.impl.DefaultCamelContext; import org.apache.camel.model.ChoiceType; import org.apache.camel.model.ExceptionType; import org.apache.camel.model.InterceptType; import org.apache.camel.model.ProcessorType; import org.apache.camel.model.RouteType; import org.apache.camel.model.RoutesType; import org.apache.camel.processor.DelegateProcessor; import org.apache.camel.processor.interceptor.StreamCachingInterceptor; /** * A <a href="http://activemq.apache.org/camel/dsl.html">Java DSL</a> which is * used to build {@link Route} instances in a {@link CamelContext} for smart routing. * * @version $Revision$ */ public abstract class RouteBuilder extends BuilderSupport implements Routes { private AtomicBoolean initialized = new AtomicBoolean(false); private RoutesType routeCollection = new RoutesType(); public RouteBuilder() { this(null); } public RouteBuilder(CamelContext context) { super(context); } @Override public String toString() { return routeCollection.toString(); } /** * Called on initialization to to build the required destinationBuilders */ public abstract void configure() throws Exception; /** * Creates a new route from the given URI input */ public RouteType from(String uri) { RouteType answer = routeCollection.from(uri); configureRoute(answer); return answer; } /** * Creates a new route from the given endpoint */ public RouteType from(Endpoint endpoint) { RouteType answer = routeCollection.from(endpoint); configureRoute(answer); return answer; } /** * Installs the given error handler builder * * @param errorHandlerBuilder the error handler to be used by default for * all child routes * @return the current builder with the error handler configured */ public RouteBuilder errorHandler(ErrorHandlerBuilder errorHandlerBuilder) { setErrorHandlerBuilder(errorHandlerBuilder); return this; } /** * Configures whether or not the error handler is inherited by every * processing node (or just the top most one) * * @param value the flag as to whether error handlers should be inherited or not * @return the current builder */ public RouteBuilder inheritErrorHandler(boolean value) { routeCollection.setInheritErrorHandlerFlag(value); return this; } /** * Adds the given interceptor to this route */ public RouteBuilder intercept(DelegateProcessor interceptor) { routeCollection.intercept(interceptor); return this; } /** * Adds a route for an interceptor; use the {@link ProcessorType#proceed()} method * to continue processing the underlying route being intercepted. */ public InterceptType intercept() { return routeCollection.intercept(); } /** * Applies a route for an interceptor if the given predicate is true * otherwise the interceptor route is not applied */ public ChoiceType intercept(Predicate predicate) { return routeCollection.intercept(predicate); } /** * Adds an exception handler route for the given exception type */ public ExceptionType exception(Class exceptionType) { return routeCollection.exception(exceptionType); } // Properties // ----------------------------------------------------------------------- public CamelContext getContext() { CamelContext context = super.getContext(); if (context == null) { context = createContainer(); setContext(context); } return context; } /** * Uses {@link org.apache.camel.CamelContext#getRoutes()} to return the routes in the context. */ public List<Route> getRouteList() throws Exception { checkInitialized(); return null; } @Override public void setInheritErrorHandler(boolean inheritErrorHandler) { super.setInheritErrorHandler(inheritErrorHandler); routeCollection.setInheritErrorHandlerFlag(inheritErrorHandler); } @Override public void setErrorHandlerBuilder(ErrorHandlerBuilder errorHandlerBuilder) { super.setErrorHandlerBuilder(errorHandlerBuilder); routeCollection.setErrorHandlerBuilder(getErrorHandlerBuilder()); } // Implementation methods // ----------------------------------------------------------------------- protected void checkInitialized() throws Exception { if (initialized.compareAndSet(false, true)) { // Set the CamelContext ErrorHandler here CamelContext camelContext = getContext(); if (camelContext.getErrorHandlerBuilder() != null) { setErrorHandlerBuilder(camelContext.getErrorHandlerBuilder()); } configure(); populateRoutes(); } } protected void populateRoutes() throws Exception { CamelContext camelContext = getContext(); if (camelContext == null) { throw new IllegalArgumentException("No CamelContext has been injected!"); } routeCollection.setCamelContext(camelContext); camelContext.addRouteDefinitions(routeCollection.getRoutes()); } public void setRouteCollection(RoutesType routeCollection) { this.routeCollection = routeCollection; } public RoutesType getRouteCollection() { return this.routeCollection; } /** * Completely disable stream caching for all routes being defined in the same RouteBuilder after this. */ public void noStreamCaching() { StreamCachingInterceptor.noStreamCaching(routeCollection.getInterceptors()); } /** * Enable stream caching for all routes being defined in the same RouteBuilder after this call. */ public void streamCaching() { routeCollection.intercept(new StreamCachingInterceptor()); } /** * Factory method */ protected CamelContext createContainer() { return new DefaultCamelContext(); } protected void configureRoute(RouteType route) { route.setGroup(getClass().getName()); } protected void addRoutes(Routes routes) throws Exception { getContext().addRoutes(routes); } }
CAMEL-919: Reverted back the routes, that is used by BAM. Thanks Willem. git-svn-id: e3ccc80b644512be24afa6caf639b2d1f1969354@697666 13f79535-47bb-0310-9956-ffa450edef68
camel-core/src/main/java/org/apache/camel/builder/RouteBuilder.java
CAMEL-919: Reverted back the routes, that is used by BAM. Thanks Willem.
Java
apache-2.0
d3d8972af0a73d915e9f77f73d705517322e5b16
0
trustin/armeria,trustin/armeria,line/armeria,anuraaga/armeria,anuraaga/armeria,anuraaga/armeria,kojilin/armeria,imasahiro/armeria,trustin/armeria,trustin/armeria,anuraaga/armeria,kojilin/armeria,kojilin/armeria,dongjinleekr/armeria,minwoox/armeria,imasahiro/armeria,dongjinleekr/armeria,kojilin/armeria,dongjinleekr/armeria,line/armeria,jmostella/armeria,line/armeria,jmostella/armeria,jmostella/armeria,trustin/armeria,jmostella/armeria,minwoox/armeria,line/armeria,trustin/armeria,dongjinleekr/armeria,kojilin/armeria,imasahiro/armeria,anuraaga/armeria,line/armeria,kojilin/armeria,minwoox/armeria,jmostella/armeria,imasahiro/armeria,minwoox/armeria,dongjinleekr/armeria,minwoox/armeria,anuraaga/armeria,minwoox/armeria,imasahiro/armeria,line/armeria
/* * Copyright 2015 LINE Corporation * * LINE Corporation licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ /* * Copyright 2014 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.linecorp.armeria.common; import static java.util.Objects.requireNonNull; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.util.Map; import com.google.common.base.Ascii; import com.google.common.collect.ImmutableMap; import io.netty.util.AsciiString; /** * Standard HTTP header names. * * <p>These are all defined as lowercase to support HTTP/2 requirements while also not * violating HTTP/1.x requirements. New header names should always be lowercase. */ public final class HttpHeaderNames { // Pseudo-headers /** * {@code ":method"}. */ public static final AsciiString METHOD = new AsciiString(":method"); /** * {@code ":scheme"}. */ public static final AsciiString SCHEME = new AsciiString(":scheme"); /** * {@code ":authority"}. */ public static final AsciiString AUTHORITY = new AsciiString(":authority"); /** * {@code ":path"}. */ public static final AsciiString PATH = new AsciiString(":path"); /** * {@code ":status"}. */ public static final AsciiString STATUS = new AsciiString(":status"); // Ordinary headers /** * {@code "accept"}. */ public static final AsciiString ACCEPT = new AsciiString("accept"); /** * {@code "accept-charset"}. */ public static final AsciiString ACCEPT_CHARSET = new AsciiString("accept-charset"); /** * {@code "accept-encoding"}. */ public static final AsciiString ACCEPT_ENCODING = new AsciiString("accept-encoding"); /** * {@code "accept-language"}. */ public static final AsciiString ACCEPT_LANGUAGE = new AsciiString("accept-language"); /** * {@code "accept-ranges"}. */ public static final AsciiString ACCEPT_RANGES = new AsciiString("accept-ranges"); /** * {@code "accept-patch"}. */ public static final AsciiString ACCEPT_PATCH = new AsciiString("accept-patch"); /** * {@code "access-control-allow-credentials"}. */ public static final AsciiString ACCESS_CONTROL_ALLOW_CREDENTIALS = new AsciiString("access-control-allow-credentials"); /** * {@code "access-control-allow-headers"}. */ public static final AsciiString ACCESS_CONTROL_ALLOW_HEADERS = new AsciiString("access-control-allow-headers"); /** * {@code "access-control-allow-methods"}. */ public static final AsciiString ACCESS_CONTROL_ALLOW_METHODS = new AsciiString("access-control-allow-methods"); /** * {@code "access-control-allow-origin"}. */ public static final AsciiString ACCESS_CONTROL_ALLOW_ORIGIN = new AsciiString("access-control-allow-origin"); /** * {@code "access-control-expose-headers"}. */ public static final AsciiString ACCESS_CONTROL_EXPOSE_HEADERS = new AsciiString("access-control-expose-headers"); /** * {@code "access-control-max-age"}. */ public static final AsciiString ACCESS_CONTROL_MAX_AGE = new AsciiString("access-control-max-age"); /** * {@code "access-control-request-headers"}. */ public static final AsciiString ACCESS_CONTROL_REQUEST_HEADERS = new AsciiString("access-control-request-headers"); /** * {@code "access-control-request-method"}. */ public static final AsciiString ACCESS_CONTROL_REQUEST_METHOD = new AsciiString("access-control-request-method"); /** * {@code "age"}. */ public static final AsciiString AGE = new AsciiString("age"); /** * {@code "allow"}. */ public static final AsciiString ALLOW = new AsciiString("allow"); /** * {@code "authorization"}. */ public static final AsciiString AUTHORIZATION = new AsciiString("authorization"); /** * {@code "cache-control"}. */ public static final AsciiString CACHE_CONTROL = new AsciiString("cache-control"); /** * {@code "connection"}. */ public static final AsciiString CONNECTION = new AsciiString("connection"); /** * {@code "content-base"}. */ public static final AsciiString CONTENT_BASE = new AsciiString("content-base"); /** * {@code "content-disposition"}. */ public static final AsciiString CONTENT_DISPOSITION = new AsciiString("content-disposition"); /** * {@code "content-encoding"}. */ public static final AsciiString CONTENT_ENCODING = new AsciiString("content-encoding"); /** * {@code "content-language"}. */ public static final AsciiString CONTENT_LANGUAGE = new AsciiString("content-language"); /** * {@code "content-length"}. */ public static final AsciiString CONTENT_LENGTH = new AsciiString("content-length"); /** * {@code "content-location"}. */ public static final AsciiString CONTENT_LOCATION = new AsciiString("content-location"); /** * {@code "content-md5"}. */ public static final AsciiString CONTENT_MD5 = new AsciiString("content-md5"); /** * {@code "content-range"}. */ public static final AsciiString CONTENT_RANGE = new AsciiString("content-range"); /** * {@code "content-security-policy"}. */ public static final AsciiString CONTENT_SECURITY_POLICY = new AsciiString("content-security-policy"); /** * {@code "content-transfer-encoding"}. */ public static final AsciiString CONTENT_TRANSFER_ENCODING = new AsciiString("content-transfer-encoding"); /** * {@code "content-type"}. */ public static final AsciiString CONTENT_TYPE = new AsciiString("content-type"); /** * {@code "cookie"}. */ public static final AsciiString COOKIE = new AsciiString("cookie"); /** * {@code "date"}. */ public static final AsciiString DATE = new AsciiString("date"); /** * {@code "etag"}. */ public static final AsciiString ETAG = new AsciiString("etag"); /** * {@code "expect"}. */ public static final AsciiString EXPECT = new AsciiString("expect"); /** * {@code "expires"}. */ public static final AsciiString EXPIRES = new AsciiString("expires"); /** * {@code "from"}. */ public static final AsciiString FROM = new AsciiString("from"); /** * {@code "host"}. */ public static final AsciiString HOST = new AsciiString("host"); /** * {@code "if-match"}. */ public static final AsciiString IF_MATCH = new AsciiString("if-match"); /** * {@code "if-modified-since"}. */ public static final AsciiString IF_MODIFIED_SINCE = new AsciiString("if-modified-since"); /** * {@code "if-none-match"}. */ public static final AsciiString IF_NONE_MATCH = new AsciiString("if-none-match"); /** * {@code "if-range"}. */ public static final AsciiString IF_RANGE = new AsciiString("if-range"); /** * {@code "if-unmodified-since"}. */ public static final AsciiString IF_UNMODIFIED_SINCE = new AsciiString("if-unmodified-since"); /** * {@code "keep-alive"} * * @deprecated Use {@link #CONNECTION} instead. */ @Deprecated public static final AsciiString KEEP_ALIVE = new AsciiString("keep-alive"); /** * {@code "last-modified"}. */ public static final AsciiString LAST_MODIFIED = new AsciiString("last-modified"); /** * {@code "location"}. */ public static final AsciiString LOCATION = new AsciiString("location"); /** * {@code "max-forwards"}. */ public static final AsciiString MAX_FORWARDS = new AsciiString("max-forwards"); /** * {@code "origin"}. */ public static final AsciiString ORIGIN = new AsciiString("origin"); /** * {@code "pragma"}. */ public static final AsciiString PRAGMA = new AsciiString("pragma"); /** * {@code "proxy-authenticate"}. */ public static final AsciiString PROXY_AUTHENTICATE = new AsciiString("proxy-authenticate"); /** * {@code "proxy-authorization"}. */ public static final AsciiString PROXY_AUTHORIZATION = new AsciiString("proxy-authorization"); /** * {@code "proxy-connection"} * * @deprecated Use {@link #CONNECTION} instead. */ @Deprecated public static final AsciiString PROXY_CONNECTION = new AsciiString("proxy-connection"); /** * {@code "range"}. */ public static final AsciiString RANGE = new AsciiString("range"); /** * {@code "referer"}. */ public static final AsciiString REFERER = new AsciiString("referer"); /** * {@code "retry-after"}. */ public static final AsciiString RETRY_AFTER = new AsciiString("retry-after"); /** * {@code "sec-websocket-key1"}. */ public static final AsciiString SEC_WEBSOCKET_KEY1 = new AsciiString("sec-websocket-key1"); /** * {@code "sec-websocket-key2"}. */ public static final AsciiString SEC_WEBSOCKET_KEY2 = new AsciiString("sec-websocket-key2"); /** * {@code "sec-websocket-location"}. */ public static final AsciiString SEC_WEBSOCKET_LOCATION = new AsciiString("sec-websocket-location"); /** * {@code "sec-websocket-origin"}. */ public static final AsciiString SEC_WEBSOCKET_ORIGIN = new AsciiString("sec-websocket-origin"); /** * {@code "sec-websocket-protocol"}. */ public static final AsciiString SEC_WEBSOCKET_PROTOCOL = new AsciiString("sec-websocket-protocol"); /** * {@code "sec-websocket-version"}. */ public static final AsciiString SEC_WEBSOCKET_VERSION = new AsciiString("sec-websocket-version"); /** * {@code "sec-websocket-key"}. */ public static final AsciiString SEC_WEBSOCKET_KEY = new AsciiString("sec-websocket-key"); /** * {@code "sec-websocket-accept"}. */ public static final AsciiString SEC_WEBSOCKET_ACCEPT = new AsciiString("sec-websocket-accept"); /** * {@code "sec-websocket-protocol"}. */ public static final AsciiString SEC_WEBSOCKET_EXTENSIONS = new AsciiString("sec-websocket-extensions"); /** * {@code "server"}. */ public static final AsciiString SERVER = new AsciiString("server"); /** * {@code "set-cookie"}. */ public static final AsciiString SET_COOKIE = new AsciiString("set-cookie"); /** * {@code "set-cookie2"}. */ public static final AsciiString SET_COOKIE2 = new AsciiString("set-cookie2"); /** * {@code "te"}. */ public static final AsciiString TE = new AsciiString("te"); /** * {@code "trailer"}. */ public static final AsciiString TRAILER = new AsciiString("trailer"); /** * {@code "transfer-encoding"}. */ public static final AsciiString TRANSFER_ENCODING = new AsciiString("transfer-encoding"); /** * {@code "upgrade"}. */ public static final AsciiString UPGRADE = new AsciiString("upgrade"); /** * {@code "user-agent"}. */ public static final AsciiString USER_AGENT = new AsciiString("user-agent"); /** * {@code "vary"}. */ public static final AsciiString VARY = new AsciiString("vary"); /** * {@code "via"}. */ public static final AsciiString VIA = new AsciiString("via"); /** * {@code "warning"}. */ public static final AsciiString WARNING = new AsciiString("warning"); /** * {@code "websocket-location"}. */ public static final AsciiString WEBSOCKET_LOCATION = new AsciiString("websocket-location"); /** * {@code "websocket-origin"}. */ public static final AsciiString WEBSOCKET_ORIGIN = new AsciiString("websocket-origin"); /** * {@code "websocket-protocol"}. */ public static final AsciiString WEBSOCKET_PROTOCOL = new AsciiString("websocket-protocol"); /** * {@code "www-authenticate"}. */ public static final AsciiString WWW_AUTHENTICATE = new AsciiString("www-authenticate"); /** * {@code "x-frame-options"}. */ public static final AsciiString X_FRAME_OPTIONS = new AsciiString("x-frame-options"); private static final Map<String, AsciiString> map; static { final ImmutableMap.Builder<String, AsciiString> builder = ImmutableMap.builder(); for (Field f : HttpHeaderNames.class.getDeclaredFields()) { final int m = f.getModifiers(); if (Modifier.isPublic(m) && Modifier.isStatic(m) && Modifier.isFinal(m) && f.getType() == AsciiString.class) { final AsciiString name; try { name = (AsciiString) f.get(null); } catch (Exception e) { throw new Error(e); } builder.put(name.toString(), name); } } map = builder.build(); } /** * Lower-cases and converts the specified header name into an {@link AsciiString}. If {@code name} is * a known header name, this method will return a pre-instantiated {@link AsciiString} to reduce * the allocation rate of {@link AsciiString}. */ public static AsciiString of(String name) { name = Ascii.toLowerCase(requireNonNull(name, "name")); final AsciiString asciiName = map.get(name); return asciiName != null ? asciiName : new AsciiString(name); } private HttpHeaderNames() {} }
core/src/main/java/com/linecorp/armeria/common/HttpHeaderNames.java
/* * Copyright 2015 LINE Corporation * * LINE Corporation licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ /* * Copyright 2014 The Netty Project * * The Netty Project licenses this file to you under the Apache License, * version 2.0 (the "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations * under the License. */ package com.linecorp.armeria.common; import static java.util.Objects.requireNonNull; import java.lang.reflect.Field; import java.lang.reflect.Modifier; import java.util.Map; import com.google.common.base.Ascii; import com.google.common.collect.ImmutableMap; import io.netty.util.AsciiString; /** * Standard HTTP header names. * * <p>These are all defined as lowercase to support HTTP/2 requirements while also not * violating HTTP/1.x requirements. New header names should always be lowercase. */ public final class HttpHeaderNames { // Pseudo-headers /** * {@code ":method"}. */ public static final AsciiString METHOD = new AsciiString(":method"); /** * {@code ":scheme"}. */ public static final AsciiString SCHEME = new AsciiString(":scheme"); /** * {@code ":authority"}. */ public static final AsciiString AUTHORITY = new AsciiString(":authority"); /** * {@code ":path"}. */ public static final AsciiString PATH = new AsciiString(":path"); /** * {@code ":status"}. */ public static final AsciiString STATUS = new AsciiString(":status"); // Ordinary headers /** * {@code "accept"}. */ public static final AsciiString ACCEPT = new AsciiString("accept"); /** * {@code "accept-charset"}. */ public static final AsciiString ACCEPT_CHARSET = new AsciiString("accept-charset"); /** * {@code "accept-encoding"}. */ public static final AsciiString ACCEPT_ENCODING = new AsciiString("accept-encoding"); /** * {@code "accept-language"}. */ public static final AsciiString ACCEPT_LANGUAGE = new AsciiString("accept-language"); /** * {@code "accept-ranges"}. */ public static final AsciiString ACCEPT_RANGES = new AsciiString("accept-ranges"); /** * {@code "accept-patch"}. */ public static final AsciiString ACCEPT_PATCH = new AsciiString("accept-patch"); /** * {@code "access-control-allow-credentials"}. */ public static final AsciiString ACCESS_CONTROL_ALLOW_CREDENTIALS = new AsciiString("access-control-allow-credentials"); /** * {@code "access-control-allow-headers"}. */ public static final AsciiString ACCESS_CONTROL_ALLOW_HEADERS = new AsciiString("access-control-allow-headers"); /** * {@code "access-control-allow-methods"}. */ public static final AsciiString ACCESS_CONTROL_ALLOW_METHODS = new AsciiString("access-control-allow-methods"); /** * {@code "access-control-allow-origin"}. */ public static final AsciiString ACCESS_CONTROL_ALLOW_ORIGIN = new AsciiString("access-control-allow-origin"); /** * {@code "access-control-expose-headers"}. */ public static final AsciiString ACCESS_CONTROL_EXPOSE_HEADERS = new AsciiString("access-control-expose-headers"); /** * {@code "access-control-max-age"}. */ public static final AsciiString ACCESS_CONTROL_MAX_AGE = new AsciiString("access-control-max-age"); /** * {@code "access-control-request-headers"}. */ public static final AsciiString ACCESS_CONTROL_REQUEST_HEADERS = new AsciiString("access-control-request-headers"); /** * {@code "access-control-request-method"}. */ public static final AsciiString ACCESS_CONTROL_REQUEST_METHOD = new AsciiString("access-control-request-method"); /** * {@code "age"}. */ public static final AsciiString AGE = new AsciiString("age"); /** * {@code "allow"}. */ public static final AsciiString ALLOW = new AsciiString("allow"); /** * {@code "authorization"}. */ public static final AsciiString AUTHORIZATION = new AsciiString("authorization"); /** * {@code "cache-control"}. */ public static final AsciiString CACHE_CONTROL = new AsciiString("cache-control"); /** * {@code "connection"}. */ public static final AsciiString CONNECTION = new AsciiString("connection"); /** * {@code "content-base"}. */ public static final AsciiString CONTENT_BASE = new AsciiString("content-base"); /** * {@code "content-encoding"}. */ public static final AsciiString CONTENT_ENCODING = new AsciiString("content-encoding"); /** * {@code "content-language"}. */ public static final AsciiString CONTENT_LANGUAGE = new AsciiString("content-language"); /** * {@code "content-length"}. */ public static final AsciiString CONTENT_LENGTH = new AsciiString("content-length"); /** * {@code "content-location"}. */ public static final AsciiString CONTENT_LOCATION = new AsciiString("content-location"); /** * {@code "content-transfer-encoding"}. */ public static final AsciiString CONTENT_TRANSFER_ENCODING = new AsciiString("content-transfer-encoding"); /** * {@code "content-disposition"}. */ public static final AsciiString CONTENT_DISPOSITION = new AsciiString("content-disposition"); /** * {@code "content-md5"}. */ public static final AsciiString CONTENT_MD5 = new AsciiString("content-md5"); /** * {@code "content-range"}. */ public static final AsciiString CONTENT_RANGE = new AsciiString("content-range"); /** * {@code "content-type"}. */ public static final AsciiString CONTENT_TYPE = new AsciiString("content-type"); /** * {@code "cookie"}. */ public static final AsciiString COOKIE = new AsciiString("cookie"); /** * {@code "date"}. */ public static final AsciiString DATE = new AsciiString("date"); /** * {@code "etag"}. */ public static final AsciiString ETAG = new AsciiString("etag"); /** * {@code "expect"}. */ public static final AsciiString EXPECT = new AsciiString("expect"); /** * {@code "expires"}. */ public static final AsciiString EXPIRES = new AsciiString("expires"); /** * {@code "from"}. */ public static final AsciiString FROM = new AsciiString("from"); /** * {@code "host"}. */ public static final AsciiString HOST = new AsciiString("host"); /** * {@code "if-match"}. */ public static final AsciiString IF_MATCH = new AsciiString("if-match"); /** * {@code "if-modified-since"}. */ public static final AsciiString IF_MODIFIED_SINCE = new AsciiString("if-modified-since"); /** * {@code "if-none-match"}. */ public static final AsciiString IF_NONE_MATCH = new AsciiString("if-none-match"); /** * {@code "if-range"}. */ public static final AsciiString IF_RANGE = new AsciiString("if-range"); /** * {@code "if-unmodified-since"}. */ public static final AsciiString IF_UNMODIFIED_SINCE = new AsciiString("if-unmodified-since"); /** * @deprecated Use {@link #CONNECTION} instead. * * {@code "keep-alive"} */ @Deprecated public static final AsciiString KEEP_ALIVE = new AsciiString("keep-alive"); /** * {@code "last-modified"}. */ public static final AsciiString LAST_MODIFIED = new AsciiString("last-modified"); /** * {@code "location"}. */ public static final AsciiString LOCATION = new AsciiString("location"); /** * {@code "max-forwards"}. */ public static final AsciiString MAX_FORWARDS = new AsciiString("max-forwards"); /** * {@code "origin"}. */ public static final AsciiString ORIGIN = new AsciiString("origin"); /** * {@code "pragma"}. */ public static final AsciiString PRAGMA = new AsciiString("pragma"); /** * {@code "proxy-authenticate"}. */ public static final AsciiString PROXY_AUTHENTICATE = new AsciiString("proxy-authenticate"); /** * {@code "proxy-authorization"}. */ public static final AsciiString PROXY_AUTHORIZATION = new AsciiString("proxy-authorization"); /** * @deprecated Use {@link #CONNECTION} instead. * * {@code "proxy-connection"} */ @Deprecated public static final AsciiString PROXY_CONNECTION = new AsciiString("proxy-connection"); /** * {@code "range"}. */ public static final AsciiString RANGE = new AsciiString("range"); /** * {@code "referer"}. */ public static final AsciiString REFERER = new AsciiString("referer"); /** * {@code "retry-after"}. */ public static final AsciiString RETRY_AFTER = new AsciiString("retry-after"); /** * {@code "sec-websocket-key1"}. */ public static final AsciiString SEC_WEBSOCKET_KEY1 = new AsciiString("sec-websocket-key1"); /** * {@code "sec-websocket-key2"}. */ public static final AsciiString SEC_WEBSOCKET_KEY2 = new AsciiString("sec-websocket-key2"); /** * {@code "sec-websocket-location"}. */ public static final AsciiString SEC_WEBSOCKET_LOCATION = new AsciiString("sec-websocket-location"); /** * {@code "sec-websocket-origin"}. */ public static final AsciiString SEC_WEBSOCKET_ORIGIN = new AsciiString("sec-websocket-origin"); /** * {@code "sec-websocket-protocol"}. */ public static final AsciiString SEC_WEBSOCKET_PROTOCOL = new AsciiString("sec-websocket-protocol"); /** * {@code "sec-websocket-version"}. */ public static final AsciiString SEC_WEBSOCKET_VERSION = new AsciiString("sec-websocket-version"); /** * {@code "sec-websocket-key"}. */ public static final AsciiString SEC_WEBSOCKET_KEY = new AsciiString("sec-websocket-key"); /** * {@code "sec-websocket-accept"}. */ public static final AsciiString SEC_WEBSOCKET_ACCEPT = new AsciiString("sec-websocket-accept"); /** * {@code "sec-websocket-protocol"}. */ public static final AsciiString SEC_WEBSOCKET_EXTENSIONS = new AsciiString("sec-websocket-extensions"); /** * {@code "server"}. */ public static final AsciiString SERVER = new AsciiString("server"); /** * {@code "set-cookie"}. */ public static final AsciiString SET_COOKIE = new AsciiString("set-cookie"); /** * {@code "set-cookie2"}. */ public static final AsciiString SET_COOKIE2 = new AsciiString("set-cookie2"); /** * {@code "te"}. */ public static final AsciiString TE = new AsciiString("te"); /** * {@code "trailer"}. */ public static final AsciiString TRAILER = new AsciiString("trailer"); /** * {@code "transfer-encoding"}. */ public static final AsciiString TRANSFER_ENCODING = new AsciiString("transfer-encoding"); /** * {@code "upgrade"}. */ public static final AsciiString UPGRADE = new AsciiString("upgrade"); /** * {@code "user-agent"}. */ public static final AsciiString USER_AGENT = new AsciiString("user-agent"); /** * {@code "vary"}. */ public static final AsciiString VARY = new AsciiString("vary"); /** * {@code "via"}. */ public static final AsciiString VIA = new AsciiString("via"); /** * {@code "warning"}. */ public static final AsciiString WARNING = new AsciiString("warning"); /** * {@code "websocket-location"}. */ public static final AsciiString WEBSOCKET_LOCATION = new AsciiString("websocket-location"); /** * {@code "websocket-origin"}. */ public static final AsciiString WEBSOCKET_ORIGIN = new AsciiString("websocket-origin"); /** * {@code "websocket-protocol"}. */ public static final AsciiString WEBSOCKET_PROTOCOL = new AsciiString("websocket-protocol"); /** * {@code "www-authenticate"}. */ public static final AsciiString WWW_AUTHENTICATE = new AsciiString("www-authenticate"); private static final Map<String, AsciiString> map; static { final ImmutableMap.Builder<String, AsciiString> builder = ImmutableMap.builder(); for (Field f : HttpHeaderNames.class.getDeclaredFields()) { final int m = f.getModifiers(); if (Modifier.isPublic(m) && Modifier.isStatic(m) && Modifier.isFinal(m) && f.getType() == AsciiString.class) { final AsciiString name; try { name = (AsciiString) f.get(null); } catch (Exception e) { throw new Error(e); } builder.put(name.toString(), name); } } map = builder.build(); } /** * Lower-cases and converts the specified header name into an {@link AsciiString}. If {@code name} is * a known header name, this method will return a pre-instantiated {@link AsciiString} to reduce * the allocation rate of {@link AsciiString}. */ public static AsciiString of(String name) { name = Ascii.toLowerCase(requireNonNull(name, "name")); final AsciiString asciiName = map.get(name); return asciiName != null ? asciiName : new AsciiString(name); } private HttpHeaderNames() {} }
Add 'content-security-policy' and 'x-frame-options' to HttpHeaderNames (#657) Also: - Sort the header names alphabetically - Fix incorrect use of `@deprecated` Javadoc tag Related: - https://github.com/netty/netty/pull/6904
core/src/main/java/com/linecorp/armeria/common/HttpHeaderNames.java
Add 'content-security-policy' and 'x-frame-options' to HttpHeaderNames (#657)
Java
apache-2.0
30933c7635782b4c2ac98c2dc960ab891f468d8f
0
HuyLafa/codeu_project_2017,HuyLafa/codeu_project_2017,HuyLafa/codeu_project_2017,HuyLafa/codeu_project_2017
package controllers; import play.mvc.*; import akka.NotUsed; import akka.actor.ActorSystem; import akka.event.LoggingAdapter; import akka.japi.Pair; import akka.japi.pf.PFBuilder; import akka.stream.Materializer; import akka.stream.javadsl.*; import akka.event.Logging; import play.libs.F; import play.mvc.Controller; import javax.inject.Inject; import java.net.URL; import java.util.concurrent.CompletableFuture; import views.html.chat; /** * A chat client using WebSocket. */ public class ChatController extends Controller { private final Flow<String, String, NotUsed> userFlow, userFlow2; @Inject public ChatController(ActorSystem actorSystem, Materializer mat) { System.out.println("injected"); org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(this.getClass()); LoggingAdapter logging = Logging.getLogger(actorSystem.eventStream(), logger.getName()); //noinspection unchecked Source<String, Sink<String, NotUsed>> source = MergeHub.of(String.class) .log("source", logging) .recoverWithRetries(-1, new PFBuilder().match(Throwable.class, e -> Source.empty()).build()); Sink<String, Source<String, NotUsed>> sink = BroadcastHub.of(String.class); Pair<Sink<String, NotUsed>, Source<String, NotUsed>> sinkSourcePair = source.toMat(sink, Keep.both()).run(mat); Sink<String, NotUsed> chatSink = sinkSourcePair.first(); Source<String, NotUsed> chatSource = sinkSourcePair.second(); this.userFlow = Flow.fromSinkAndSource(chatSink, chatSource).log("userFlow", logging); //noinspection unchecked source = MergeHub.of(String.class) .log("source2", logging) .recoverWithRetries(-1, new PFBuilder().match(Throwable.class, e -> Source.empty()).build()); sink = BroadcastHub.of(String.class); sinkSourcePair = source.toMat(sink, Keep.both()).run(mat); chatSink = sinkSourcePair.first(); chatSource = sinkSourcePair.second(); this.userFlow2 = Flow.fromSinkAndSource(chatSink, chatSource).log("userFlow2", logging); // this.userFlow3 = Flow.fromSinkAndSource(chatSink, chatSource).log("userFlow3", logging); } public Result index() { if (session("username") == null) { return redirect(routes.LoginController.display()); } Http.Request request = request(); System.out.println("index request :" + request); String url = routes.ChatController.websocket("room1").webSocketURL(request); System.out.println("original url: " + routes.ChatController.websocket("room1").url()); System.out.println("index url: " + url); return ok(chat.render(session("username"), url)); } public Result index2() { if (session("username") == null) { return redirect(routes.LoginController.display()); } Http.Request request = request(); String url = routes.ChatController.chat2().webSocketURL(request); System.out.println("index 2 request: " + url); return ok(chat.render(session("username"), url)); } public WebSocket websocket(String room) { return WebSocket.Text.acceptOrResult(request -> { System.out.println("give me the request: " + request); if (sameOriginCheck(request)) { return CompletableFuture.completedFuture(F.Either.Right(userFlow)); } else { return CompletableFuture.completedFuture(F.Either.Left(forbidden())); } }); } public WebSocket chat2() { return WebSocket.Text.acceptOrResult(request -> { if (sameOriginCheck(request)) { return CompletableFuture.completedFuture(F.Either.Right(userFlow2)); } else { return CompletableFuture.completedFuture(F.Either.Left(forbidden())); } }); } /** * Checks that the WebSocket comes from the same origin. This is necessary to protect * against Cross-Site WebSocket Hijacking as WebSocket does not implement Same Origin Policy. * * See https://tools.ietf.org/html/rfc6455#section-1.3 and * http://blog.dewhurstsecurity.com/2013/08/30/security-testing-html5-websockets.html */ private boolean sameOriginCheck(Http.RequestHeader request) { String[] origins = request.headers().get("Origin"); if (origins.length > 1) { // more than one origin found return false; } String origin = origins[0]; return originMatches(origin); } private boolean originMatches(String origin) { if (origin == null) return false; try { URL url = new URL(origin); return url.getHost().equals("localhost") && (url.getPort() == 9000 || url.getPort() == 19001); } catch (Exception e ) { return false; } } }
app/controllers/ChatController.java
package controllers; import play.mvc.*; import akka.NotUsed; import akka.actor.ActorSystem; import akka.event.LoggingAdapter; import akka.japi.Pair; import akka.japi.pf.PFBuilder; import akka.stream.Materializer; import akka.stream.javadsl.*; import akka.event.Logging; import play.libs.F; import play.mvc.Controller; import javax.inject.Inject; import java.net.URL; import java.util.concurrent.CompletableFuture; import views.html.chat; /** * A chat client using WebSocket. */ public class ChatController extends Controller { private final Flow<String, String, NotUsed> userFlow, userFlow2; @Inject public ChatController(ActorSystem actorSystem, Materializer mat) { System.out.println("injected"); org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(this.getClass()); LoggingAdapter logging = Logging.getLogger(actorSystem.eventStream(), logger.getName()); //noinspection unchecked Source<String, Sink<String, NotUsed>> source = MergeHub.of(String.class) .log("source", logging) .recoverWithRetries(-1, new PFBuilder().match(Throwable.class, e -> Source.empty()).build()); Sink<String, Source<String, NotUsed>> sink = BroadcastHub.of(String.class); Pair<Sink<String, NotUsed>, Source<String, NotUsed>> sinkSourcePair = source.toMat(sink, Keep.both()).run(mat); Sink<String, NotUsed> chatSink = sinkSourcePair.first(); Source<String, NotUsed> chatSource = sinkSourcePair.second(); this.userFlow = Flow.fromSinkAndSource(chatSink, chatSource).log("userFlow", logging); //noinspection unchecked source = MergeHub.of(String.class) .log("source2", logging) .recoverWithRetries(-1, new PFBuilder().match(Throwable.class, e -> Source.empty()).build()); sink = BroadcastHub.of(String.class); sinkSourcePair = source.toMat(sink, Keep.both()).run(mat); chatSink = sinkSourcePair.first(); chatSource = sinkSourcePair.second(); this.userFlow2 = Flow.fromSinkAndSource(chatSink, chatSource).log("userFlow2", logging); // this.userFlow3 = Flow.fromSinkAndSource(chatSink, chatSource).log("userFlow3", logging); } public Result index() { if (session("username") == null) { return redirect(routes.LoginController.display()); } Http.Request request = request(); String url = routes.ChatController.chat().webSocketURL(request); System.out.println("index request: " + url); return ok(chat.render(session("username"), url)); } public Result index2() { if (session("username") == null) { return redirect(routes.LoginController.display()); } Http.Request request = request(); String url = routes.ChatController.chat2().webSocketURL(request); System.out.println("index 2 request: " + url); return ok(chat.render(session("username"), url)); } public WebSocket chat() { return WebSocket.Text.acceptOrResult(request -> { if (sameOriginCheck(request)) { return CompletableFuture.completedFuture(F.Either.Right(userFlow)); } else { return CompletableFuture.completedFuture(F.Either.Left(forbidden())); } }); } public WebSocket chat2() { return WebSocket.Text.acceptOrResult(request -> { if (sameOriginCheck(request)) { return CompletableFuture.completedFuture(F.Either.Right(userFlow2)); } else { return CompletableFuture.completedFuture(F.Either.Left(forbidden())); } }); } /** * Checks that the WebSocket comes from the same origin. This is necessary to protect * against Cross-Site WebSocket Hijacking as WebSocket does not implement Same Origin Policy. * * See https://tools.ietf.org/html/rfc6455#section-1.3 and * http://blog.dewhurstsecurity.com/2013/08/30/security-testing-html5-websockets.html */ private boolean sameOriginCheck(Http.RequestHeader request) { String[] origins = request.headers().get("Origin"); if (origins.length > 1) { // more than one origin found return false; } String origin = origins[0]; return originMatches(origin); } private boolean originMatches(String origin) { if (origin == null) return false; try { URL url = new URL(origin); return url.getHost().equals("localhost") && (url.getPort() == 9000 || url.getPort() == 19001); } catch (Exception e ) { return false; } } }
Make custom URL for different websockets.
app/controllers/ChatController.java
Make custom URL for different websockets.
Java
apache-2.0
acaacf507f351c596e4b77de63ed5453b5b0871a
0
trifork/erjang,trifork/erjang,trifork/erjang,csae1152/erjang,trifork/erjang,csae1152/erjang,trifork/erjang,csae1152/erjang,csae1152/erjang,trifork/erjang,trifork/erjang,csae1152/erjang
/** * This file is part of Erjang - A JVM-based Erlang VM * * Copyright (c) 2009 by Trifork * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. **/ package erjang.m.erlang; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import kilim.Pausable; import erjang.BIF; import erjang.EAtom; import erjang.EInternalPort; import erjang.EObject; import erjang.EPort; import erjang.EProc; import erjang.ERT; import erjang.ESeq; import erjang.ESmall; import erjang.EString; import erjang.ETask; import erjang.ETuple; import erjang.ETuple2; import erjang.ETuple3; import erjang.NotImplemented; import erjang.driver.EDriver; import erjang.driver.EDriverTask; import erjang.driver.EExecDriverTask; import erjang.driver.EFDDriverTask; import erjang.driver.ESpawnDriverTask; import erjang.driver.tcp_inet.TCPINet; /** * */ public class ErlPort { public static final EAtom am_fd = EAtom.intern("fd"); private static final ByteBuffer EMPTY_BYTEBUFFER = ByteBuffer.allocate(0); static EAtom am_spawn = EAtom.intern("spawn"); static EAtom am_spawn_driver = EAtom.intern("spawn_driver"); static EAtom am_spawn_executable = EAtom.intern("spawn_executable"); @BIF static EObject port_command(EProc proc, EObject port, EObject data) throws Pausable { EInternalPort p = port.testInternalPort(); if (ERT.DEBUG_PORT) System.err.print("port_command "+port+", "+data); if (p == null) { port = ERT.whereis(port); if (port == ERT.am_undefined) port = null; else p = port.testInternalPort(); } List<ByteBuffer> ovec = new ArrayList<ByteBuffer>(); if (p == null || !data.collectIOList(ovec)) { if (ERT.DEBUG_PORT) { System.err.println("collect failed! or p==null: "+p); } throw ERT.badarg(port, data); } ByteBuffer[] out = new ByteBuffer[ovec.size()]; ovec.toArray(out); if (ERT.DEBUG_PORT) { System.err.print("EVEC: "); TCPINet.dump_write(out); } // System.err.println("packing "+data+"::"+data.getClass().getName()+" -> "+ovec); p.command(proc.self_handle(), out); return ERT.TRUE; } @BIF static EObject port_control(EProc proc, EObject port, EObject operation, EObject data) { try { return port_control0(proc, port, operation, data); } catch (RuntimeException e) { e.printStackTrace(); throw e; } catch (Error e) { e.printStackTrace(); throw e; } } static EObject port_control0(EProc proc, EObject port, EObject operation, EObject data) { EInternalPort p = port.testInternalPort(); if (p == null) { port = ERT.whereis(port); if (port == ERT.am_undefined) port = null; p = port.testInternalPort(); } ESmall op = operation.testSmall(); List<ByteBuffer> ovec = new ArrayList<ByteBuffer>(); if (p == null || op == null || !data.collectIOList(ovec)) { throw ERT.badarg(port, operation, data); } ByteBuffer cmd = flatten(ovec); // TODO: improve exception handling/wrapping here so we get // ErlangException types only! return p.control(proc, op.value, cmd); } private static ByteBuffer flatten(List<ByteBuffer> ovec) { if (ovec.size() == 0) { return EMPTY_BYTEBUFFER; } else if (ovec.size() == 1) { return ovec.get(0); } int len = 0; for (int i = 0; i < ovec.size(); i++) { len += ovec.get(i).remaining(); } ByteBuffer res = ByteBuffer.allocate(len); for (ByteBuffer bb : ovec) { res.put(bb); } res.rewind(); return res; } @BIF static EObject port_call(EProc proc, EObject port, EObject operation, EObject data) { EInternalPort p = port.testInternalPort(); if (p == null) { port = ERT.whereis(port); if (port == ERT.am_undefined) port = null; p = port.testInternalPort(); } ESmall op = operation.testSmall(); if (p == null || op == null) { throw ERT.badarg(port, operation, data); } // TODO: improve exception handling/wrapping here so we get // ErlangException types only! return p.call(proc, op.value, data); } @BIF static EPort open_port(EProc proc, EObject portName, EObject portSetting) throws Pausable { ETuple t; if ((t = portName.testTuple()) == null) throw ERT.badarg(portName, portSetting); ETask<? extends EPort> task = null; ETuple2 name; ETuple3 name3; if ((name = ETuple2.cast(t)) != null) { EString command = (EString)EString.make(name.elem2); if (name.elem1 == am_spawn) { EDriver drv = ERT.find_driver(command); if (drv == null) { task = new EExecDriverTask(proc, name, portSetting); } else { task = new ESpawnDriverTask(proc, drv, command, portSetting); } } else if (name.elem1 == am_spawn_driver) { EDriver drv = ERT.find_driver(command); if (drv == null) { throw ERT.badarg(portName, portSetting); } task = new ESpawnDriverTask(proc, drv, command, portSetting); } else if (name.elem1 == am_spawn_executable) { task = new EExecDriverTask(proc, name, portSetting); } } else if ((name3 = ETuple3.cast(portName)) != null && name3.elem1 == am_fd) { ESmall in = name3.elem2.testSmall(); ESmall out = name3.elem3.testSmall(); if (in == null || out == null) throw ERT.badarg(portName, portSetting); task = new EFDDriverTask(proc, in.value, out.value, portSetting); } if (task != null) { // link this proc and the driver task // task.link_to(proc); ERT.run(task); return task.self_handle(); } throw ERT.badarg(portName, portSetting); } @BIF static public EObject port_close(EProc proc, EObject port) throws Pausable { EPort p; if ((p = port.testPort()) == null) { EObject obj = ERT.whereis(port); if (obj == ERT.am_undefined || ((p = obj.testPort()) == null)) { throw ERT.badarg(port); } } if (!p.isOpen()) { throw ERT.badarg(port); } proc.unlink(p); p.send(new ETuple2(proc.self_handle(), EPort.am_close)); return ERT.TRUE; } @BIF static public ESeq ports() { return EDriverTask.all_ports(); } @BIF static public EObject port_info(EObject a1, EObject a2) { EPort p = a1.testPort(); EAtom spec = a2.testAtom(); if (p==null || spec==null) throw ERT.badarg(); return p.port_info(spec); } @BIF static public EObject port_set_data(EObject port, EObject data) { EPort p = id_or_name2port(port); if (p == null) { throw ERT.badarg(port, data); } p.set_data(data); return data; } @BIF static public EObject port_get_data(EObject port) { EPort p = id_or_name2port(port); if (p == null) { throw ERT.badarg(port); } return p.get_data(); } private static EPort id_or_name2port(EObject port) { EPort p = port.testPort(); if (p != null) return p; EObject p2 = ERT.whereis(port); // p2 is ERT.am_undefined if not found return p2.testPort(); } }
src/main/java/erjang/m/erlang/ErlPort.java
/** * This file is part of Erjang - A JVM-based Erlang VM * * Copyright (c) 2009 by Trifork * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. **/ package erjang.m.erlang; import java.nio.ByteBuffer; import java.util.ArrayList; import java.util.List; import kilim.Pausable; import erjang.BIF; import erjang.EAtom; import erjang.EInternalPort; import erjang.EObject; import erjang.EPort; import erjang.EProc; import erjang.ERT; import erjang.ESeq; import erjang.ESmall; import erjang.EString; import erjang.ETask; import erjang.ETuple; import erjang.ETuple2; import erjang.ETuple3; import erjang.NotImplemented; import erjang.driver.EDriver; import erjang.driver.EDriverTask; import erjang.driver.EExecDriverTask; import erjang.driver.EFDDriverTask; import erjang.driver.ESpawnDriverTask; /** * */ public class ErlPort { public static final EAtom am_fd = EAtom.intern("fd"); private static final ByteBuffer EMPTY_BYTEBUFFER = ByteBuffer.allocate(0); static EAtom am_spawn = EAtom.intern("spawn"); static EAtom am_spawn_driver = EAtom.intern("spawn_driver"); static EAtom am_spawn_executable = EAtom.intern("spawn_executable"); @BIF static EObject port_command(EProc proc, EObject port, EObject data) throws Pausable { EInternalPort p = port.testInternalPort(); if (p == null) { port = ERT.whereis(port); if (port == ERT.am_undefined) port = null; p = port.testInternalPort(); } List<ByteBuffer> ovec = new ArrayList<ByteBuffer>(); if (p == null || !data.collectIOList(ovec)) { throw ERT.badarg(port, data); } ByteBuffer[] out = new ByteBuffer[ovec.size()]; ovec.toArray(out); // System.err.println("packing "+data+"::"+data.getClass().getName()+" -> "+ovec); p.command(proc.self_handle(), out); return ERT.TRUE; } @BIF static EObject port_control(EProc proc, EObject port, EObject operation, EObject data) { try { return port_control0(proc, port, operation, data); } catch (RuntimeException e) { e.printStackTrace(); throw e; } catch (Error e) { e.printStackTrace(); throw e; } } static EObject port_control0(EProc proc, EObject port, EObject operation, EObject data) { EInternalPort p = port.testInternalPort(); if (p == null) { port = ERT.whereis(port); if (port == ERT.am_undefined) port = null; p = port.testInternalPort(); } ESmall op = operation.testSmall(); List<ByteBuffer> ovec = new ArrayList<ByteBuffer>(); if (p == null || op == null || !data.collectIOList(ovec)) { throw ERT.badarg(port, operation, data); } ByteBuffer cmd = flatten(ovec); // TODO: improve exception handling/wrapping here so we get // ErlangException types only! return p.control(proc, op.value, cmd); } private static ByteBuffer flatten(List<ByteBuffer> ovec) { if (ovec.size() == 0) { return EMPTY_BYTEBUFFER; } else if (ovec.size() == 1) { return ovec.get(0); } int len = 0; for (int i = 0; i < ovec.size(); i++) { len += ovec.get(i).remaining(); } ByteBuffer res = ByteBuffer.allocate(len); for (ByteBuffer bb : ovec) { res.put(bb); } res.rewind(); return res; } @BIF static EObject port_call(EProc proc, EObject port, EObject operation, EObject data) { EInternalPort p = port.testInternalPort(); if (p == null) { port = ERT.whereis(port); if (port == ERT.am_undefined) port = null; p = port.testInternalPort(); } ESmall op = operation.testSmall(); if (p == null || op == null) { throw ERT.badarg(port, operation, data); } // TODO: improve exception handling/wrapping here so we get // ErlangException types only! return p.call(proc, op.value, data); } @BIF static EPort open_port(EProc proc, EObject portName, EObject portSetting) throws Pausable { ETuple t; if ((t = portName.testTuple()) == null) throw ERT.badarg(portName, portSetting); ETask<? extends EPort> task = null; ETuple2 name; ETuple3 name3; if ((name = ETuple2.cast(t)) != null) { EString command = (EString)EString.make(name.elem2); if (name.elem1 == am_spawn) { EDriver drv = ERT.find_driver(command); if (drv == null) { task = new EExecDriverTask(proc, name, portSetting); } else { task = new ESpawnDriverTask(proc, drv, command, portSetting); } } else if (name.elem1 == am_spawn_driver) { EDriver drv = ERT.find_driver(command); if (drv == null) { throw ERT.badarg(portName, portSetting); } task = new ESpawnDriverTask(proc, drv, command, portSetting); } else if (name.elem1 == am_spawn_executable) { task = new EExecDriverTask(proc, name, portSetting); } } else if ((name3 = ETuple3.cast(portName)) != null && name3.elem1 == am_fd) { ESmall in = name3.elem2.testSmall(); ESmall out = name3.elem3.testSmall(); if (in == null || out == null) throw ERT.badarg(portName, portSetting); task = new EFDDriverTask(proc, in.value, out.value, portSetting); } if (task != null) { // link this proc and the driver task // task.link_to(proc); ERT.run(task); return task.self_handle(); } throw ERT.badarg(portName, portSetting); } @BIF static public EObject port_close(EProc proc, EObject port) throws Pausable { EPort p; if ((p = port.testPort()) == null) { EObject obj = ERT.whereis(port); if (obj == ERT.am_undefined || ((p = obj.testPort()) == null)) { throw ERT.badarg(port); } } if (!p.isOpen()) { throw ERT.badarg(port); } proc.unlink(p); p.send(new ETuple2(proc.self_handle(), EPort.am_close)); return ERT.TRUE; } @BIF static public ESeq ports() { return EDriverTask.all_ports(); } @BIF static public EObject port_info(EObject a1, EObject a2) { EPort p = a1.testPort(); EAtom spec = a2.testAtom(); if (p==null || spec==null) throw ERT.badarg(); return p.port_info(spec); } @BIF static public EObject port_set_data(EObject port, EObject data) { EPort p = id_or_name2port(port); if (p == null) { throw ERT.badarg(port, data); } p.set_data(data); return data; } @BIF static public EObject port_get_data(EObject port) { EPort p = id_or_name2port(port); if (p == null) { throw ERT.badarg(port); } return p.get_data(); } private static EPort id_or_name2port(EObject port) { EPort p = port.testPort(); if (p != null) return p; EObject p2 = ERT.whereis(port); // p2 is ERT.am_undefined if not found return p2.testPort(); } }
Extra debug info in erlang:port_command/2
src/main/java/erjang/m/erlang/ErlPort.java
Extra debug info in erlang:port_command/2
Java
apache-2.0
065967b3e89724b90551ec22f2ff12f0a4671eb5
0
brianm/gressil
package org.skife.gressil; import jnr.posix.POSIX; import jnr.posix.POSIXFactory; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.io.PrintStream; import java.lang.management.ManagementFactory; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import static java.util.Arrays.asList; public class Spawn { private final File pidfile; private final File out; private final File err; private final List<String> extraVmArgs; private final List<String> extraProgramArgs; public Spawn() { this(null, new File("/dev/null"), new File("/dev/null"), Collections.<String>emptyList(), Collections.<String>emptyList()); } private Spawn(File pidfile, File out, File err, List<String> extraVmArgs, List<String> extraProgramArgs) { this.pidfile = pidfile; this.out = out; this.err = err; this.extraVmArgs = extraVmArgs; this.extraProgramArgs = extraProgramArgs; } public Spawn withExtraVmArgs(List<String> extraVmArgs) { return new Spawn(pidfile, out, err, extraVmArgs, extraProgramArgs); } public Spawn withExtraVmArgs(String... extraVmArgs) { return new Spawn(pidfile, out, err, asList(extraVmArgs), extraProgramArgs); } public Spawn withExtraProgramArgs(List<String> extraProgramArgs) { return new Spawn(pidfile, out, err, extraVmArgs, extraProgramArgs); } public Spawn withExtraProgramArgs(String... extraProgramArgs) { return new Spawn(pidfile, out, err, extraVmArgs, asList(extraProgramArgs)); } public Spawn withPidFile(File pidfile) { return new Spawn(pidfile, out, err, extraVmArgs, extraProgramArgs); } public Spawn withStdout(File out) { return new Spawn(pidfile, out, err, extraVmArgs, extraProgramArgs); } public Spawn withStderr(File err) { return new Spawn(pidfile, out, err, extraVmArgs, extraProgramArgs); } public Status spawnSelf() throws IOException { POSIX posix = POSIXFactory.getPOSIX(); if (isDaemon()) { posix.setsid(); OutputStream old_out = System.out; OutputStream old_err = System.err; System.setOut(new PrintStream(new FileOutputStream(out, true))); System.setErr(new PrintStream(new FileOutputStream(err, true))); old_err.close(); old_out.close(); if (pidfile != null) { FileOutputStream p_out = new FileOutputStream(pidfile); p_out.write(String.valueOf(posix.getpid()).getBytes()); p_out.close(); } return Status.child(posix.getpid()); } else { List<POSIX.SpawnFileAction> close_streams = asList(); List<String> envp = getEnv(); envp.add(Spawn.class.getName() + "=daemon"); List<String> argv = buildARGV(); int child_pid = posix.posix_spawnp(argv.get(0), close_streams, buildARGV(), envp); return Status.parent(child_pid); } } public void daemonize() throws IOException { if (spawnSelf().isParent()) { System.exit(0); } } public static boolean isDaemon() { return "daemon".equals(System.getenv(Spawn.class.getName())); } public List<String> buildARGV() { List<String> ARGV = new ArrayList<String>(); String java = System.getProperty("java.home") + "/bin/java"; ARGV.add(java); List<String> its = ManagementFactory.getRuntimeMXBean().getInputArguments(); List<String> fixed = new ArrayList<String>(); StringBuilder current = new StringBuilder(); for (String it : its) { if (it.startsWith("-")) { if (current.length() > 0) { fixed.add(current.toString()); current = new StringBuilder(); } current.append(it); } else { current.append("\\ ").append(it); } } ARGV.addAll(fixed); String[] java_sun_command = System.getProperty("sun.java.command").split(" "); if (java_sun_command[0].endsWith(".jar")) { ARGV.add("-jar"); ARGV.add(new File(java_sun_command[0]).getPath()); } else { // else it's a .class, add the classpath and mainClass ARGV.add("-cp"); String raw_cp = System.getProperty("java.class.path").replaceAll(" ", "\\ "); ARGV.add(raw_cp); ARGV.add(java_sun_command[0]); } ARGV.addAll(Arrays.asList(java_sun_command).subList(1, java_sun_command.length)); return ARGV; } public List<String> getEnv() { String[] envp = new String[System.getenv().size()]; int i = 0; for (Map.Entry<String, String> pair : System.getenv().entrySet()) { envp[i++] = new StringBuilder(pair.getKey()).append("=").append(pair.getValue()).toString(); } return new ArrayList<String>(asList(envp)); } }
src/main/java/org/skife/gressil/Spawn.java
package org.skife.gressil; import jnr.posix.POSIX; import jnr.posix.POSIXFactory; import java.io.File; import java.io.FileOutputStream; import java.io.IOException; import java.io.OutputStream; import java.io.PrintStream; import java.lang.management.ManagementFactory; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Map; import static java.util.Arrays.asList; public class Spawn { private final File pidfile; private final File out; private final File err; private final List<String> extraVmArgs; private final List<String> extraProgramArgs; public Spawn() { this(null, new File("/dev/null"), new File("/dev/null"), Collections.<String>emptyList(), Collections.<String>emptyList()); } private Spawn(File pidfile, File out, File err, List<String> extraVmArgs, List<String> extraProgramArgs) { this.pidfile = pidfile; this.out = out; this.err = err; this.extraVmArgs = extraVmArgs; this.extraProgramArgs = extraProgramArgs; } public Spawn withExtraVmArgs(List<String> extraVmArgs) { return new Spawn(pidfile, out, err, extraVmArgs, extraProgramArgs); } public Spawn withExtraVmArgs(String... extraVmArgs) { return new Spawn(pidfile, out, err, asList(extraVmArgs), extraProgramArgs); } public Spawn withExtraProgramArgs(List<String> extraProgramArgs) { return new Spawn(pidfile, out, err, extraVmArgs, extraProgramArgs); } public Spawn withExtraProgramArgs(String... extraProgramArgs) { return new Spawn(pidfile, out, err, extraVmArgs, asList(extraProgramArgs)); } public Spawn withPidFile(File pidfile) { return new Spawn(pidfile, out, err, extraVmArgs, extraProgramArgs); } public Spawn withStdout(File out) { return new Spawn(pidfile, out, err, extraVmArgs, extraProgramArgs); } public Spawn withStderr(File err) { return new Spawn(pidfile, out, err, extraVmArgs, extraProgramArgs); } public Status spawnSelf() throws IOException { POSIX posix = POSIXFactory.getPOSIX(); if (isDaemon()) { posix.setsid(); OutputStream old_out = System.out; OutputStream old_err = System.err; System.setOut(new PrintStream(new FileOutputStream(out, true))); System.setErr(new PrintStream(new FileOutputStream(err, true))); old_err.close(); old_out.close(); if (pidfile != null) { FileOutputStream p_out = new FileOutputStream(pidfile); p_out.write(String.valueOf(posix.getpid()).getBytes()); p_out.close(); } return Status.child(posix.getpid()); } else { List<POSIX.SpawnFileAction> close_streams = asList(); List<String> envp = getEnv(); envp.add(Spawn.class.getName() + "=daemon"); List<String> argv = buildARGV(); int child_pid = posix.posix_spawnp(argv.get(0), close_streams, buildARGV(), envp); return Status.parent(child_pid); } } public void daemonize() throws IOException { if (spawnSelf().isParent()) { System.exit(0); } } public static boolean isDaemon() { return "daemon".equals(System.getenv(Spawn.class.getName())); } public List<String> buildARGV() { List<String> ARGV = new ArrayList<String>(); String java = System.getProperty("java.home") + "/bin/java"; ARGV.add(java); List<String> its = ManagementFactory.getRuntimeMXBean().getInputArguments(); List<String> fixed = new ArrayList<String>(); StringBuilder current = new StringBuilder(); for (String it : its) { if (it.startsWith("-")) { if (current.length() > 0) { fixed.add(current.toString()); current = new StringBuilder(); } current.append(it); }te else { current.append("\\ ").append(it); } } ARGV.addAll(fixed); String[] java_sun_command = System.getProperty("sun.java.command").split(" "); if (java_sun_command[0].endsWith(".jar")) { ARGV.add("-jar"); ARGV.add(new File(java_sun_command[0]).getPath()); } else { // else it's a .class, add the classpath and mainClass ARGV.add("-cp"); String raw_cp = System.getProperty("java.class.path").replaceAll(" ", "\\ "); ARGV.add(raw_cp); ARGV.add(java_sun_command[0]); } ARGV.addAll(Arrays.asList(java_sun_command).subList(1, java_sun_command.length)); return ARGV; } public List<String> getEnv() { String[] envp = new String[System.getenv().size()]; int i = 0; for (Map.Entry<String, String> pair : System.getenv().entrySet()) { envp[i++] = new StringBuilder(pair.getKey()).append("=").append(pair.getValue()).toString(); } return new ArrayList<String>(asList(envp)); } }
extra chars snuck in, again
src/main/java/org/skife/gressil/Spawn.java
extra chars snuck in, again
Java
apache-2.0
cb5d64b7aa1e1ab6ccb1213fcfba35b2118db5e4
0
ronsigal/xerces,jimma/xerces,RackerWilliams/xercesj,RackerWilliams/xercesj,ronsigal/xerces,jimma/xerces,ronsigal/xerces,jimma/xerces,RackerWilliams/xercesj
/* $Id$ */ /* * The Apache Software License, Version 1.1 * * Copyright (c) 2000-2002 The Apache Software Foundation. All rights * reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. The end-user documentation included with the redistribution, * if any, must include the following acknowledgment: * "This product includes software developed by the * Apache Software Foundation (http://www.apache.org/)." * Alternately, this acknowledgment may appear in the software itself, * if and wherever such third-party acknowledgments normally appear. * * 4. The names "Xerces" and "Apache Software Foundation" must * not be used to endorse or promote products derived from this * software without prior written permission. For written * permission, please contact apache\@apache.org. * * 5. Products derived from this software may not be called "Apache", * nor may "Apache" appear in their name, without prior written * permission of the Apache Software Foundation. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation, and was * originally based on software copyright (c) 1999, International * Business Machines, Inc., http://www.ibm.com . For more information * on the Apache Software Foundation, please see * <http://www.apache.org/>. */ package dom.events; import org.w3c.dom.*; import org.w3c.dom.events.*; public class Test { EventReporter sharedReporter=new EventReporter(); public static void main(String[] args) { Test met=new Test(); met.runTest(); } void runTest() { Document doc=new org.apache.xerces.dom.DocumentImpl(); reportAllMutations(doc); Element root=addNoisyElement(doc,doc,0); Element e=null; int i; // Individual nodes e=addNoisyElement(doc,root,0); Attr a=addNoisyAttr(doc,e,0); a.setNodeValue("Updated A0 of E0, prepare to be acidulated."); NamedNodeMap nnm=e.getAttributes(); nnm.removeNamedItem(a.getName()); nnm.setNamedItem(a); // InsertedInto/RemovedFrom tests. // ***** These do not currently cross the Attr/Element barrier. // DOM spec is pretty clear on that, but this may not be the intent. System.out.println(); System.out.println("Add/remove a preconstructed tree; tests AddedToDocument"); System.out.println(); sharedReporter.off(); Element lateAdd=doc.createElement("lateAdd"); reportAllMutations(lateAdd); e=lateAdd; for(i=0;i<2;++i) { e=addNoisyElement(doc,e,i); addNoisyAttr(doc,e,i); } sharedReporter.on(); root.appendChild(lateAdd); root.removeChild(lateAdd); System.out.println(); System.out.println("Replace a preconstructed tree; tests AddedToDocument"); System.out.println(); sharedReporter.off(); Node e0=root.replaceChild(lateAdd,root.getFirstChild()); sharedReporter.on(); root.replaceChild(e0,lateAdd); sharedReporter.off(); Text t = addNoisyText(doc, root.getFirstChild(), "fo"); sharedReporter.on(); t.insertData(1, "o"); root.setAttribute("foo", "bar"); System.out.println("Done"); } Element addNoisyElement(Document doc,Node parent,int index) { String nodeName="Root"; if(parent!=doc) nodeName=parent.getNodeName()+"_E"+index; Element e=doc.createElement(nodeName); reportAllMutations(e); parent.appendChild(e); return e; } Attr addNoisyAttr(Document doc,Element parent,int index) { String attrName=parent.getNodeName()+"_A"+index; Attr a=doc.createAttribute(attrName); reportAllMutations(a); a.setNodeValue("Initialized A"+index+" of "+parent.getNodeName()); parent.setAttributeNode(a); return a; } Text addNoisyText(Document doc, Node parent, String data) { Text t = doc.createTextNode(data); reportAllMutations(t); parent.appendChild(t); return t; } void reportAllMutations(Node n) { String[] evtNames={ "DOMSubtreeModified","DOMAttrModified","DOMCharacterDataModified", "DOMNodeInserted","DOMNodeRemoved", "DOMNodeInsertedIntoDocument","DOMNodeRemovedFromDocument", }; EventTarget t=(EventTarget)n; for(int i=evtNames.length-1; i>=0; --i) { t.addEventListener(evtNames[i], sharedReporter, true); t.addEventListener(evtNames[i], sharedReporter, false); } } }
tests/dom/events/Test.java
/* $Id$ */ /* * The Apache Software License, Version 1.1 * * Copyright (c) 2000-2002 The Apache Software Foundation. All rights * reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in * the documentation and/or other materials provided with the * distribution. * * 3. The end-user documentation included with the redistribution, * if any, must include the following acknowledgment: * "This product includes software developed by the * Apache Software Foundation (http://www.apache.org/)." * Alternately, this acknowledgment may appear in the software itself, * if and wherever such third-party acknowledgments normally appear. * * 4. The names "Xerces" and "Apache Software Foundation" must * not be used to endorse or promote products derived from this * software without prior written permission. For written * permission, please contact apache\@apache.org. * * 5. Products derived from this software may not be called "Apache", * nor may "Apache" appear in their name, without prior written * permission of the Apache Software Foundation. * * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF * SUCH DAMAGE. * ==================================================================== * * This software consists of voluntary contributions made by many * individuals on behalf of the Apache Software Foundation, and was * originally based on software copyright (c) 1999, International * Business Machines, Inc., http://www.ibm.com . For more information * on the Apache Software Foundation, please see * <http://www.apache.org/>. */ package dom.events; import org.w3c.dom.*; import org.w3c.dom.events.*; public class Test { EventReporter sharedReporter=new EventReporter(); public static void main(String[] args) { Test met=new Test(); met.runTest(); } void runTest() { Document doc=new org.apache.xerces.dom.DocumentImpl(); reportAllMutations(doc); Element root=addNoisyElement(doc,doc,0); Element e=null; int i; // Individual nodes e=addNoisyElement(doc,root,0); Attr a=addNoisyAttr(doc,e,0); a.setNodeValue("Updated A0 of E0, prepare to be acidulated."); NamedNodeMap nnm=e.getAttributes(); nnm.removeNamedItem(a.getName()); nnm.setNamedItem(a); // InsertedInto/RemovedFrom tests. // ***** These do not currently cross the Attr/Element barrier. // DOM spec is pretty clear on that, but this may not be the intent. System.out.println(); System.out.println("Add/remove a preconstructed tree; tests AddedToDocument"); System.out.println(); sharedReporter.off(); Element lateAdd=doc.createElement("lateAdd"); reportAllMutations(lateAdd); e=lateAdd; for(i=0;i<2;++i) { e=addNoisyElement(doc,e,i); addNoisyAttr(doc,e,i); } sharedReporter.on(); root.appendChild(lateAdd); root.removeChild(lateAdd); System.out.println(); System.out.println("Replace a preconstructed tree; tests AddedToDocument"); System.out.println(); sharedReporter.off(); Node e0=root.replaceChild(lateAdd,root.getFirstChild()); sharedReporter.on(); root.replaceChild(e0,lateAdd); sharedReporter.off(); Text t = addNoisyText(doc, root.getFirstChild(), "fo"); sharedReporter.on(); t.insertData(1, "o"); System.out.println("Done"); } Element addNoisyElement(Document doc,Node parent,int index) { String nodeName="Root"; if(parent!=doc) nodeName=parent.getNodeName()+"_E"+index; Element e=doc.createElement(nodeName); reportAllMutations(e); parent.appendChild(e); return e; } Attr addNoisyAttr(Document doc,Element parent,int index) { String attrName=parent.getNodeName()+"_A"+index; Attr a=doc.createAttribute(attrName); reportAllMutations(a); a.setNodeValue("Initialized A"+index+" of "+parent.getNodeName()); parent.setAttributeNode(a); return a; } Text addNoisyText(Document doc, Node parent, String data) { Text t = doc.createTextNode(data); reportAllMutations(t); parent.appendChild(t); return t; } void reportAllMutations(Node n) { String[] evtNames={ "DOMSubtreeModified","DOMAttrModified","DOMCharacterDataModified", "DOMNodeInserted","DOMNodeRemoved", "DOMNodeInsertedIntoDocument","DOMNodeRemovedFromDocument", }; EventTarget t=(EventTarget)n; for(int i=evtNames.length-1; i>=0; --i) { t.addEventListener(evtNames[i], sharedReporter, true); t.addEventListener(evtNames[i], sharedReporter, false); } } }
added a call to setAttribute() git-svn-id: 21df804813e9d3638e43477f308dd0be51e5f30f@319103 13f79535-47bb-0310-9956-ffa450edef68
tests/dom/events/Test.java
added a call to setAttribute()
Java
apache-2.0
191dbcecb6aa56993ef97d47b65aceb00f225bbe
0
nikeshmhr/unitime,rafati/unitime,sktoo/timetabling-system-,sktoo/timetabling-system-,UniTime/unitime,UniTime/unitime,rafati/unitime,UniTime/unitime,maciej-zygmunt/unitime,zuzanamullerova/unitime,maciej-zygmunt/unitime,maciej-zygmunt/unitime,rafati/unitime,sktoo/timetabling-system-,nikeshmhr/unitime,zuzanamullerova/unitime,zuzanamullerova/unitime,nikeshmhr/unitime
/* * UniTime 3.2 (University Timetabling Application) * Copyright (C) 2010, UniTime LLC, and individual contributors * as indicated by the @authors tag. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with this program. If not, see <http://www.gnu.org/licenses/>. * */ package org.unitime.timetable.util.queue; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.Collection; import java.util.HashSet; import java.util.Hashtable; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.StringTokenizer; import java.util.TreeSet; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; import javax.servlet.http.HttpServletRequest; import org.unitime.commons.Email; import org.unitime.timetable.ApplicationProperties; import org.unitime.timetable.form.ExamPdfReportForm; import org.unitime.timetable.model.DepartmentalInstructor; import org.unitime.timetable.model.ExamOwner; import org.unitime.timetable.model.ExamType; import org.unitime.timetable.model.ManagerRole; import org.unitime.timetable.model.Session; import org.unitime.timetable.model.Student; import org.unitime.timetable.model.SubjectArea; import org.unitime.timetable.model.TimetableManager; import org.unitime.timetable.model.dao.ExamDAO; import org.unitime.timetable.model.dao.ExamTypeDAO; import org.unitime.timetable.model.dao.SessionDAO; import org.unitime.timetable.model.dao.SubjectAreaDAO; import org.unitime.timetable.reports.exam.InstructorExamReport; import org.unitime.timetable.reports.exam.PdfLegacyExamReport; import org.unitime.timetable.reports.exam.StudentExamReport; import org.unitime.timetable.security.UserContext; import org.unitime.timetable.security.rights.Right; import org.unitime.timetable.solver.exam.ExamSolverProxy; import org.unitime.timetable.solver.exam.ui.ExamAssignmentInfo; import org.unitime.timetable.solver.exam.ui.ExamInfo.ExamInstructorInfo; import org.unitime.timetable.util.Constants; /** * * @author Tomas Muller * */ public class PdfExamReportQueueItem extends QueueItem { public static String TYPE = "PDF Exam Report"; private ExamPdfReportForm iForm; private String iUrl = null; private ExamSolverProxy iExamSolver; private String iName = null; private double iProgress = 0; public PdfExamReportQueueItem(Session session, UserContext owner, ExamPdfReportForm form, HttpServletRequest request, ExamSolverProxy examSolver) { super(session, owner); iForm = form; iUrl = request.getScheme()+"://"+request.getServerName()+":"+request.getServerPort()+request.getContextPath(); iExamSolver = examSolver; iName = ExamTypeDAO.getInstance().get(iForm.getExamType()).getLabel() + " "; for (int i=0;i<iForm.getReports().length;i++) { if (i > 0) iName += ", "; iName += iForm.getReports()[i]; } if (!iForm.getAll()) { iName += " ("; for (int i=0;i<iForm.getSubjects().length;i++) { SubjectArea subject = new SubjectAreaDAO().get(Long.valueOf(iForm.getSubjects()[i])); if (i > 0) iName += ", "; iName += subject.getSubjectAreaAbbreviation(); } iName += ")"; } } @Override public void execute() { /* Logger repLog = Logger.getLogger("org.unitime.timetable.reports.exam"); Appender myAppender = new AppenderSkeleton() { @Override public boolean requiresLayout() { return false; } @Override public void close() { } @Override protected void append(LoggingEvent event) { if (event.getMessage() == null) return; if (event.getLevel().toInt() >= Priority.ERROR_INT) { error(event.getMessage().toString()); } else if (event.getLevel().toInt() >= Priority.WARN_INT) { warn(event.getMessage().toString()); } else log(event.getMessage().toString()); } }; repLog.addAppender(myAppender); */ org.hibernate.Session hibSession = ExamDAO.getInstance().getSession(); createReports(hibSession); if (hibSession.isOpen()) hibSession.close(); //repLog.removeAppender(myAppender); } private void createReports(org.hibernate.Session hibSession) { try { iProgress = 0; setStatus("Loading exams..."); TreeSet<ExamAssignmentInfo> exams = null; if (iExamSolver!=null && iExamSolver.getExamTypeId().equals(iForm.getExamType()) && "true".equals(ApplicationProperties.getProperty("tmtbl.exam.pdfReports.canUseSolution","false"))) { exams = new TreeSet(iExamSolver.getAssignedExams()); if (iForm.getIgnoreEmptyExams()) for (Iterator<ExamAssignmentInfo> i=exams.iterator();i.hasNext();) { if (i.next().getStudentIds().isEmpty()) i.remove(); } if ("true".equals(ApplicationProperties.getProperty("tmtbl.exam.pdfReports.useSolution.preloadCrosslistedExams","true"))) { setStatus(" Fetching exams..."); hibSession.createQuery( "select o from Exam x inner join x.owners o where x.session.uniqueId=:sessionId and x.examType.uniqueId=:examTypeId" ).setLong("sessionId", iExamSolver.getSessionId()).setLong("examTypeId", iExamSolver.getExamTypeId()).setCacheable(true).list(); setStatus(" Fetching related objects (class)..."); hibSession.createQuery( "select c from Class_ c, ExamOwner o where o.exam.session.uniqueId=:sessionId and o.exam.examType.uniqueId=:examTypeId and o.ownerType=:classType and c.uniqueId=o.ownerId") .setLong("sessionId", iExamSolver.getSessionId()) .setLong("examTypeId", iExamSolver.getExamTypeId()) .setInteger("classType", ExamOwner.sOwnerTypeClass).setCacheable(true).list(); setStatus(" Fetching related objects (config)..."); hibSession.createQuery( "select c from InstrOfferingConfig c, ExamOwner o where o.exam.session.uniqueId=:sessionId and o.exam.examType.uniqueId=:examTypeId and o.ownerType=:configType and c.uniqueId=o.ownerId") .setLong("sessionId", iExamSolver.getSessionId()) .setLong("examTypeId", iExamSolver.getExamTypeId()) .setInteger("configType", ExamOwner.sOwnerTypeConfig).setCacheable(true).list(); setStatus(" Fetching related objects (course)..."); hibSession.createQuery( "select c from CourseOffering c, ExamOwner o where o.exam.session.uniqueId=:sessionId and o.exam.examType.uniqueId=:examTypeId and o.ownerType=:courseType and c.uniqueId=o.ownerId") .setLong("sessionId", iExamSolver.getSessionId()) .setLong("examTypeId", iExamSolver.getExamTypeId()) .setInteger("courseType", ExamOwner.sOwnerTypeCourse).setCacheable(true).list(); setStatus(" Fetching related objects (offering)..."); hibSession.createQuery( "select c from InstructionalOffering c, ExamOwner o where o.exam.session.uniqueId=:sessionId and o.exam.examType.uniqueId=:examTypeId and o.ownerType=:offeringType and c.uniqueId=o.ownerId") .setLong("sessionId", iExamSolver.getSessionId()) .setLong("examTypeId", iExamSolver.getExamTypeId()) .setInteger("offeringType", ExamOwner.sOwnerTypeOffering).setCacheable(true).list(); Hashtable<Long,Hashtable<Long,Set<Long>>> owner2course2students = new Hashtable(); setStatus(" Loading students (class)..."); for (Iterator i= hibSession.createQuery( "select o.uniqueId, e.student.uniqueId, e.courseOffering.uniqueId from "+ "Exam x inner join x.owners o, "+ "StudentClassEnrollment e inner join e.clazz c "+ "where x.session.uniqueId=:sessionId and x.examType.uniqueId=:examTypeId and "+ "o.ownerType="+org.unitime.timetable.model.ExamOwner.sOwnerTypeClass+" and "+ "o.ownerId=c.uniqueId").setLong("sessionId", iExamSolver.getSessionId()).setLong("examTypeId", iExamSolver.getExamTypeId()).setCacheable(true).list().iterator();i.hasNext();) { Object[] o = (Object[])i.next(); Long ownerId = (Long)o[0]; Long studentId = (Long)o[1]; Long courseId = (Long)o[2]; Hashtable<Long, Set<Long>> course2students = owner2course2students.get(ownerId); if (course2students == null) { course2students = new Hashtable<Long, Set<Long>>(); owner2course2students.put(ownerId, course2students); } Set<Long> studentsOfCourse = course2students.get(courseId); if (studentsOfCourse == null) { studentsOfCourse = new HashSet<Long>(); course2students.put(courseId, studentsOfCourse); } studentsOfCourse.add(studentId); } setStatus(" Loading students (config)..."); for (Iterator i= hibSession.createQuery( "select o.uniqueId, e.student.uniqueId, e.courseOffering.uniqueId from "+ "Exam x inner join x.owners o, "+ "StudentClassEnrollment e inner join e.clazz c " + "inner join c.schedulingSubpart.instrOfferingConfig ioc " + "where x.session.uniqueId=:sessionId and x.examType.uniqueId=:examTypeId and "+ "o.ownerType="+org.unitime.timetable.model.ExamOwner.sOwnerTypeConfig+" and "+ "o.ownerId=ioc.uniqueId").setLong("sessionId", iExamSolver.getSessionId()).setLong("examTypeId", iExamSolver.getExamTypeId()).setCacheable(true).list().iterator();i.hasNext();) { Object[] o = (Object[])i.next(); Long ownerId = (Long)o[0]; Long studentId = (Long)o[1]; Long courseId = (Long)o[2]; Hashtable<Long, Set<Long>> course2students = owner2course2students.get(ownerId); if (course2students == null) { course2students = new Hashtable<Long, Set<Long>>(); owner2course2students.put(ownerId, course2students); } Set<Long> studentsOfCourse = course2students.get(courseId); if (studentsOfCourse == null) { studentsOfCourse = new HashSet<Long>(); course2students.put(courseId, studentsOfCourse); } studentsOfCourse.add(studentId); } setStatus(" Loading students (course)..."); for (Iterator i= hibSession.createQuery( "select o.uniqueId, e.student.uniqueId, e.courseOffering.uniqueId from "+ "Exam x inner join x.owners o, "+ "StudentClassEnrollment e inner join e.courseOffering co " + "where x.session.uniqueId=:sessionId and x.examType.uniqueId=:examTypeId and "+ "o.ownerType="+org.unitime.timetable.model.ExamOwner.sOwnerTypeCourse+" and "+ "o.ownerId=co.uniqueId").setLong("sessionId", iExamSolver.getSessionId()).setLong("examTypeId", iExamSolver.getExamTypeId()).setCacheable(true).list().iterator();i.hasNext();) { Object[] o = (Object[])i.next(); Long ownerId = (Long)o[0]; Long studentId = (Long)o[1]; Long courseId = (Long)o[2]; Hashtable<Long, Set<Long>> course2students = owner2course2students.get(ownerId); if (course2students == null) { course2students = new Hashtable<Long, Set<Long>>(); owner2course2students.put(ownerId, course2students); } Set<Long> studentsOfCourse = course2students.get(courseId); if (studentsOfCourse == null) { studentsOfCourse = new HashSet<Long>(); course2students.put(courseId, studentsOfCourse); } studentsOfCourse.add(studentId); } setStatus(" Loading students (offering)..."); for (Iterator i= hibSession.createQuery( "select o.uniqueId, e.student.uniqueId, e.courseOffering.uniqueId from "+ "Exam x inner join x.owners o, "+ "StudentClassEnrollment e inner join e.courseOffering.instructionalOffering io " + "where x.session.uniqueId=:sessionId and x.examType.uniqueId=:examTypeId and "+ "o.ownerType="+org.unitime.timetable.model.ExamOwner.sOwnerTypeOffering+" and "+ "o.ownerId=io.uniqueId").setLong("sessionId", iExamSolver.getSessionId()).setLong("examTypeId", iExamSolver.getExamTypeId()).setCacheable(true).list().iterator();i.hasNext();) { Object[] o = (Object[])i.next(); Long ownerId = (Long)o[0]; Long studentId = (Long)o[1]; Long courseId = (Long)o[2]; Hashtable<Long, Set<Long>> course2students = owner2course2students.get(ownerId); if (course2students == null) { course2students = new Hashtable<Long, Set<Long>>(); owner2course2students.put(ownerId, course2students); } Set<Long> studentsOfCourse = course2students.get(courseId); if (studentsOfCourse == null) { studentsOfCourse = new HashSet<Long>(); course2students.put(courseId, studentsOfCourse); } studentsOfCourse.add(studentId); } for (ExamAssignmentInfo exam: exams) { exam.createSectionsIncludeCrosslistedDummies(owner2course2students); } } } else { exams = PdfLegacyExamReport.loadExams(getSessionId(), iForm.getExamType(), true, iForm.getIgnoreEmptyExams(), true); } iProgress = 0.1; /* if (iForm.getAll()) { for (Iterator i=Exam.findAll(session.getUniqueId(), iForm.getExamType()).iterator();i.hasNext();) { exams.add(new ExamAssignmentInfo((Exam)i.next())); } } else { for (int i=0;i<iForm.getSubjects().length;i++) { SubjectArea subject = new SubjectAreaDAO().get(Long.valueOf(iForm.getSubjects()[i])); TreeSet<ExamAssignmentInfo> examsThisSubject = new TreeSet(); for (Iterator j=Exam.findExamsOfSubjectArea(subject.getUniqueId(), iForm.getExamType()).iterator();j.hasNext();) { examsThisSubject.add(new ExamAssignmentInfo((Exam)j.next())); } examsPerSubject.put(subject, examsThisSubject); } } */ Hashtable<String,File> output = new Hashtable(); Hashtable<SubjectArea,Hashtable<String,File>> outputPerSubject = new Hashtable(); Hashtable<ExamInstructorInfo,File> ireports = null; Hashtable<Student,File> sreports = null; Session session = getSession(); for (int i=0;i<iForm.getReports().length;i++) { iProgress = 0.1 + (0.8 / iForm.getReports().length) * i; setStatus("Generating "+iForm.getReports()[i]+"..."); Class reportClass = ExamPdfReportForm.sRegisteredReports.get(iForm.getReports()[i]); String reportName = null; for (Map.Entry<String, Class> entry : PdfLegacyExamReport.sRegisteredReports.entrySet()) if (entry.getValue().equals(reportClass)) reportName = entry.getKey(); if (reportName==null) reportName = "r"+(i+1); String name = session.getAcademicTerm()+session.getSessionStartYear()+ExamTypeDAO.getInstance().get(iForm.getExamType()).getReference()+"_"+reportName; if (iForm.getAll()) { File file = ApplicationProperties.getTempFile(name, (iForm.getModeIdx()==PdfLegacyExamReport.sModeText?"txt":"pdf")); log("&nbsp;&nbsp;Writing <a href='temp/"+file.getName()+"'>"+reportName+"."+(iForm.getModeIdx()==PdfLegacyExamReport.sModeText?"txt":"pdf")+"</a>... ("+exams.size()+" exams)"); PdfLegacyExamReport report = (PdfLegacyExamReport)reportClass. getConstructor(int.class, File.class, Session.class, ExamType.class, SubjectArea.class, Collection.class). newInstance(iForm.getModeIdx(), file, new SessionDAO().get(session.getUniqueId()), ExamTypeDAO.getInstance().get(iForm.getExamType()), null, exams); report.setDirect(iForm.getDirect()); report.setM2d(iForm.getM2d()); report.setBtb(iForm.getBtb()); report.setDispRooms(iForm.getDispRooms()); report.setNoRoom(iForm.getNoRoom()); report.setTotals(iForm.getTotals()); report.setLimit(iForm.getLimit()==null || iForm.getLimit().length()==0?-1:Integer.parseInt(iForm.getLimit())); report.setRoomCode(iForm.getRoomCodes()); report.setDispLimits(iForm.getDispLimit()); report.setSince(iForm.getSince()==null || iForm.getSince().length()==0?null:new SimpleDateFormat("MM/dd/yyyy").parse(iForm.getSince())); report.setItype(iForm.getItype()); report.setClassSchedule(iForm.getClassSchedule()); report.printReport(); report.close(); output.put(reportName+"."+(iForm.getModeIdx()==PdfLegacyExamReport.sModeText?"txt":"pdf"),file); if (report instanceof InstructorExamReport && iForm.getEmailInstructors()) { ireports = ((InstructorExamReport)report).printInstructorReports(iForm.getModeIdx(), name, new FileGenerator(name)); } else if (report instanceof StudentExamReport && iForm.getEmailStudents()) { sreports = ((StudentExamReport)report).printStudentReports(iForm.getModeIdx(), name, new FileGenerator(name)); } } else { for (int j=0;j<iForm.getSubjects().length;j++) { SubjectArea subject = new SubjectAreaDAO().get(Long.valueOf(iForm.getSubjects()[j])); File file = ApplicationProperties.getTempFile(name+"_"+subject.getSubjectAreaAbbreviation(), (iForm.getModeIdx()==PdfLegacyExamReport.sModeText?"txt":"pdf")); int nrExams = 0; for (ExamAssignmentInfo exam : exams) { if (exam.isOfSubjectArea(subject)) nrExams++; } log("&nbsp;&nbsp;Writing <a href='temp/"+file.getName()+"'>"+subject.getSubjectAreaAbbreviation()+"_"+reportName+"."+(iForm.getModeIdx()==PdfLegacyExamReport.sModeText?"txt":"pdf")+"</a>... ("+nrExams+" exams)"); PdfLegacyExamReport report = (PdfLegacyExamReport)reportClass. getConstructor(int.class, File.class, Session.class, ExamType.class, SubjectArea.class, Collection.class). newInstance(iForm.getModeIdx(), file, new SessionDAO().get(session.getUniqueId()), ExamTypeDAO.getInstance().get(iForm.getExamType()), subject, exams); report.setDirect(iForm.getDirect()); report.setM2d(iForm.getM2d()); report.setBtb(iForm.getBtb()); report.setDispRooms(iForm.getDispRooms()); report.setNoRoom(iForm.getNoRoom()); report.setTotals(iForm.getTotals()); report.setLimit(iForm.getLimit()==null || iForm.getLimit().length()==0?-1:Integer.parseInt(iForm.getLimit())); report.setRoomCode(iForm.getRoomCodes()); report.setDispLimits(iForm.getDispLimit()); report.setItype(iForm.getItype()); report.setClassSchedule(iForm.getClassSchedule()); report.printReport(); report.close(); output.put(subject.getSubjectAreaAbbreviation()+"_"+reportName+"."+(iForm.getModeIdx()==PdfLegacyExamReport.sModeText?"txt":"pdf"),file); Hashtable<String,File> files = outputPerSubject.get(subject); if (files==null) { files = new Hashtable(); outputPerSubject.put(subject,files); } files.put(subject.getSubjectAreaAbbreviation()+"_"+reportName+"."+(iForm.getModeIdx()==PdfLegacyExamReport.sModeText?"txt":"pdf"),file); if (report instanceof InstructorExamReport && iForm.getEmailInstructors()) { ireports = ((InstructorExamReport)report).printInstructorReports(iForm.getModeIdx(), name, new FileGenerator(name)); } else if (report instanceof StudentExamReport && iForm.getEmailStudents()) { sreports = ((StudentExamReport)report).printStudentReports(iForm.getModeIdx(), name, new FileGenerator(name)); } } } } iProgress = 0.9; byte[] buffer = new byte[32*1024]; int len = 0; if (output.isEmpty()) log("<font color='orange'>No report generated.</font>"); else if (iForm.getEmail()) { setStatus("Sending email(s)..."); if (iForm.getEmailDeputies()) { Hashtable<TimetableManager,Hashtable<String,File>> files2send = new Hashtable(); for (Map.Entry<SubjectArea, Hashtable<String,File>> entry : outputPerSubject.entrySet()) { if (entry.getKey().getDepartment().getTimetableManagers().isEmpty()) log("<font color='orange'>&nbsp;&nbsp;No manager associated with subject area "+entry.getKey().getSubjectAreaAbbreviation()+ " ("+entry.getKey().getDepartment().getLabel()+")</font>"); for (Iterator i=entry.getKey().getDepartment().getTimetableManagers().iterator();i.hasNext();) { TimetableManager g = (TimetableManager)i.next(); boolean receiveEmail = true; for (ManagerRole mr : (Set<ManagerRole>)g.getManagerRoles()){ if (!mr.getRole().hasRight(Right.DepartmentIndependent)) { receiveEmail = mr.isReceiveEmails() == null?false:mr.isReceiveEmails().booleanValue(); break; } } if (receiveEmail){ if (g.getEmailAddress()==null || g.getEmailAddress().length()==0) { log("<font color='orange'>&nbsp;&nbsp;Manager "+g.getName()+" has no email address.</font>"); } else { Hashtable<String,File> files = files2send.get(g); if (files==null) { files = new Hashtable<String,File>(); files2send.put(g, files); } files.putAll(entry.getValue()); } } } } if (files2send.isEmpty()) { log("<font color='red'>Nothing to send.</font>"); } else { Set<TimetableManager> managers = files2send.keySet(); while (!managers.isEmpty()) { TimetableManager manager = managers.iterator().next(); Hashtable<String,File> files = files2send.get(manager); managers.remove(manager); log("Sending email to "+manager.getName()+" ("+manager.getEmailAddress()+")..."); try { Email mail = Email.createEmail(); mail.setSubject(iForm.getSubject()==null?"Examination Report":iForm.getSubject()); mail.setText((iForm.getMessage()==null?"":iForm.getMessage()+"\r\n\r\n")+ "For an up-to-date report, please visit "+ iUrl+"/\r\n\r\n"+ "This email was automatically generated by "+ "UniTime "+Constants.getVersion()+ " (Univesity Timetabling Application, http://www.unitime.org)."); mail.addRecipient(manager.getEmailAddress(), manager.getName()); for (Iterator<TimetableManager> i=managers.iterator();i.hasNext();) { TimetableManager m = (TimetableManager)i.next(); if (files.equals(files2send.get(m))) { log("&nbsp;&nbsp;Including "+m.getName()+" ("+m.getEmailAddress()+")"); mail.addRecipient(m.getEmailAddress(),m.getName()); i.remove(); } } if (iForm.getAddress()!=null) for (StringTokenizer s=new StringTokenizer(iForm.getAddress(),";,\n\r ");s.hasMoreTokens();) mail.addRecipient(s.nextToken(), null); if (iForm.getCc()!=null) for (StringTokenizer s=new StringTokenizer(iForm.getCc(),";,\n\r ");s.hasMoreTokens();) mail.addRecipientCC(s.nextToken(), null); if (iForm.getBcc()!=null) for (StringTokenizer s=new StringTokenizer(iForm.getBcc(),";,\n\r ");s.hasMoreTokens();) mail.addRecipientBCC(s.nextToken(), null); for (Map.Entry<String, File> entry : files.entrySet()) { mail.addAttachement(entry.getValue(), session.getAcademicTerm()+session.getSessionStartYear()+ExamTypeDAO.getInstance().get(iForm.getExamType()).getReference()+"_"+entry.getKey()); log("&nbsp;&nbsp;Attaching <a href='temp/"+entry.getValue().getName()+"'>"+entry.getKey()+"</a>"); } mail.send(); log("Email sent."); } catch (Exception e) { log("<font color='red'>Unable to send email: "+e.getMessage()+"</font>"); setError(e); } } } } else { try { Email mail = Email.createEmail(); mail.setSubject(iForm.getSubject()==null?"Examination Report":iForm.getSubject()); mail.setText((iForm.getMessage()==null?"":iForm.getMessage()+"\r\n\r\n")+ "For an up-to-date report, please visit "+ iUrl+"/\r\n\r\n"+ "This email was automatically generated by "+ "UniTime "+Constants.getVersion()+ " (Univesity Timetabling Application, http://www.unitime.org)."); if (iForm.getAddress()!=null) for (StringTokenizer s=new StringTokenizer(iForm.getAddress(),";,\n\r ");s.hasMoreTokens();) mail.addRecipient(s.nextToken(), null); if (iForm.getCc()!=null) for (StringTokenizer s=new StringTokenizer(iForm.getCc(),";,\n\r ");s.hasMoreTokens();) mail.addRecipientCC(s.nextToken(), null); if (iForm.getBcc()!=null) for (StringTokenizer s=new StringTokenizer(iForm.getBcc(),";,\n\r ");s.hasMoreTokens();) mail.addRecipientBCC(s.nextToken(), null); for (Map.Entry<String, File> entry : output.entrySet()) { mail.addAttachement(entry.getValue(), session.getAcademicTerm()+session.getSessionStartYear()+ExamTypeDAO.getInstance().get(iForm.getExamType()).getReference()+"_"+entry.getKey()); } mail.send(); log("Email sent."); } catch (Exception e) { log("<font color='red'>Unable to send email: "+e.getMessage()+"</font>"); setError(e); } } if (iForm.getEmailInstructors() && ireports!=null && !ireports.isEmpty()) { setStatus("Emailing instructors..."); for (ExamInstructorInfo instructor : new TreeSet<ExamInstructorInfo>(ireports.keySet())) { File report = ireports.get(instructor); String email = instructor.getInstructor().getEmail(); if (email==null || email.length()==0) { log("&nbsp;&nbsp;<font color='orange'>Unable to email <a href='temp/"+report.getName()+"'>"+instructor.getName()+"</a> -- instructor has no email address.</font>"); continue; } try { Email mail = Email.createEmail(); mail.setSubject(iForm.getSubject()==null?"Examination Report":iForm.getSubject()); mail.setText((iForm.getMessage()==null?"":iForm.getMessage()+"\r\n\r\n")+ "For an up-to-date report, please visit "+ iUrl+"/exams.do\r\n\r\n"+ "This email was automatically generated by "+ "UniTime "+Constants.getVersion()+ " (Univesity Timetabling Application, http://www.unitime.org)."); mail.addRecipient(email, null); if (iForm.getCc()!=null) for (StringTokenizer s=new StringTokenizer(iForm.getCc(),";,\n\r ");s.hasMoreTokens();) mail.addRecipientCC(s.nextToken(), null); if (iForm.getBcc()!=null) for (StringTokenizer s=new StringTokenizer(iForm.getBcc(),";,\n\r ");s.hasMoreTokens();) mail.addRecipientBCC(s.nextToken(), null); mail.addAttachement(report, session.getAcademicTerm()+session.getSessionStartYear()+ExamTypeDAO.getInstance().get(iForm.getExamType()).getReference()+(iForm.getModeIdx()==PdfLegacyExamReport.sModeText?".txt":".pdf")); mail.send(); log("&nbsp;&nbsp;An email was sent to <a href='temp/"+report.getName()+"'>"+instructor.getName()+"</a>."); } catch (Exception e) { log("&nbsp;&nbsp;<font color='orange'>Unable to email <a href='temp/"+report.getName()+"'>"+instructor.getName()+"</a> -- "+e.getMessage()+".</font>"); setError(e); } } log("Emails sent."); } if (iForm.getEmailStudents() && sreports!=null && !sreports.isEmpty()) { setStatus("Emailing students..."); for (Student student : new TreeSet<Student>(sreports.keySet())) { File report = sreports.get(student); String email = student.getEmail(); if (email==null || email.length()==0) { log("&nbsp;&nbsp;<font color='orange'>Unable to email <a href='temp/"+report.getName()+"'>"+student.getName(DepartmentalInstructor.sNameFormatLastFist)+"</a> -- student has no email address.</font>"); continue; } try { Email mail = Email.createEmail(); mail.setSubject(iForm.getSubject()==null?"Examination Report":iForm.getSubject()); mail.setText((iForm.getMessage()==null?"":iForm.getMessage()+"\r\n\r\n")+ "For an up-to-date report, please visit "+ iUrl+"/exams.do\r\n\r\n"+ "This email was automatically generated by "+ "UniTime "+Constants.getVersion()+ " (Univesity Timetabling Application, http://www.unitime.org)."); mail.addRecipient(email, null); if (iForm.getCc()!=null) for (StringTokenizer s=new StringTokenizer(iForm.getCc(),";,\n\r ");s.hasMoreTokens();) mail.addRecipientCC(s.nextToken(), null); if (iForm.getBcc()!=null) for (StringTokenizer s=new StringTokenizer(iForm.getBcc(),";,\n\r ");s.hasMoreTokens();) mail.addRecipientBCC(s.nextToken(), null); mail.addAttachement(report, session.getAcademicTerm()+session.getSessionStartYear()+ExamTypeDAO.getInstance().get(iForm.getExamType()).getReference()+(iForm.getModeIdx()==PdfLegacyExamReport.sModeText?".txt":".pdf")); mail.send(); log("&nbsp;&nbsp;An email was sent to <a href='temp/"+report.getName()+"'>"+student.getName(DepartmentalInstructor.sNameFormatLastFist)+"</a>."); } catch (Exception e) { log("&nbsp;&nbsp;<font color='orange'>Unable to email <a href='temp/"+report.getName()+"'>"+student.getName(DepartmentalInstructor.sNameFormatLastFist)+"</a> -- "+e.getMessage()+".</font>"); setError(e); } } log("Emails sent."); } } if (output.isEmpty()) { throw new Exception("Nothing generated."); } else if (output.size()==1) { setOutput(output.elements().nextElement()); } else { FileInputStream fis = null; ZipOutputStream zip = null; try { File zipFile = ApplicationProperties.getTempFile(session.getAcademicTerm()+session.getSessionStartYear()+ExamTypeDAO.getInstance().get(iForm.getExamType()).getReference(), "zip"); log("Writing <a href='temp/"+zipFile.getName()+"'>"+session.getAcademicTerm()+session.getSessionStartYear()+ExamTypeDAO.getInstance().get(iForm.getExamType()).getReference()+".zip</a>..."); zip = new ZipOutputStream(new FileOutputStream(zipFile)); for (Map.Entry<String, File> entry : output.entrySet()) { zip.putNextEntry(new ZipEntry(entry.getKey())); fis = new FileInputStream(entry.getValue()); while ((len=fis.read(buffer))>0) zip.write(buffer, 0, len); fis.close(); fis = null; zip.closeEntry(); } zip.flush(); zip.close(); setOutput(zipFile); } catch (IOException e) { if (fis!=null) fis.close(); if (zip!=null) zip.close(); setError(e); } } iProgress = 1.0; setStatus("All done."); } catch (Exception e) { log("<font color='red'>Process failed: "+e.getMessage()+" (exception "+e.getClass().getName()+")</font>"); sLog.error(e.getMessage(),e); setError(e); } } @Override public String name() { return iName; } @Override public double progress() { return iProgress; } @Override public String type() { return TYPE; } public static class FileGenerator implements InstructorExamReport.FileGenerator { String iName; public FileGenerator(String name) { iName = name; } public File generate(String prefix, String ext) { return ApplicationProperties.getTempFile(iName+"_"+prefix, ext); } } }
JavaSource/org/unitime/timetable/util/queue/PdfExamReportQueueItem.java
/* * UniTime 3.2 (University Timetabling Application) * Copyright (C) 2010, UniTime LLC, and individual contributors * as indicated by the @authors tag. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License along * with this program. If not, see <http://www.gnu.org/licenses/>. * */ package org.unitime.timetable.util.queue; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.Collection; import java.util.HashSet; import java.util.Hashtable; import java.util.Iterator; import java.util.Map; import java.util.Set; import java.util.StringTokenizer; import java.util.TreeSet; import java.util.zip.ZipEntry; import java.util.zip.ZipOutputStream; import javax.servlet.http.HttpServletRequest; import org.unitime.commons.Email; import org.unitime.timetable.ApplicationProperties; import org.unitime.timetable.form.ExamPdfReportForm; import org.unitime.timetable.model.DepartmentalInstructor; import org.unitime.timetable.model.ExamOwner; import org.unitime.timetable.model.ExamType; import org.unitime.timetable.model.ManagerRole; import org.unitime.timetable.model.Session; import org.unitime.timetable.model.Student; import org.unitime.timetable.model.SubjectArea; import org.unitime.timetable.model.TimetableManager; import org.unitime.timetable.model.dao.ExamDAO; import org.unitime.timetable.model.dao.ExamTypeDAO; import org.unitime.timetable.model.dao.SessionDAO; import org.unitime.timetable.model.dao.SubjectAreaDAO; import org.unitime.timetable.reports.exam.InstructorExamReport; import org.unitime.timetable.reports.exam.PdfLegacyExamReport; import org.unitime.timetable.reports.exam.StudentExamReport; import org.unitime.timetable.security.UserContext; import org.unitime.timetable.security.rights.Right; import org.unitime.timetable.solver.exam.ExamSolverProxy; import org.unitime.timetable.solver.exam.ui.ExamAssignmentInfo; import org.unitime.timetable.solver.exam.ui.ExamInfo.ExamInstructorInfo; import org.unitime.timetable.util.Constants; /** * * @author Tomas Muller * */ public class PdfExamReportQueueItem extends QueueItem { public static String TYPE = "PDF Exam Report"; private ExamPdfReportForm iForm; private String iUrl = null; private ExamSolverProxy iExamSolver; private String iName = null; private double iProgress = 0; public PdfExamReportQueueItem(Session session, UserContext owner, ExamPdfReportForm form, HttpServletRequest request, ExamSolverProxy examSolver) { super(session, owner); iForm = form; iUrl = request.getScheme()+"://"+request.getServerName()+":"+request.getServerPort()+request.getContextPath(); iExamSolver = examSolver; iName = ExamTypeDAO.getInstance().get(iForm.getExamType()).getLabel() + " "; for (int i=0;i<iForm.getReports().length;i++) { if (i > 0) iName += ", "; iName += iForm.getReports()[i]; } if (!iForm.getAll()) { iName += " ("; for (int i=0;i<iForm.getSubjects().length;i++) { SubjectArea subject = new SubjectAreaDAO().get(Long.valueOf(iForm.getSubjects()[i])); if (i > 0) iName += ", "; iName += subject.getSubjectAreaAbbreviation(); } iName += ")"; } } @Override public void execute() { /* Logger repLog = Logger.getLogger("org.unitime.timetable.reports.exam"); Appender myAppender = new AppenderSkeleton() { @Override public boolean requiresLayout() { return false; } @Override public void close() { } @Override protected void append(LoggingEvent event) { if (event.getMessage() == null) return; if (event.getLevel().toInt() >= Priority.ERROR_INT) { error(event.getMessage().toString()); } else if (event.getLevel().toInt() >= Priority.WARN_INT) { warn(event.getMessage().toString()); } else log(event.getMessage().toString()); } }; repLog.addAppender(myAppender); */ org.hibernate.Session hibSession = ExamDAO.getInstance().getSession(); createReports(hibSession); if (hibSession.isOpen()) hibSession.close(); //repLog.removeAppender(myAppender); } private void createReports(org.hibernate.Session hibSession) { try { iProgress = 0; setStatus("Loading exams..."); TreeSet<ExamAssignmentInfo> exams = null; if (iExamSolver!=null && iExamSolver.getExamTypeId().equals(iForm.getExamType()) && "true".equals(ApplicationProperties.getProperty("tmtbl.exam.pdfReports.canUseSolution","false"))) { exams = new TreeSet(iExamSolver.getAssignedExams()); if (iForm.getIgnoreEmptyExams()) for (Iterator<ExamAssignmentInfo> i=exams.iterator();i.hasNext();) { if (i.next().getStudentIds().isEmpty()) i.remove(); } if ("true".equals(ApplicationProperties.getProperty("tmtbl.exam.pdfReports.useSolution.preloadCrosslistedExams","true"))) { setStatus(" Fetching exams..."); hibSession.createQuery( "select o from Exam x inner join x.owners o where x.session.uniqueId=:sessionId and x.examType.uniqueId=:examTypeId" ).setLong("sessionId", iExamSolver.getSessionId()).setLong("examTypeId", iExamSolver.getExamTypeId()).setCacheable(true).list(); setStatus(" Fetching related objects (class)..."); hibSession.createQuery( "select c from Class_ c, ExamOwner o where o.exam.session.uniqueId=:sessionId and o.exam.examType.uniqueId=:examTypeId and o.ownerType=:classType and c.uniqueId=o.ownerId") .setLong("sessionId", iExamSolver.getSessionId()) .setLong("examTypeId", iExamSolver.getExamTypeId()) .setInteger("classType", ExamOwner.sOwnerTypeClass).setCacheable(true).list(); setStatus(" Fetching related objects (config)..."); hibSession.createQuery( "select c from InstrOfferingConfig c, ExamOwner o where o.exam.session.uniqueId=:sessionId and o.exam.examType.uniqueId=:examTypeId and o.ownerType=:configType and c.uniqueId=o.ownerId") .setLong("sessionId", iExamSolver.getSessionId()) .setLong("examTypeId", iExamSolver.getExamTypeId()) .setInteger("configType", ExamOwner.sOwnerTypeConfig).setCacheable(true).list(); setStatus(" Fetching related objects (course)..."); hibSession.createQuery( "select c from CourseOffering c, ExamOwner o where o.exam.session.uniqueId=:sessionId and o.exam.examType.uniqueId=:examTypeId and o.ownerType=:courseType and c.uniqueId=o.ownerId") .setLong("sessionId", iExamSolver.getSessionId()) .setLong("examTypeId", iExamSolver.getExamTypeId()) .setInteger("courseType", ExamOwner.sOwnerTypeCourse).setCacheable(true).list(); setStatus(" Fetching related objects (offering)..."); hibSession.createQuery( "select c from InstructionalOffering c, ExamOwner o where o.exam.session.uniqueId=:sessionId and o.exam.examType.uniqueId=:examTypeId and o.ownerType=:offeringType and c.uniqueId=o.ownerId") .setLong("sessionId", iExamSolver.getSessionId()) .setLong("examTypeId", iExamSolver.getExamTypeId()) .setInteger("offeringType", ExamOwner.sOwnerTypeOffering).setCacheable(true).list(); Hashtable<Long,Hashtable<Long,Set<Long>>> owner2course2students = new Hashtable(); setStatus(" Loading students (class)..."); for (Iterator i= hibSession.createQuery( "select o.uniqueId, e.student.uniqueId, e.courseOffering.uniqueId from "+ "Exam x inner join x.owners o, "+ "StudentClassEnrollment e inner join e.clazz c "+ "where x.session.uniqueId=:sessionId and x.examType.uniqueId=:examTypeId and "+ "o.ownerType="+org.unitime.timetable.model.ExamOwner.sOwnerTypeClass+" and "+ "o.ownerId=c.uniqueId").setLong("sessionId", iExamSolver.getSessionId()).setLong("examTypeId", iExamSolver.getExamTypeId()).setCacheable(true).list().iterator();i.hasNext();) { Object[] o = (Object[])i.next(); Long ownerId = (Long)o[0]; Long studentId = (Long)o[1]; Long courseId = (Long)o[2]; Hashtable<Long, Set<Long>> course2students = owner2course2students.get(ownerId); if (course2students == null) { course2students = new Hashtable<Long, Set<Long>>(); owner2course2students.put(ownerId, course2students); } Set<Long> studentsOfCourse = course2students.get(courseId); if (studentsOfCourse == null) { studentsOfCourse = new HashSet<Long>(); course2students.put(courseId, studentsOfCourse); } studentsOfCourse.add(studentId); } setStatus(" Loading students (config)..."); for (Iterator i= hibSession.createQuery( "select o.uniqueId, e.student.uniqueId, e.courseOffering.uniqueId from "+ "Exam x inner join x.owners o, "+ "StudentClassEnrollment e inner join e.clazz c " + "inner join c.schedulingSubpart.instrOfferingConfig ioc " + "where x.session.uniqueId=:sessionId and x.examType.uniqueId=:examTypeId and "+ "o.ownerType="+org.unitime.timetable.model.ExamOwner.sOwnerTypeConfig+" and "+ "o.ownerId=ioc.uniqueId").setLong("sessionId", iExamSolver.getSessionId()).setLong("examTypeId", iExamSolver.getExamTypeId()).setCacheable(true).list().iterator();i.hasNext();) { Object[] o = (Object[])i.next(); Long ownerId = (Long)o[0]; Long studentId = (Long)o[1]; Long courseId = (Long)o[2]; Hashtable<Long, Set<Long>> course2students = owner2course2students.get(ownerId); if (course2students == null) { course2students = new Hashtable<Long, Set<Long>>(); owner2course2students.put(ownerId, course2students); } Set<Long> studentsOfCourse = course2students.get(courseId); if (studentsOfCourse == null) { studentsOfCourse = new HashSet<Long>(); course2students.put(courseId, studentsOfCourse); } studentsOfCourse.add(studentId); } setStatus(" Loading students (course)..."); for (Iterator i= hibSession.createQuery( "select o.uniqueId, e.student.uniqueId, e.courseOffering.uniqueId from "+ "Exam x inner join x.owners o, "+ "StudentClassEnrollment e inner join e.courseOffering co " + "where x.session.uniqueId=:sessionId and x.examType.uniqueId=:examTypeId and "+ "o.ownerType="+org.unitime.timetable.model.ExamOwner.sOwnerTypeCourse+" and "+ "o.ownerId=co.uniqueId").setLong("sessionId", iExamSolver.getSessionId()).setLong("examTypeId", iExamSolver.getExamTypeId()).setCacheable(true).list().iterator();i.hasNext();) { Object[] o = (Object[])i.next(); Long ownerId = (Long)o[0]; Long studentId = (Long)o[1]; Long courseId = (Long)o[2]; Hashtable<Long, Set<Long>> course2students = owner2course2students.get(ownerId); if (course2students == null) { course2students = new Hashtable<Long, Set<Long>>(); owner2course2students.put(ownerId, course2students); } Set<Long> studentsOfCourse = course2students.get(courseId); if (studentsOfCourse == null) { studentsOfCourse = new HashSet<Long>(); course2students.put(courseId, studentsOfCourse); } studentsOfCourse.add(studentId); } setStatus(" Loading students (offering)..."); for (Iterator i= hibSession.createQuery( "select o.uniqueId, e.student.uniqueId, e.courseOffering.uniqueId from "+ "Exam x inner join x.owners o, "+ "StudentClassEnrollment e inner join e.courseOffering.instructionalOffering io " + "where x.session.uniqueId=:sessionId and x.examType.uniqueId=:examTypeId and "+ "o.ownerType="+org.unitime.timetable.model.ExamOwner.sOwnerTypeOffering+" and "+ "o.ownerId=io.uniqueId").setLong("sessionId", iExamSolver.getSessionId()).setLong("examTypeId", iExamSolver.getExamTypeId()).setCacheable(true).list().iterator();i.hasNext();) { Object[] o = (Object[])i.next(); Long ownerId = (Long)o[0]; Long studentId = (Long)o[1]; Long courseId = (Long)o[2]; Hashtable<Long, Set<Long>> course2students = owner2course2students.get(ownerId); if (course2students == null) { course2students = new Hashtable<Long, Set<Long>>(); owner2course2students.put(ownerId, course2students); } Set<Long> studentsOfCourse = course2students.get(courseId); if (studentsOfCourse == null) { studentsOfCourse = new HashSet<Long>(); course2students.put(courseId, studentsOfCourse); } studentsOfCourse.add(studentId); } for (ExamAssignmentInfo exam: exams) { exam.createSectionsIncludeCrosslistedDummies(owner2course2students); } } } else { exams = PdfLegacyExamReport.loadExams(getSessionId(), iForm.getExamType(), true, iForm.getIgnoreEmptyExams(), true); } iProgress = 0.1; /* if (iForm.getAll()) { for (Iterator i=Exam.findAll(session.getUniqueId(), iForm.getExamType()).iterator();i.hasNext();) { exams.add(new ExamAssignmentInfo((Exam)i.next())); } } else { for (int i=0;i<iForm.getSubjects().length;i++) { SubjectArea subject = new SubjectAreaDAO().get(Long.valueOf(iForm.getSubjects()[i])); TreeSet<ExamAssignmentInfo> examsThisSubject = new TreeSet(); for (Iterator j=Exam.findExamsOfSubjectArea(subject.getUniqueId(), iForm.getExamType()).iterator();j.hasNext();) { examsThisSubject.add(new ExamAssignmentInfo((Exam)j.next())); } examsPerSubject.put(subject, examsThisSubject); } } */ Hashtable<String,File> output = new Hashtable(); Hashtable<SubjectArea,Hashtable<String,File>> outputPerSubject = new Hashtable(); Hashtable<ExamInstructorInfo,File> ireports = null; Hashtable<Student,File> sreports = null; Session session = getSession(); for (int i=0;i<iForm.getReports().length;i++) { iProgress = 0.1 + (0.8 / iForm.getReports().length) * i; setStatus("Generating "+iForm.getReports()[i]+"..."); Class reportClass = ExamPdfReportForm.sRegisteredReports.get(iForm.getReports()[i]); String reportName = null; for (Map.Entry<String, Class> entry : PdfLegacyExamReport.sRegisteredReports.entrySet()) if (entry.getValue().equals(reportClass)) reportName = entry.getKey(); if (reportName==null) reportName = "r"+(i+1); String name = session.getAcademicTerm()+session.getSessionStartYear()+ExamTypeDAO.getInstance().get(iForm.getExamType()).getReference()+"_"+reportName; if (iForm.getAll()) { File file = ApplicationProperties.getTempFile(name, (iForm.getModeIdx()==PdfLegacyExamReport.sModeText?"txt":"pdf")); log("&nbsp;&nbsp;Writing <a href='temp/"+file.getName()+"'>"+reportName+"."+(iForm.getModeIdx()==PdfLegacyExamReport.sModeText?"txt":"pdf")+"</a>... ("+exams.size()+" exams)"); PdfLegacyExamReport report = (PdfLegacyExamReport)reportClass. getConstructor(int.class, File.class, Session.class, ExamType.class, SubjectArea.class, Collection.class). newInstance(iForm.getModeIdx(), file, new SessionDAO().get(session.getUniqueId()), ExamTypeDAO.getInstance().get(iForm.getExamType()), null, exams); report.setDirect(iForm.getDirect()); report.setM2d(iForm.getM2d()); report.setBtb(iForm.getBtb()); report.setDispRooms(iForm.getDispRooms()); report.setNoRoom(iForm.getNoRoom()); report.setTotals(iForm.getTotals()); report.setLimit(iForm.getLimit()==null || iForm.getLimit().length()==0?-1:Integer.parseInt(iForm.getLimit())); report.setRoomCode(iForm.getRoomCodes()); report.setDispLimits(iForm.getDispLimit()); report.setSince(iForm.getSince()==null || iForm.getSince().length()==0?null:new SimpleDateFormat("MM/dd/yyyy").parse(iForm.getSince())); report.setItype(iForm.getItype()); report.setClassSchedule(iForm.getClassSchedule()); report.printReport(); report.close(); output.put(reportName+"."+(iForm.getModeIdx()==PdfLegacyExamReport.sModeText?"txt":"pdf"),file); if (report instanceof InstructorExamReport && iForm.getEmailInstructors()) { ireports = ((InstructorExamReport)report).printInstructorReports(iForm.getModeIdx(), name, new FileGenerator(name)); } else if (report instanceof StudentExamReport && iForm.getEmailStudents()) { sreports = ((StudentExamReport)report).printStudentReports(iForm.getModeIdx(), name, new FileGenerator(name)); } } else { for (int j=0;j<iForm.getSubjects().length;j++) { SubjectArea subject = new SubjectAreaDAO().get(Long.valueOf(iForm.getSubjects()[j])); File file = ApplicationProperties.getTempFile(name+"_"+subject.getSubjectAreaAbbreviation(), (iForm.getModeIdx()==PdfLegacyExamReport.sModeText?"txt":"pdf")); int nrExams = 0; for (ExamAssignmentInfo exam : exams) { if (exam.isOfSubjectArea(subject)) nrExams++; } log("&nbsp;&nbsp;Writing <a href='temp/"+file.getName()+"'>"+subject.getSubjectAreaAbbreviation()+"_"+reportName+"."+(iForm.getModeIdx()==PdfLegacyExamReport.sModeText?"txt":"pdf")+"</a>... ("+nrExams+" exams)"); PdfLegacyExamReport report = (PdfLegacyExamReport)reportClass. getConstructor(int.class, File.class, Session.class, int.class, SubjectArea.class, Collection.class). newInstance(iForm.getModeIdx(), file, new SessionDAO().get(session.getUniqueId()), iForm.getExamType(), subject, exams); report.setDirect(iForm.getDirect()); report.setM2d(iForm.getM2d()); report.setBtb(iForm.getBtb()); report.setDispRooms(iForm.getDispRooms()); report.setNoRoom(iForm.getNoRoom()); report.setTotals(iForm.getTotals()); report.setLimit(iForm.getLimit()==null || iForm.getLimit().length()==0?-1:Integer.parseInt(iForm.getLimit())); report.setRoomCode(iForm.getRoomCodes()); report.setDispLimits(iForm.getDispLimit()); report.setItype(iForm.getItype()); report.setClassSchedule(iForm.getClassSchedule()); report.printReport(); report.close(); output.put(subject.getSubjectAreaAbbreviation()+"_"+reportName+"."+(iForm.getModeIdx()==PdfLegacyExamReport.sModeText?"txt":"pdf"),file); Hashtable<String,File> files = outputPerSubject.get(subject); if (files==null) { files = new Hashtable(); outputPerSubject.put(subject,files); } files.put(subject.getSubjectAreaAbbreviation()+"_"+reportName+"."+(iForm.getModeIdx()==PdfLegacyExamReport.sModeText?"txt":"pdf"),file); if (report instanceof InstructorExamReport && iForm.getEmailInstructors()) { ireports = ((InstructorExamReport)report).printInstructorReports(iForm.getModeIdx(), name, new FileGenerator(name)); } else if (report instanceof StudentExamReport && iForm.getEmailStudents()) { sreports = ((StudentExamReport)report).printStudentReports(iForm.getModeIdx(), name, new FileGenerator(name)); } } } } iProgress = 0.9; byte[] buffer = new byte[32*1024]; int len = 0; if (output.isEmpty()) log("<font color='orange'>No report generated.</font>"); else if (iForm.getEmail()) { setStatus("Sending email(s)..."); if (iForm.getEmailDeputies()) { Hashtable<TimetableManager,Hashtable<String,File>> files2send = new Hashtable(); for (Map.Entry<SubjectArea, Hashtable<String,File>> entry : outputPerSubject.entrySet()) { if (entry.getKey().getDepartment().getTimetableManagers().isEmpty()) log("<font color='orange'>&nbsp;&nbsp;No manager associated with subject area "+entry.getKey().getSubjectAreaAbbreviation()+ " ("+entry.getKey().getDepartment().getLabel()+")</font>"); for (Iterator i=entry.getKey().getDepartment().getTimetableManagers().iterator();i.hasNext();) { TimetableManager g = (TimetableManager)i.next(); boolean receiveEmail = true; for (ManagerRole mr : (Set<ManagerRole>)g.getManagerRoles()){ if (!mr.getRole().hasRight(Right.DepartmentIndependent)) { receiveEmail = mr.isReceiveEmails() == null?false:mr.isReceiveEmails().booleanValue(); break; } } if (receiveEmail){ if (g.getEmailAddress()==null || g.getEmailAddress().length()==0) { log("<font color='orange'>&nbsp;&nbsp;Manager "+g.getName()+" has no email address.</font>"); } else { Hashtable<String,File> files = files2send.get(g); if (files==null) { files = new Hashtable<String,File>(); files2send.put(g, files); } files.putAll(entry.getValue()); } } } } if (files2send.isEmpty()) { log("<font color='red'>Nothing to send.</font>"); } else { Set<TimetableManager> managers = files2send.keySet(); while (!managers.isEmpty()) { TimetableManager manager = managers.iterator().next(); Hashtable<String,File> files = files2send.get(manager); managers.remove(manager); log("Sending email to "+manager.getName()+" ("+manager.getEmailAddress()+")..."); try { Email mail = Email.createEmail(); mail.setSubject(iForm.getSubject()==null?"Examination Report":iForm.getSubject()); mail.setText((iForm.getMessage()==null?"":iForm.getMessage()+"\r\n\r\n")+ "For an up-to-date report, please visit "+ iUrl+"/\r\n\r\n"+ "This email was automatically generated by "+ "UniTime "+Constants.getVersion()+ " (Univesity Timetabling Application, http://www.unitime.org)."); mail.addRecipient(manager.getEmailAddress(), manager.getName()); for (Iterator<TimetableManager> i=managers.iterator();i.hasNext();) { TimetableManager m = (TimetableManager)i.next(); if (files.equals(files2send.get(m))) { log("&nbsp;&nbsp;Including "+m.getName()+" ("+m.getEmailAddress()+")"); mail.addRecipient(m.getEmailAddress(),m.getName()); i.remove(); } } if (iForm.getAddress()!=null) for (StringTokenizer s=new StringTokenizer(iForm.getAddress(),";,\n\r ");s.hasMoreTokens();) mail.addRecipient(s.nextToken(), null); if (iForm.getCc()!=null) for (StringTokenizer s=new StringTokenizer(iForm.getCc(),";,\n\r ");s.hasMoreTokens();) mail.addRecipientCC(s.nextToken(), null); if (iForm.getBcc()!=null) for (StringTokenizer s=new StringTokenizer(iForm.getBcc(),";,\n\r ");s.hasMoreTokens();) mail.addRecipientBCC(s.nextToken(), null); for (Map.Entry<String, File> entry : files.entrySet()) { mail.addAttachement(entry.getValue(), session.getAcademicTerm()+session.getSessionStartYear()+ExamTypeDAO.getInstance().get(iForm.getExamType()).getReference()+"_"+entry.getKey()); log("&nbsp;&nbsp;Attaching <a href='temp/"+entry.getValue().getName()+"'>"+entry.getKey()+"</a>"); } mail.send(); log("Email sent."); } catch (Exception e) { log("<font color='red'>Unable to send email: "+e.getMessage()+"</font>"); setError(e); } } } } else { try { Email mail = Email.createEmail(); mail.setSubject(iForm.getSubject()==null?"Examination Report":iForm.getSubject()); mail.setText((iForm.getMessage()==null?"":iForm.getMessage()+"\r\n\r\n")+ "For an up-to-date report, please visit "+ iUrl+"/\r\n\r\n"+ "This email was automatically generated by "+ "UniTime "+Constants.getVersion()+ " (Univesity Timetabling Application, http://www.unitime.org)."); if (iForm.getAddress()!=null) for (StringTokenizer s=new StringTokenizer(iForm.getAddress(),";,\n\r ");s.hasMoreTokens();) mail.addRecipient(s.nextToken(), null); if (iForm.getCc()!=null) for (StringTokenizer s=new StringTokenizer(iForm.getCc(),";,\n\r ");s.hasMoreTokens();) mail.addRecipientCC(s.nextToken(), null); if (iForm.getBcc()!=null) for (StringTokenizer s=new StringTokenizer(iForm.getBcc(),";,\n\r ");s.hasMoreTokens();) mail.addRecipientBCC(s.nextToken(), null); for (Map.Entry<String, File> entry : output.entrySet()) { mail.addAttachement(entry.getValue(), session.getAcademicTerm()+session.getSessionStartYear()+ExamTypeDAO.getInstance().get(iForm.getExamType()).getReference()+"_"+entry.getKey()); } mail.send(); log("Email sent."); } catch (Exception e) { log("<font color='red'>Unable to send email: "+e.getMessage()+"</font>"); setError(e); } } if (iForm.getEmailInstructors() && ireports!=null && !ireports.isEmpty()) { setStatus("Emailing instructors..."); for (ExamInstructorInfo instructor : new TreeSet<ExamInstructorInfo>(ireports.keySet())) { File report = ireports.get(instructor); String email = instructor.getInstructor().getEmail(); if (email==null || email.length()==0) { log("&nbsp;&nbsp;<font color='orange'>Unable to email <a href='temp/"+report.getName()+"'>"+instructor.getName()+"</a> -- instructor has no email address.</font>"); continue; } try { Email mail = Email.createEmail(); mail.setSubject(iForm.getSubject()==null?"Examination Report":iForm.getSubject()); mail.setText((iForm.getMessage()==null?"":iForm.getMessage()+"\r\n\r\n")+ "For an up-to-date report, please visit "+ iUrl+"/exams.do\r\n\r\n"+ "This email was automatically generated by "+ "UniTime "+Constants.getVersion()+ " (Univesity Timetabling Application, http://www.unitime.org)."); mail.addRecipient(email, null); if (iForm.getCc()!=null) for (StringTokenizer s=new StringTokenizer(iForm.getCc(),";,\n\r ");s.hasMoreTokens();) mail.addRecipientCC(s.nextToken(), null); if (iForm.getBcc()!=null) for (StringTokenizer s=new StringTokenizer(iForm.getBcc(),";,\n\r ");s.hasMoreTokens();) mail.addRecipientBCC(s.nextToken(), null); mail.addAttachement(report, session.getAcademicTerm()+session.getSessionStartYear()+ExamTypeDAO.getInstance().get(iForm.getExamType()).getReference()+(iForm.getModeIdx()==PdfLegacyExamReport.sModeText?".txt":".pdf")); mail.send(); log("&nbsp;&nbsp;An email was sent to <a href='temp/"+report.getName()+"'>"+instructor.getName()+"</a>."); } catch (Exception e) { log("&nbsp;&nbsp;<font color='orange'>Unable to email <a href='temp/"+report.getName()+"'>"+instructor.getName()+"</a> -- "+e.getMessage()+".</font>"); setError(e); } } log("Emails sent."); } if (iForm.getEmailStudents() && sreports!=null && !sreports.isEmpty()) { setStatus("Emailing students..."); for (Student student : new TreeSet<Student>(sreports.keySet())) { File report = sreports.get(student); String email = student.getEmail(); if (email==null || email.length()==0) { log("&nbsp;&nbsp;<font color='orange'>Unable to email <a href='temp/"+report.getName()+"'>"+student.getName(DepartmentalInstructor.sNameFormatLastFist)+"</a> -- student has no email address.</font>"); continue; } try { Email mail = Email.createEmail(); mail.setSubject(iForm.getSubject()==null?"Examination Report":iForm.getSubject()); mail.setText((iForm.getMessage()==null?"":iForm.getMessage()+"\r\n\r\n")+ "For an up-to-date report, please visit "+ iUrl+"/exams.do\r\n\r\n"+ "This email was automatically generated by "+ "UniTime "+Constants.getVersion()+ " (Univesity Timetabling Application, http://www.unitime.org)."); mail.addRecipient(email, null); if (iForm.getCc()!=null) for (StringTokenizer s=new StringTokenizer(iForm.getCc(),";,\n\r ");s.hasMoreTokens();) mail.addRecipientCC(s.nextToken(), null); if (iForm.getBcc()!=null) for (StringTokenizer s=new StringTokenizer(iForm.getBcc(),";,\n\r ");s.hasMoreTokens();) mail.addRecipientBCC(s.nextToken(), null); mail.addAttachement(report, session.getAcademicTerm()+session.getSessionStartYear()+ExamTypeDAO.getInstance().get(iForm.getExamType()).getReference()+(iForm.getModeIdx()==PdfLegacyExamReport.sModeText?".txt":".pdf")); mail.send(); log("&nbsp;&nbsp;An email was sent to <a href='temp/"+report.getName()+"'>"+student.getName(DepartmentalInstructor.sNameFormatLastFist)+"</a>."); } catch (Exception e) { log("&nbsp;&nbsp;<font color='orange'>Unable to email <a href='temp/"+report.getName()+"'>"+student.getName(DepartmentalInstructor.sNameFormatLastFist)+"</a> -- "+e.getMessage()+".</font>"); setError(e); } } log("Emails sent."); } } if (output.isEmpty()) { throw new Exception("Nothing generated."); } else if (output.size()==1) { setOutput(output.elements().nextElement()); } else { FileInputStream fis = null; ZipOutputStream zip = null; try { File zipFile = ApplicationProperties.getTempFile(session.getAcademicTerm()+session.getSessionStartYear()+ExamTypeDAO.getInstance().get(iForm.getExamType()).getReference(), "zip"); log("Writing <a href='temp/"+zipFile.getName()+"'>"+session.getAcademicTerm()+session.getSessionStartYear()+ExamTypeDAO.getInstance().get(iForm.getExamType()).getReference()+".zip</a>..."); zip = new ZipOutputStream(new FileOutputStream(zipFile)); for (Map.Entry<String, File> entry : output.entrySet()) { zip.putNextEntry(new ZipEntry(entry.getKey())); fis = new FileInputStream(entry.getValue()); while ((len=fis.read(buffer))>0) zip.write(buffer, 0, len); fis.close(); fis = null; zip.closeEntry(); } zip.flush(); zip.close(); setOutput(zipFile); } catch (IOException e) { if (fis!=null) fis.close(); if (zip!=null) zip.close(); setError(e); } } iProgress = 1.0; setStatus("All done."); } catch (Exception e) { log("<font color='red'>Process failed: "+e.getMessage()+" (exception "+e.getClass().getName()+")</font>"); sLog.error(e.getMessage(),e); setError(e); } } @Override public String name() { return iName; } @Override public double progress() { return iProgress; } @Override public String type() { return TYPE; } public static class FileGenerator implements InstructorExamReport.FileGenerator { String iName; public FileGenerator(String name) { iName = name; } public File generate(String prefix, String ext) { return ApplicationProperties.getTempFile(iName+"_"+prefix, ext); } } }
Examination PDF Reports - fixed NoSuchMethodException when there is one or more subject areas (but not all) selected - the bug was introduced with multiple examination types, revision 3315
JavaSource/org/unitime/timetable/util/queue/PdfExamReportQueueItem.java
Examination PDF Reports - fixed NoSuchMethodException when there is one or more subject areas (but not all) selected - the bug was introduced with multiple examination types, revision 3315
Java
apache-2.0
27760276a5effa50aa1c6863b8bacaf8995bf26b
0
thisdotrob/sunshine-udacity
package com.example.android.sunshine.app; import android.content.Context; import android.database.Cursor; import android.support.v4.widget.CursorAdapter; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import android.widget.TextView; public class ForecastAdapter extends CursorAdapter { private final int VIEW_TYPE_TODAY = 0; private final int VIEW_TYPE_FUTURE_DAY = 1; public ForecastAdapter(Context context, Cursor c, int flags) { super(context, c, flags); } @Override public View newView(Context context, Cursor cursor, ViewGroup parent) { int viewType = getItemViewType(cursor.getPosition()); int layoutId = -1; if (viewType == VIEW_TYPE_TODAY) { layoutId = R.layout.list_item_forecast_today; } else if (viewType == VIEW_TYPE_FUTURE_DAY) { layoutId = R.layout.list_item_forecast; } View view = LayoutInflater.from(context).inflate(layoutId, parent, false); ViewHolder viewHolder = new ViewHolder(view); view.setTag(viewHolder); return view; } @Override public void bindView(View view, Context context, Cursor cursor) { ViewHolder viewHolder = (ViewHolder) view.getTag(); int iconId = cursor.getInt(MainFragment.COL_WEATHER_ID); double high = cursor.getDouble(MainFragment.COL_WEATHER_MAX_TEMP); double low = cursor.getDouble(MainFragment.COL_WEATHER_MIN_TEMP); String description = cursor.getString(MainFragment.COL_WEATHER_DESC); long dateInMillis = cursor.getLong(MainFragment.COL_WEATHER_DATE); boolean isMetric = Utility.isMetric(context); viewHolder.iconView.setImageResource(R.drawable.ic_launcher); viewHolder.dateView.setText(Utility.formatDate(dateInMillis)); viewHolder.descriptionView.setText(description); viewHolder.highTempView.setText(Utility.formatTemperature(high, isMetric)); viewHolder.lowTempView.setText(Utility.formatTemperature(low, isMetric)); } @Override public int getViewTypeCount() { return 2; } @Override public int getItemViewType(int position) { return (position == 0) ? VIEW_TYPE_TODAY : VIEW_TYPE_FUTURE_DAY; } public static class ViewHolder { public final ImageView iconView; public final TextView dateView; public final TextView descriptionView; public final TextView highTempView; public final TextView lowTempView; public ViewHolder(View view) { iconView = (ImageView) view.findViewById(R.id.list_item_icon); dateView = (TextView) view.findViewById(R.id.list_item_date_textview); descriptionView = (TextView) view.findViewById(R.id.list_item_forecast_textview); highTempView = (TextView) view.findViewById(R.id.list_item_high_textview); lowTempView = (TextView) view.findViewById(R.id.list_item_low_textview); } } }
app/src/main/java/com/example/android/sunshine/app/ForecastAdapter.java
package com.example.android.sunshine.app; import android.content.Context; import android.database.Cursor; import android.support.v4.widget.CursorAdapter; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.ImageView; import android.widget.TextView; public class ForecastAdapter extends CursorAdapter { private final int VIEW_TYPE_TODAY = 0; private final int VIEW_TYPE_FUTURE_DAY = 1; public ForecastAdapter(Context context, Cursor c, int flags) { super(context, c, flags); } @Override public View newView(Context context, Cursor cursor, ViewGroup parent) { int viewType = getItemViewType(cursor.getPosition()); int layoutId = -1; if (viewType == VIEW_TYPE_TODAY) { layoutId = R.layout.list_item_forecast_today; } else if (viewType == VIEW_TYPE_FUTURE_DAY) { layoutId = R.layout.list_item_forecast; } View view = LayoutInflater.from(context).inflate(layoutId, parent, false); return view; } @Override public void bindView(View view, Context context, Cursor cursor) { ImageView iconView = (ImageView) view.findViewById(R.id.list_item_icon); TextView dateView = (TextView) view.findViewById(R.id.list_item_date_textview); TextView forecastView = (TextView) view.findViewById(R.id.list_item_forecast_textview); TextView highView = (TextView) view.findViewById(R.id.list_item_high_textview); TextView lowView = (TextView) view.findViewById(R.id.list_item_low_textview); int iconId = cursor.getInt(MainFragment.COL_WEATHER_ID); double high = cursor.getDouble(MainFragment.COL_WEATHER_MAX_TEMP); double low = cursor.getDouble(MainFragment.COL_WEATHER_MIN_TEMP); String forecast = cursor.getString(MainFragment.COL_WEATHER_DESC); long dateInMillis = cursor.getLong(MainFragment.COL_WEATHER_DATE); boolean isMetric = Utility.isMetric(context); iconView.setImageResource(R.drawable.ic_launcher); dateView.setText(Utility.formatDate(dateInMillis)); forecastView.setText(forecast); highView.setText(Utility.formatTemperature(high, isMetric)); lowView.setText(Utility.formatTemperature(low, isMetric)); } @Override public int getViewTypeCount() { return 2; } @Override public int getItemViewType(int position) { return (position == 0) ? VIEW_TYPE_TODAY : VIEW_TYPE_FUTURE_DAY; } }
Use ViewHolder to access list item views
app/src/main/java/com/example/android/sunshine/app/ForecastAdapter.java
Use ViewHolder to access list item views
Java
apache-2.0
636185ea41c6f676e1eb6fe51d775638c6da895d
0
dhalperi/beam,yk5/beam,RyanSkraba/beam,apache/beam,staslev/beam,mxm/incubator-beam,RyanSkraba/beam,lukecwik/incubator-beam,manuzhang/beam,charlesccychen/incubator-beam,markflyhigh/incubator-beam,peihe/incubator-beam,amarouni/incubator-beam,lukecwik/incubator-beam,manuzhang/incubator-beam,manuzhang/incubator-beam,RyanSkraba/beam,apache/beam,markflyhigh/incubator-beam,yk5/beam,dhalperi/beam,charlesccychen/beam,apache/beam,wangyum/beam,lukecwik/incubator-beam,staslev/incubator-beam,rangadi/incubator-beam,apache/beam,chamikaramj/beam,rangadi/beam,charlesccychen/beam,dhalperi/incubator-beam,tgroh/beam,vikkyrk/incubator-beam,mxm/incubator-beam,jbonofre/beam,wtanaka/beam,dhalperi/beam,vikkyrk/incubator-beam,tgroh/beam,iemejia/incubator-beam,wtanaka/beam,staslev/incubator-beam,wangyum/beam,markflyhigh/incubator-beam,tgroh/beam,charlesccychen/incubator-beam,apache/beam,jbonofre/incubator-beam,amitsela/incubator-beam,wangyum/beam,charlesccychen/beam,rangadi/incubator-beam,apache/beam,charlesccychen/beam,rangadi/beam,lukecwik/incubator-beam,markflyhigh/incubator-beam,rangadi/beam,lukecwik/incubator-beam,lukecwik/incubator-beam,tgroh/incubator-beam,charlesccychen/beam,RyanSkraba/beam,sammcveety/incubator-beam,yk5/beam,robertwb/incubator-beam,jbonofre/beam,robertwb/incubator-beam,rangadi/beam,amitsela/beam,chamikaramj/beam,sammcveety/incubator-beam,markflyhigh/incubator-beam,tgroh/beam,rangadi/beam,rangadi/beam,lukecwik/incubator-beam,chamikaramj/beam,rangadi/incubator-beam,amitsela/beam,chamikaramj/beam,lukecwik/incubator-beam,eljefe6a/incubator-beam,apache/beam,chamikaramj/beam,markflyhigh/incubator-beam,manuzhang/beam,amitsela/beam,chamikaramj/beam,tgroh/incubator-beam,peihe/incubator-beam,markflyhigh/incubator-beam,rangadi/beam,RyanSkraba/beam,chamikaramj/beam,eljefe6a/incubator-beam,apache/beam,vikkyrk/incubator-beam,sammcveety/incubator-beam,xsm110/Apache-Beam,charlesccychen/incubator-beam,RyanSkraba/beam,robertwb/incubator-beam,apache/beam,jbonofre/beam,apache/beam,robertwb/incubator-beam,amitsela/incubator-beam,robertwb/incubator-beam,xsm110/Apache-Beam,lukecwik/incubator-beam,RyanSkraba/beam,chamikaramj/beam,amarouni/incubator-beam,eljefe6a/incubator-beam,jbonofre/incubator-beam,iemejia/incubator-beam,wangyum/beam,jbonofre/beam,robertwb/incubator-beam,manuzhang/beam,charlesccychen/beam,apache/beam,robertwb/incubator-beam,charlesccychen/beam,staslev/beam,wtanaka/beam,chamikaramj/beam,dhalperi/incubator-beam,xsm110/Apache-Beam,chamikaramj/beam,robertwb/incubator-beam,robertwb/incubator-beam,robertwb/incubator-beam,lukecwik/incubator-beam,peihe/incubator-beam,staslev/beam
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.sdk.transforms; import static com.google.common.base.Preconditions.checkArgument; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.Objects; import javax.annotation.Nullable; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.CannotProvideCoderException; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderException; import org.apache.beam.sdk.coders.CoderRegistry; import org.apache.beam.sdk.coders.VoidCoder; import org.apache.beam.sdk.io.BoundedSource; import org.apache.beam.sdk.io.OffsetBasedSource; import org.apache.beam.sdk.io.OffsetBasedSource.OffsetBasedReader; import org.apache.beam.sdk.io.Read; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.util.CoderUtils; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PBegin; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TimestampedValue; import org.apache.beam.sdk.values.TimestampedValue.TimestampedValueCoder; import org.apache.beam.sdk.values.TypeDescriptor; import org.joda.time.Instant; /** * {@code Create<T>} takes a collection of elements of type {@code T} * known when the pipeline is constructed and returns a * {@code PCollection<T>} containing the elements. * * <p>Example of use: * <pre> {@code * Pipeline p = ...; * * PCollection<Integer> pc = p.apply(Create.of(3, 4, 5).withCoder(BigEndianIntegerCoder.of())); * * Map<String, Integer> map = ...; * PCollection<KV<String, Integer>> pt = * p.apply(Create.of(map) * .withCoder(KvCoder.of(StringUtf8Coder.of(), * BigEndianIntegerCoder.of()))); * } </pre> * * <p>{@code Create} can automatically determine the {@code Coder} to use * if all elements have the same run-time class, and a default coder is registered for that * class. See {@link CoderRegistry} for details on how defaults are determined. * * <p>If a coder can not be inferred, {@link Create.Values#withCoder} must be called * explicitly to set the encoding of the resulting * {@code PCollection}. * * <p>A good use for {@code Create} is when a {@code PCollection} * needs to be created without dependencies on files or other external * entities. This is especially useful during testing. * * <p>Caveat: {@code Create} only supports small in-memory datasets, * particularly when submitting jobs to the Google Cloud Dataflow * service. * * @param <T> the type of the elements of the resulting {@code PCollection} */ @SuppressWarnings("OptionalUsedAsFieldOrParameterType") public class Create<T> { /** * Returns a new {@code Create.Values} transform that produces a * {@link PCollection} containing elements of the provided * {@code Iterable}. * * <p>The argument should not be modified after this is called. * * <p>The elements of the output {@link PCollection} will have a timestamp of negative infinity, * see {@link Create#timestamped} for a way of creating a {@code PCollection} with timestamped * elements. * * <p>By default, {@code Create.Values} can automatically determine the {@code Coder} to use * if all elements have the same non-parameterized run-time class, and a default coder is * registered for that class. See {@link CoderRegistry} for details on how defaults are * determined. * Otherwise, use {@link Create.Values#withCoder} to set the coder explicitly. */ public static <T> Values<T> of(Iterable<T> elems) { return new Values<>(elems, Optional.<Coder<T>>absent(), Optional.<TypeDescriptor<T>>absent()); } /** * Returns a new {@code Create.Values} transform that produces a * {@link PCollection} containing the specified elements. * * <p>The elements will have a timestamp of negative infinity, see * {@link Create#timestamped} for a way of creating a {@code PCollection} * with timestamped elements. * * <p>The arguments should not be modified after this is called. * * <p>By default, {@code Create.Values} can automatically determine the {@code Coder} to use * if all elements have the same non-parameterized run-time class, and a default coder is * registered for that class. See {@link CoderRegistry} for details on how defaults are * determined. * Otherwise, use {@link Create.Values#withCoder} to set the coder explicitly. */ @SafeVarargs public static <T> Values<T> of(T elem, T... elems) { // This can't be an ImmutableList, as it may accept nulls List<T> input = new ArrayList<>(elems.length + 1); input.add(elem); input.addAll(Arrays.asList(elems)); return of(input); } /** * Returns a new {@code Create.Values} transform that produces * an empty {@link PCollection}. * * <p>The elements will have a timestamp of negative infinity, see * {@link Create#timestamped} for a way of creating a {@code PCollection} * with timestamped elements. * * <p>Since there are no elements, the {@code Coder} cannot be automatically determined. * Instead, the {@code Coder} is provided via the {@code coder} argument. */ public static <T> Values<T> empty(Coder<T> coder) { return new Values<>(new ArrayList<T>(), Optional.of(coder), Optional.<TypeDescriptor<T>>absent()); } /** * Returns a new {@code Create.Values} transform that produces * an empty {@link PCollection}. * * <p>The elements will have a timestamp of negative infinity, see * {@link Create#timestamped} for a way of creating a {@code PCollection} * with timestamped elements. * * <p>Since there are no elements, the {@code Coder} cannot be automatically determined. * Instead, the {@code Coder} is determined from given {@code TypeDescriptor<T>}. * Note that a default coder must be registered for the class described in the * {@code TypeDescriptor<T>}. */ public static <T> Values<T> empty(TypeDescriptor<T> type) { return new Values<>(new ArrayList<T>(), Optional.<Coder<T>>absent(), Optional.of(type)); } /** * Returns a new {@code Create.Values} transform that produces a * {@link PCollection} of {@link KV}s corresponding to the keys and * values of the specified {@code Map}. * * <p>The elements will have a timestamp of negative infinity, see * {@link Create#timestamped} for a way of creating a {@code PCollection} * with timestamped elements. * * <p>By default, {@code Create.Values} can automatically determine the {@code Coder} to use * if all elements have the same non-parameterized run-time class, and a default coder is * registered for that class. See {@link CoderRegistry} for details on how defaults are * determined. * Otherwise, use {@link Create.Values#withCoder} to set the coder explicitly. */ public static <K, V> Values<KV<K, V>> of(Map<K, V> elems) { List<KV<K, V>> kvs = new ArrayList<>(elems.size()); for (Map.Entry<K, V> entry : elems.entrySet()) { kvs.add(KV.of(entry.getKey(), entry.getValue())); } return of(kvs); } /** * Returns a new {@link Create.TimestampedValues} transform that produces a * {@link PCollection} containing the elements of the provided {@code Iterable} * with the specified timestamps. * * <p>The argument should not be modified after this is called. * * <p>By default, {@code Create.TimestampedValues} can automatically determine the {@code Coder} * to use if all elements have the same non-parameterized run-time class, and a default coder is * registered for that class. See {@link CoderRegistry} for details on how defaults are * determined. * Otherwise, use {@link Create.TimestampedValues#withCoder} to set the coder explicitly. */ public static <T> TimestampedValues<T> timestamped(Iterable<TimestampedValue<T>> elems) { return new TimestampedValues<>( elems, Optional.<Coder<T>>absent(), Optional.<TypeDescriptor<T>>absent()); } /** * Returns a new {@link Create.TimestampedValues} transform that produces a {@link PCollection} * containing the specified elements with the specified timestamps. * * <p>The arguments should not be modified after this is called. */ @SafeVarargs public static <T> TimestampedValues<T> timestamped( TimestampedValue<T> elem, @SuppressWarnings("unchecked") TimestampedValue<T>... elems) { return timestamped(ImmutableList.<TimestampedValue<T>>builder().add(elem).add(elems).build()); } /** * Returns a new root transform that produces a {@link PCollection} containing * the specified elements with the specified timestamps. * * <p>The arguments should not be modified after this is called. * * <p>By default, {@code Create.TimestampedValues} can automatically determine the {@code Coder} * to use if all elements have the same non-parameterized run-time class, and a default coder * is registered for that class. See {@link CoderRegistry} for details on how defaults are * determined. * Otherwise, use {@link Create.TimestampedValues#withCoder} to set the coder explicitly. * @throws IllegalArgumentException if there are a different number of values * and timestamps */ public static <T> TimestampedValues<T> timestamped( Iterable<T> values, Iterable<Long> timestamps) { List<TimestampedValue<T>> elems = new ArrayList<>(); Iterator<T> valueIter = values.iterator(); Iterator<Long> timestampIter = timestamps.iterator(); while (valueIter.hasNext() && timestampIter.hasNext()) { elems.add(TimestampedValue.of(valueIter.next(), new Instant(timestampIter.next()))); } checkArgument( !valueIter.hasNext() && !timestampIter.hasNext(), "Expect sizes of values and timestamps are same."); return timestamped(elems); } ///////////////////////////////////////////////////////////////////////////// /** * A {@code PTransform} that creates a {@code PCollection} from a set of in-memory objects. */ public static class Values<T> extends PTransform<PBegin, PCollection<T>> { /** * Returns a {@link Create.Values} PTransform like this one that uses the given * {@code Coder<T>} to decode each of the objects into a * value of type {@code T}. * * <p>By default, {@code Create.Values} can automatically determine the {@code Coder} to use * if all elements have the same non-parameterized run-time class, and a default coder is * registered for that class. See {@link CoderRegistry} for details on how defaults are * determined. * * <p>Note that for {@link Create.Values} with no elements, the {@link VoidCoder} is used. */ public Values<T> withCoder(Coder<T> coder) { return new Values<>(elems, Optional.of(coder), typeDescriptor); } /** * Returns a {@link Create.Values} PTransform like this one that uses the given * {@code TypeDescriptor<T>} to determine the {@code Coder} to use to decode each of the * objects into a value of type {@code T}. Note that a default coder must be registered for the * class described in the {@code TypeDescriptor<T>}. * * <p>By default, {@code Create.Values} can automatically determine the {@code Coder} to use * if all elements have the same non-parameterized run-time class, and a default coder is * registered for that class. See {@link CoderRegistry} for details on how defaults are * determined. * * <p>Note that for {@link Create.Values} with no elements, the {@link VoidCoder} is used. */ public Values<T> withType(TypeDescriptor<T> type) { return new Values<>(elems, coder, Optional.of(type)); } public Iterable<T> getElements() { return elems; } @Override public PCollection<T> expand(PBegin input) { try { Coder<T> coder = getDefaultOutputCoder(input); try { CreateSource<T> source = CreateSource.fromIterable(elems, coder); return input.getPipeline().apply(Read.from(source)); } catch (IOException e) { throw new RuntimeException( String.format("Unable to apply Create %s using Coder %s.", this, coder), e); } } catch (CannotProvideCoderException e) { throw new IllegalArgumentException("Unable to infer a coder and no Coder was specified. " + "Please set a coder by invoking Create.withCoder() explicitly.", e); } } @Override public Coder<T> getDefaultOutputCoder(PBegin input) throws CannotProvideCoderException { if (coder.isPresent()) { return coder.get(); } else if (typeDescriptor.isPresent()) { return input.getPipeline().getCoderRegistry().getDefaultCoder(typeDescriptor.get()); } else { return getDefaultCreateCoder(input.getPipeline().getCoderRegistry(), elems); } } ///////////////////////////////////////////////////////////////////////////// /** The elements of the resulting PCollection. */ private final transient Iterable<T> elems; /** The coder used to encode the values to and from a binary representation. */ private final transient Optional<Coder<T>> coder; /** The value type. */ private final transient Optional<TypeDescriptor<T>> typeDescriptor; /** * Constructs a {@code Create.Values} transform that produces a * {@link PCollection} containing the specified elements. * * <p>The arguments should not be modified after this is called. */ private Values( Iterable<T> elems, Optional<Coder<T>> coder, Optional<TypeDescriptor<T>> typeDescriptor) { this.elems = elems; this.coder = coder; this.typeDescriptor = typeDescriptor; } @VisibleForTesting static class CreateSource<T> extends OffsetBasedSource<T> { private final List<byte[]> allElementsBytes; private final long totalSize; private final Coder<T> coder; public static <T> CreateSource<T> fromIterable(Iterable<T> elements, Coder<T> elemCoder) throws CoderException, IOException { ImmutableList.Builder<byte[]> allElementsBytes = ImmutableList.builder(); long totalSize = 0L; for (T element : elements) { byte[] bytes = CoderUtils.encodeToByteArray(elemCoder, element); allElementsBytes.add(bytes); totalSize += bytes.length; } return new CreateSource<>(allElementsBytes.build(), totalSize, elemCoder); } /** * Create a new source with the specified bytes. The new source owns the input element bytes, * which must not be modified after this constructor is called. */ private CreateSource(List<byte[]> elementBytes, long totalSize, Coder<T> coder) { super(0, elementBytes.size(), 1); this.allElementsBytes = ImmutableList.copyOf(elementBytes); this.totalSize = totalSize; this.coder = coder; } @Override public long getEstimatedSizeBytes(PipelineOptions options) throws Exception { return totalSize; } @Override public BoundedSource.BoundedReader<T> createReader(PipelineOptions options) throws IOException { return new BytesReader<>(this); } @Override public void validate() {} @Override public Coder<T> getDefaultOutputCoder() { return coder; } @Override public long getMaxEndOffset(PipelineOptions options) throws Exception { return allElementsBytes.size(); } @Override public OffsetBasedSource<T> createSourceForSubrange(long start, long end) { List<byte[]> primaryElems = allElementsBytes.subList((int) start, (int) end); long primarySizeEstimate = (long) (totalSize * primaryElems.size() / (double) allElementsBytes.size()); return new CreateSource<>(primaryElems, primarySizeEstimate, coder); } @Override public long getBytesPerOffset() { if (allElementsBytes.size() == 0) { return 1L; } return Math.max(1, totalSize / allElementsBytes.size()); } } private static class BytesReader<T> extends OffsetBasedReader<T> { private int index; /** * Use an optional to distinguish between null next element (as Optional.absent()) and no next * element (next is null). */ @Nullable private Optional<T> next; public BytesReader(CreateSource<T> source) { super(source); index = -1; } @Override @Nullable public T getCurrent() throws NoSuchElementException { if (next == null) { throw new NoSuchElementException(); } return next.orNull(); } @Override public void close() throws IOException {} @Override protected long getCurrentOffset() { return index; } @Override protected boolean startImpl() throws IOException { return advanceImpl(); } @Override public synchronized CreateSource<T> getCurrentSource() { return (CreateSource<T>) super.getCurrentSource(); } @Override protected boolean advanceImpl() throws IOException { CreateSource<T> source = getCurrentSource(); if (index + 1 >= source.allElementsBytes.size()) { next = null; return false; } index++; next = Optional.fromNullable( CoderUtils.decodeFromByteArray(source.coder, source.allElementsBytes.get(index))); return true; } } } ///////////////////////////////////////////////////////////////////////////// /** * A {@code PTransform} that creates a {@code PCollection} whose elements have * associated timestamps. */ public static class TimestampedValues<T> extends PTransform<PBegin, PCollection<T>>{ /** * Returns a {@link Create.TimestampedValues} PTransform like this one that uses the given * {@code Coder<T>} to decode each of the objects into a * value of type {@code T}. * * <p>By default, {@code Create.TimestampedValues} can automatically determine the * {@code Coder} to use if all elements have the same non-parameterized run-time class, * and a default coder is registered for that class. See {@link CoderRegistry} for details * on how defaults are determined. * * <p>Note that for {@link Create.TimestampedValues with no elements}, the {@link VoidCoder} * is used. */ public TimestampedValues<T> withCoder(Coder<T> coder) { return new TimestampedValues<>(timestampedElements, Optional.of(coder), typeDescriptor); } /** * Returns a {@link Create.TimestampedValues} PTransform like this one that uses the given * {@code TypeDescriptor<T>} to determine the {@code Coder} to use to decode each of the * objects into a value of type {@code T}. Note that a default coder must be registered for the * class described in the {@code TypeDescriptor<T>}. * * <p>By default, {@code Create.TimestampedValues} can automatically determine the {@code Coder} * to use if all elements have the same non-parameterized run-time class, and a default coder is * registered for that class. See {@link CoderRegistry} for details on how defaults are * determined. * * <p>Note that for {@link Create.TimestampedValues} with no elements, the {@link VoidCoder} is * used. */ public TimestampedValues<T> withType(TypeDescriptor<T> type) { return new TimestampedValues<>(timestampedElements, elementCoder, Optional.of(type)); } @Override public PCollection<T> expand(PBegin input) { try { Coder<T> coder = getDefaultOutputCoder(input); PCollection<TimestampedValue<T>> intermediate = Pipeline.applyTransform(input, Create.of(timestampedElements).withCoder(TimestampedValueCoder.of(coder))); PCollection<T> output = intermediate.apply(ParDo.of(new ConvertTimestamps<T>())); output.setCoder(coder); return output; } catch (CannotProvideCoderException e) { throw new IllegalArgumentException("Unable to infer a coder and no Coder was specified. " + "Please set a coder by invoking CreateTimestamped.withCoder() explicitly.", e); } } ///////////////////////////////////////////////////////////////////////////// /** The timestamped elements of the resulting PCollection. */ private final transient Iterable<TimestampedValue<T>> timestampedElements; /** The coder used to encode the values to and from a binary representation. */ private final transient Optional<Coder<T>> elementCoder; /** The value type. */ private final transient Optional<TypeDescriptor<T>> typeDescriptor; private TimestampedValues( Iterable<TimestampedValue<T>> timestampedElements, Optional<Coder<T>> elementCoder, Optional<TypeDescriptor<T>> typeDescriptor) { this.timestampedElements = timestampedElements; this.elementCoder = elementCoder; this.typeDescriptor = typeDescriptor; } private static class ConvertTimestamps<T> extends DoFn<TimestampedValue<T>, T> { @ProcessElement public void processElement(ProcessContext c) { c.outputWithTimestamp(c.element().getValue(), c.element().getTimestamp()); } } @Override public Coder<T> getDefaultOutputCoder(PBegin input) throws CannotProvideCoderException { if (elementCoder.isPresent()) { return elementCoder.get(); } else if (typeDescriptor.isPresent()) { return input.getPipeline().getCoderRegistry().getDefaultCoder(typeDescriptor.get()); } else { Iterable<T> rawElements = Iterables.transform( timestampedElements, new Function<TimestampedValue<T>, T>() { @Override public T apply(TimestampedValue<T> input) { return input.getValue(); } }); return getDefaultCreateCoder(input.getPipeline().getCoderRegistry(), rawElements); } } } private static <T> Coder<T> getDefaultCreateCoder(CoderRegistry registry, Iterable<T> elems) throws CannotProvideCoderException { checkArgument( !Iterables.isEmpty(elems), "Elements must be provided to construct the default Create Coder. To Create an empty " + "PCollection, either call Create.empty(Coder), or call 'withCoder(Coder)' on the " + "result PTransform"); // First try to deduce a coder using the types of the elements. Class<?> elementClazz = Void.class; for (T elem : elems) { if (elem == null) { continue; } Class<?> clazz = elem.getClass(); if (elementClazz.equals(Void.class)) { elementClazz = clazz; } else if (!elementClazz.equals(clazz)) { // Elements are not the same type, require a user-specified coder. throw new CannotProvideCoderException( String.format( "Cannot provide coder for %s: The elements are not all of the same class.", Create.class.getSimpleName())); } } if (elementClazz.getTypeParameters().length == 0) { try { @SuppressWarnings("unchecked") // elementClazz is a wildcard type Coder<T> coder = (Coder<T>) registry.getDefaultCoder(TypeDescriptor.of(elementClazz)); return coder; } catch (CannotProvideCoderException exc) { // Can't get a coder from the class of the elements, try with the elements next } } // If that fails, try to deduce a coder using the elements themselves Optional<Coder<T>> coder = Optional.absent(); for (T elem : elems) { Coder<T> c = registry.getDefaultCoder(elem); if (!coder.isPresent()) { coder = Optional.of(c); } else if (!Objects.equals(c, coder.get())) { throw new CannotProvideCoderException( "Cannot provide coder for elements of " + Create.class.getSimpleName() + ":" + " For their common class, no coder could be provided." + " Based on their values, they do not all default to the same Coder."); } } if (!coder.isPresent()) { throw new CannotProvideCoderException( "Unable to infer a coder. Please register " + "a coder for "); } return coder.get(); } }
sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.sdk.transforms; import static com.google.common.base.Preconditions.checkArgument; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Function; import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; import com.google.common.collect.Iterables; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.NoSuchElementException; import java.util.Objects; import javax.annotation.Nullable; import org.apache.beam.sdk.Pipeline; import org.apache.beam.sdk.coders.CannotProvideCoderException; import org.apache.beam.sdk.coders.Coder; import org.apache.beam.sdk.coders.CoderException; import org.apache.beam.sdk.coders.CoderRegistry; import org.apache.beam.sdk.coders.VoidCoder; import org.apache.beam.sdk.io.BoundedSource; import org.apache.beam.sdk.io.OffsetBasedSource; import org.apache.beam.sdk.io.OffsetBasedSource.OffsetBasedReader; import org.apache.beam.sdk.io.Read; import org.apache.beam.sdk.options.PipelineOptions; import org.apache.beam.sdk.util.CoderUtils; import org.apache.beam.sdk.values.KV; import org.apache.beam.sdk.values.PBegin; import org.apache.beam.sdk.values.PCollection; import org.apache.beam.sdk.values.TimestampedValue; import org.apache.beam.sdk.values.TimestampedValue.TimestampedValueCoder; import org.apache.beam.sdk.values.TypeDescriptor; import org.joda.time.Instant; /** * {@code Create<T>} takes a collection of elements of type {@code T} * known when the pipeline is constructed and returns a * {@code PCollection<T>} containing the elements. * * <p>Example of use: * <pre> {@code * Pipeline p = ...; * * PCollection<Integer> pc = p.apply(Create.of(3, 4, 5).withCoder(BigEndianIntegerCoder.of())); * * Map<String, Integer> map = ...; * PCollection<KV<String, Integer>> pt = * p.apply(Create.of(map) * .withCoder(KvCoder.of(StringUtf8Coder.of(), * BigEndianIntegerCoder.of()))); * } </pre> * * <p>{@code Create} can automatically determine the {@code Coder} to use * if all elements have the same run-time class, and a default coder is registered for that * class. See {@link CoderRegistry} for details on how defaults are determined. * * <p>If a coder can not be inferred, {@link Create.Values#withCoder} must be called * explicitly to set the encoding of the resulting * {@code PCollection}. * * <p>A good use for {@code Create} is when a {@code PCollection} * needs to be created without dependencies on files or other external * entities. This is especially useful during testing. * * <p>Caveat: {@code Create} only supports small in-memory datasets, * particularly when submitting jobs to the Google Cloud Dataflow * service. * * @param <T> the type of the elements of the resulting {@code PCollection} */ @SuppressWarnings("OptionalUsedAsFieldOrParameterType") public class Create<T> { /** * Returns a new {@code Create.Values} transform that produces a * {@link PCollection} containing elements of the provided * {@code Iterable}. * * <p>The argument should not be modified after this is called. * * <p>The elements of the output {@link PCollection} will have a timestamp of negative infinity, * see {@link Create#timestamped} for a way of creating a {@code PCollection} with timestamped * elements. * * <p>By default, {@code Create.Values} can automatically determine the {@code Coder} to use * if all elements have the same non-parameterized run-time class, and a default coder is * registered for that class. See {@link CoderRegistry} for details on how defaults are * determined. * Otherwise, use {@link Create.Values#withCoder} to set the coder explicitly. */ public static <T> Values<T> of(Iterable<T> elems) { return new Values<>(elems, Optional.<Coder<T>>absent(), Optional.<TypeDescriptor<T>>absent()); } /** * Returns a new {@code Create.Values} transform that produces a * {@link PCollection} containing the specified elements. * * <p>The elements will have a timestamp of negative infinity, see * {@link Create#timestamped} for a way of creating a {@code PCollection} * with timestamped elements. * * <p>The arguments should not be modified after this is called. * * <p>By default, {@code Create.Values} can automatically determine the {@code Coder} to use * if all elements have the same non-parameterized run-time class, and a default coder is * registered for that class. See {@link CoderRegistry} for details on how defaults are * determined. * Otherwise, use {@link Create.Values#withCoder} to set the coder explicitly. */ @SafeVarargs public static <T> Values<T> of(T elem, T... elems) { // This can't be an ImmutableList, as it may accept nulls List<T> input = new ArrayList<>(elems.length + 1); input.add(elem); input.addAll(Arrays.asList(elems)); return of(input); } /** * Returns a new {@code Create.Values} transform that produces * an empty {@link PCollection}. * * <p>The elements will have a timestamp of negative infinity, see * {@link Create#timestamped} for a way of creating a {@code PCollection} * with timestamped elements. * * <p>Since there are no elements, the {@code Coder} cannot be automatically determined. * Instead, the {@code Coder} is provided via the {@code coder} argument. */ public static <T> Values<T> empty(Coder<T> coder) { return new Values<>(new ArrayList<T>(), Optional.of(coder), Optional.<TypeDescriptor<T>>absent()); } /** * Returns a new {@code Create.Values} transform that produces * an empty {@link PCollection}. * * <p>The elements will have a timestamp of negative infinity, see * {@link Create#timestamped} for a way of creating a {@code PCollection} * with timestamped elements. * * <p>Since there are no elements, the {@code Coder} cannot be automatically determined. * Instead, the {@code Coder} is determined from given {@code TypeDescriptor<T>}. * Note that a default coder must be registered for the class described in the * {@code TypeDescriptor<T>}. */ public static <T> Values<T> empty(TypeDescriptor<T> type) { return new Values<>(new ArrayList<T>(), Optional.<Coder<T>>absent(), Optional.of(type)); } /** * Returns a new {@code Create.Values} transform that produces a * {@link PCollection} of {@link KV}s corresponding to the keys and * values of the specified {@code Map}. * * <p>The elements will have a timestamp of negative infinity, see * {@link Create#timestamped} for a way of creating a {@code PCollection} * with timestamped elements. * * <p>By default, {@code Create.Values} can automatically determine the {@code Coder} to use * if all elements have the same non-parameterized run-time class, and a default coder is * registered for that class. See {@link CoderRegistry} for details on how defaults are * determined. * Otherwise, use {@link Create.Values#withCoder} to set the coder explicitly. */ public static <K, V> Values<KV<K, V>> of(Map<K, V> elems) { List<KV<K, V>> kvs = new ArrayList<>(elems.size()); for (Map.Entry<K, V> entry : elems.entrySet()) { kvs.add(KV.of(entry.getKey(), entry.getValue())); } return of(kvs); } /** * Returns a new {@link Create.TimestampedValues} transform that produces a * {@link PCollection} containing the elements of the provided {@code Iterable} * with the specified timestamps. * * <p>The argument should not be modified after this is called. * * <p>By default, {@code Create.TimestampedValues} can automatically determine the {@code Coder} * to use if all elements have the same non-parameterized run-time class, and a default coder is * registered for that class. See {@link CoderRegistry} for details on how defaults are * determined. * Otherwise, use {@link Create.TimestampedValues#withCoder} to set the coder explicitly. */ public static <T> TimestampedValues<T> timestamped(Iterable<TimestampedValue<T>> elems) { return new TimestampedValues<>( elems, Optional.<Coder<T>>absent(), Optional.<TypeDescriptor<T>>absent()); } /** * Returns a new {@link Create.TimestampedValues} transform that produces a {@link PCollection} * containing the specified elements with the specified timestamps. * * <p>The arguments should not be modified after this is called. */ @SafeVarargs public static <T> TimestampedValues<T> timestamped( TimestampedValue<T> elem, @SuppressWarnings("unchecked") TimestampedValue<T>... elems) { return timestamped(ImmutableList.<TimestampedValue<T>>builder().add(elem).add(elems).build()); } /** * Returns a new root transform that produces a {@link PCollection} containing * the specified elements with the specified timestamps. * * <p>The arguments should not be modified after this is called. * * <p>By default, {@code Create.TimestampedValues} can automatically determine the {@code Coder} * to use if all elements have the same non-parameterized run-time class, and a default coder * is registered for that class. See {@link CoderRegistry} for details on how defaults are * determined. * Otherwise, use {@link Create.TimestampedValues#withCoder} to set the coder explicitly. * @throws IllegalArgumentException if there are a different number of values * and timestamps */ public static <T> TimestampedValues<T> timestamped( Iterable<T> values, Iterable<Long> timestamps) { List<TimestampedValue<T>> elems = new ArrayList<>(); Iterator<T> valueIter = values.iterator(); Iterator<Long> timestampIter = timestamps.iterator(); while (valueIter.hasNext() && timestampIter.hasNext()) { elems.add(TimestampedValue.of(valueIter.next(), new Instant(timestampIter.next()))); } checkArgument( !valueIter.hasNext() && !timestampIter.hasNext(), "Expect sizes of values and timestamps are same."); return timestamped(elems); } ///////////////////////////////////////////////////////////////////////////// /** * A {@code PTransform} that creates a {@code PCollection} from a set of in-memory objects. */ public static class Values<T> extends PTransform<PBegin, PCollection<T>> { /** * Returns a {@link Create.Values} PTransform like this one that uses the given * {@code Coder<T>} to decode each of the objects into a * value of type {@code T}. * * <p>By default, {@code Create.Values} can automatically determine the {@code Coder} to use * if all elements have the same non-parameterized run-time class, and a default coder is * registered for that class. See {@link CoderRegistry} for details on how defaults are * determined. * * <p>Note that for {@link Create.Values} with no elements, the {@link VoidCoder} is used. */ public Values<T> withCoder(Coder<T> coder) { return new Values<>(elems, Optional.of(coder), typeDescriptor); } /** * Returns a {@link Create.Values} PTransform like this one that uses the given * {@code TypeDescriptor<T>} to determine the {@code Coder} to use to decode each of the * objects into a value of type {@code T}. Note that a default coder must be registered for the * class described in the {@code TypeDescriptor<T>}. * * <p>By default, {@code Create.Values} can automatically determine the {@code Coder} to use * if all elements have the same non-parameterized run-time class, and a default coder is * registered for that class. See {@link CoderRegistry} for details on how defaults are * determined. * * <p>Note that for {@link Create.Values} with no elements, the {@link VoidCoder} is used. */ public Values<T> withType(TypeDescriptor<T> type) { return new Values<>(elems, coder, Optional.of(type)); } public Iterable<T> getElements() { return elems; } @Override public PCollection<T> expand(PBegin input) { try { Coder<T> coder = getDefaultOutputCoder(input); try { CreateSource<T> source = CreateSource.fromIterable(elems, coder); return input.getPipeline().apply(Read.from(source)); } catch (IOException e) { throw new RuntimeException( String.format("Unable to apply Create %s using Coder %s.", this, coder), e); } } catch (CannotProvideCoderException e) { throw new IllegalArgumentException("Unable to infer a coder and no Coder was specified. " + "Please set a coder by invoking Create.withCoder() explicitly.", e); } } @Override public Coder<T> getDefaultOutputCoder(PBegin input) throws CannotProvideCoderException { if (coder.isPresent()) { return coder.get(); } else if (typeDescriptor.isPresent()) { return input.getPipeline().getCoderRegistry().getDefaultCoder(typeDescriptor.get()); } else { return getDefaultCreateCoder(input.getPipeline().getCoderRegistry(), elems); } } ///////////////////////////////////////////////////////////////////////////// /** The elements of the resulting PCollection. */ private final transient Iterable<T> elems; /** The coder used to encode the values to and from a binary representation. */ private final transient Optional<Coder<T>> coder; /** The value type. */ private final transient Optional<TypeDescriptor<T>> typeDescriptor; /** * Constructs a {@code Create.Values} transform that produces a * {@link PCollection} containing the specified elements. * * <p>The arguments should not be modified after this is called. */ private Values( Iterable<T> elems, Optional<Coder<T>> coder, Optional<TypeDescriptor<T>> typeDescriptor) { this.elems = elems; this.coder = coder; this.typeDescriptor = typeDescriptor; } @VisibleForTesting static class CreateSource<T> extends OffsetBasedSource<T> { private final List<byte[]> allElementsBytes; private final long totalSize; private final Coder<T> coder; public static <T> CreateSource<T> fromIterable(Iterable<T> elements, Coder<T> elemCoder) throws CoderException, IOException { ImmutableList.Builder<byte[]> allElementsBytes = ImmutableList.builder(); long totalSize = 0L; for (T element : elements) { byte[] bytes = CoderUtils.encodeToByteArray(elemCoder, element); allElementsBytes.add(bytes); totalSize += bytes.length; } return new CreateSource<>(allElementsBytes.build(), totalSize, elemCoder); } /** * Create a new source with the specified bytes. The new source owns the input element bytes, * which must not be modified after this constructor is called. */ private CreateSource(List<byte[]> elementBytes, long totalSize, Coder<T> coder) { super(0, elementBytes.size(), 1); this.allElementsBytes = ImmutableList.copyOf(elementBytes); this.totalSize = totalSize; this.coder = coder; } @Override public long getEstimatedSizeBytes(PipelineOptions options) throws Exception { return totalSize; } @Override public BoundedSource.BoundedReader<T> createReader(PipelineOptions options) throws IOException { return new BytesReader<>(this); } @Override public void validate() {} @Override public Coder<T> getDefaultOutputCoder() { return coder; } @Override public long getMaxEndOffset(PipelineOptions options) throws Exception { return allElementsBytes.size(); } @Override public OffsetBasedSource<T> createSourceForSubrange(long start, long end) { List<byte[]> primaryElems = allElementsBytes.subList((int) start, (int) end); long primarySizeEstimate = (long) (totalSize * primaryElems.size() / (double) allElementsBytes.size()); return new CreateSource<>(primaryElems, primarySizeEstimate, coder); } @Override public long getBytesPerOffset() { if (allElementsBytes.size() == 0) { return 1L; } return Math.max(1, totalSize / allElementsBytes.size()); } } private static class BytesReader<T> extends OffsetBasedReader<T> { private int index; /** * Use an optional to distinguish between null next element (as Optional.absent()) and no next * element (next is null). */ @Nullable private Optional<T> next; public BytesReader(CreateSource<T> source) { super(source); index = -1; } @Override @Nullable public T getCurrent() throws NoSuchElementException { if (next == null) { throw new NoSuchElementException(); } return next.orNull(); } @Override public void close() throws IOException {} @Override protected long getCurrentOffset() { return index; } @Override protected boolean startImpl() throws IOException { return advanceImpl(); } @Override public synchronized CreateSource<T> getCurrentSource() { return (CreateSource<T>) super.getCurrentSource(); } @Override protected boolean advanceImpl() throws IOException { CreateSource<T> source = getCurrentSource(); index++; if (index >= source.allElementsBytes.size()) { next = null; return false; } next = Optional.fromNullable( CoderUtils.decodeFromByteArray(source.coder, source.allElementsBytes.get(index))); return true; } } } ///////////////////////////////////////////////////////////////////////////// /** * A {@code PTransform} that creates a {@code PCollection} whose elements have * associated timestamps. */ public static class TimestampedValues<T> extends PTransform<PBegin, PCollection<T>>{ /** * Returns a {@link Create.TimestampedValues} PTransform like this one that uses the given * {@code Coder<T>} to decode each of the objects into a * value of type {@code T}. * * <p>By default, {@code Create.TimestampedValues} can automatically determine the * {@code Coder} to use if all elements have the same non-parameterized run-time class, * and a default coder is registered for that class. See {@link CoderRegistry} for details * on how defaults are determined. * * <p>Note that for {@link Create.TimestampedValues with no elements}, the {@link VoidCoder} * is used. */ public TimestampedValues<T> withCoder(Coder<T> coder) { return new TimestampedValues<>(timestampedElements, Optional.of(coder), typeDescriptor); } /** * Returns a {@link Create.TimestampedValues} PTransform like this one that uses the given * {@code TypeDescriptor<T>} to determine the {@code Coder} to use to decode each of the * objects into a value of type {@code T}. Note that a default coder must be registered for the * class described in the {@code TypeDescriptor<T>}. * * <p>By default, {@code Create.TimestampedValues} can automatically determine the {@code Coder} * to use if all elements have the same non-parameterized run-time class, and a default coder is * registered for that class. See {@link CoderRegistry} for details on how defaults are * determined. * * <p>Note that for {@link Create.TimestampedValues} with no elements, the {@link VoidCoder} is * used. */ public TimestampedValues<T> withType(TypeDescriptor<T> type) { return new TimestampedValues<>(timestampedElements, elementCoder, Optional.of(type)); } @Override public PCollection<T> expand(PBegin input) { try { Coder<T> coder = getDefaultOutputCoder(input); PCollection<TimestampedValue<T>> intermediate = Pipeline.applyTransform(input, Create.of(timestampedElements).withCoder(TimestampedValueCoder.of(coder))); PCollection<T> output = intermediate.apply(ParDo.of(new ConvertTimestamps<T>())); output.setCoder(coder); return output; } catch (CannotProvideCoderException e) { throw new IllegalArgumentException("Unable to infer a coder and no Coder was specified. " + "Please set a coder by invoking CreateTimestamped.withCoder() explicitly.", e); } } ///////////////////////////////////////////////////////////////////////////// /** The timestamped elements of the resulting PCollection. */ private final transient Iterable<TimestampedValue<T>> timestampedElements; /** The coder used to encode the values to and from a binary representation. */ private final transient Optional<Coder<T>> elementCoder; /** The value type. */ private final transient Optional<TypeDescriptor<T>> typeDescriptor; private TimestampedValues( Iterable<TimestampedValue<T>> timestampedElements, Optional<Coder<T>> elementCoder, Optional<TypeDescriptor<T>> typeDescriptor) { this.timestampedElements = timestampedElements; this.elementCoder = elementCoder; this.typeDescriptor = typeDescriptor; } private static class ConvertTimestamps<T> extends DoFn<TimestampedValue<T>, T> { @ProcessElement public void processElement(ProcessContext c) { c.outputWithTimestamp(c.element().getValue(), c.element().getTimestamp()); } } @Override public Coder<T> getDefaultOutputCoder(PBegin input) throws CannotProvideCoderException { if (elementCoder.isPresent()) { return elementCoder.get(); } else if (typeDescriptor.isPresent()) { return input.getPipeline().getCoderRegistry().getDefaultCoder(typeDescriptor.get()); } else { Iterable<T> rawElements = Iterables.transform( timestampedElements, new Function<TimestampedValue<T>, T>() { @Override public T apply(TimestampedValue<T> input) { return input.getValue(); } }); return getDefaultCreateCoder(input.getPipeline().getCoderRegistry(), rawElements); } } } private static <T> Coder<T> getDefaultCreateCoder(CoderRegistry registry, Iterable<T> elems) throws CannotProvideCoderException { checkArgument( !Iterables.isEmpty(elems), "Elements must be provided to construct the default Create Coder. To Create an empty " + "PCollection, either call Create.empty(Coder), or call 'withCoder(Coder)' on the " + "result PTransform"); // First try to deduce a coder using the types of the elements. Class<?> elementClazz = Void.class; for (T elem : elems) { if (elem == null) { continue; } Class<?> clazz = elem.getClass(); if (elementClazz.equals(Void.class)) { elementClazz = clazz; } else if (!elementClazz.equals(clazz)) { // Elements are not the same type, require a user-specified coder. throw new CannotProvideCoderException( String.format( "Cannot provide coder for %s: The elements are not all of the same class.", Create.class.getSimpleName())); } } if (elementClazz.getTypeParameters().length == 0) { try { @SuppressWarnings("unchecked") // elementClazz is a wildcard type Coder<T> coder = (Coder<T>) registry.getDefaultCoder(TypeDescriptor.of(elementClazz)); return coder; } catch (CannotProvideCoderException exc) { // Can't get a coder from the class of the elements, try with the elements next } } // If that fails, try to deduce a coder using the elements themselves Optional<Coder<T>> coder = Optional.absent(); for (T elem : elems) { Coder<T> c = registry.getDefaultCoder(elem); if (!coder.isPresent()) { coder = Optional.of(c); } else if (!Objects.equals(c, coder.get())) { throw new CannotProvideCoderException( "Cannot provide coder for elements of " + Create.class.getSimpleName() + ":" + " For their common class, no coder could be provided." + " Based on their values, they do not all default to the same Coder."); } } if (!coder.isPresent()) { throw new CannotProvideCoderException( "Unable to infer a coder. Please register " + "a coder for "); } return coder.get(); } }
Only advance Index in CreateSource if an element exists This prevents an IndexOutOfBoundsException when the index in Create overflows.
sdks/java/core/src/main/java/org/apache/beam/sdk/transforms/Create.java
Only advance Index in CreateSource if an element exists
Java
bsd-3-clause
4809337fa5aaf0b85cc48eb55acd56b249249d11
0
QMXTech/MachineMusePowersuits,QMXTech/MachineMusePowersuits
package net.machinemuse.powersuits.common; import cpw.mods.fml.common.Loader; import net.machinemuse.api.IModularItem; import net.machinemuse.api.ModuleManager; import net.machinemuse.general.MuseLogger; import net.machinemuse.powersuits.powermodule.armor.ApiaristArmorModule; import net.machinemuse.powersuits.powermodule.armor.HazmatModule; import net.machinemuse.powersuits.powermodule.misc.AirtightSealModule; import net.machinemuse.powersuits.powermodule.misc.ThaumGogglesModule; import net.machinemuse.powersuits.powermodule.tool.GrafterModule; import net.machinemuse.powersuits.powermodule.tool.MFFSFieldTeleporterModule; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraftforge.common.Configuration; import java.util.Arrays; import java.util.Collections; public class ModCompatability { public static boolean isGregTechLoaded() { return Loader.isModLoaded("GregTech_Addon"); } public static boolean isBasicComponentsLoaded() { return Loader.isModLoaded("BasicComponents"); } public static boolean isIndustrialCraftLoaded() { return Loader.isModLoaded("IC2"); } public static boolean isThaumCraftLoaded() { return Loader.isModLoaded("Thaumcraft"); } public static boolean isThermalExpansionLoaded() { return Loader.isModLoaded("ThermalExpansion"); } public static boolean isGalacticraftLoaded() { return Loader.isModLoaded("GalacticraftCore"); } public static boolean isForestryLoaded() { return Loader.isModLoaded("Forestry"); } public static boolean isOmniToolsLoaded() { return Loader.isModLoaded("OmniTools"); } public static boolean enableThaumGogglesModule() { boolean defaultval = isThaumCraftLoaded(); return Config.getConfig().get("Special Modules", "Thaumcraft Goggles Module", defaultval).getBoolean(defaultval); } public static boolean vanillaRecipesEnabled() { boolean defaultval = (!isBasicComponentsLoaded()) && (!isIndustrialCraftLoaded()); return Config.getConfig().get(Configuration.CATEGORY_GENERAL, "Vanilla Recipes", defaultval).getBoolean(defaultval); } private static boolean isAtomicScienceLoaded() { return Loader.isModLoaded("AtomicScience"); } public static boolean UERecipesEnabled() { boolean defaultval = isBasicComponentsLoaded(); return Config.getConfig().get(Configuration.CATEGORY_GENERAL, "Universal Electricity Recipes", defaultval).getBoolean(defaultval); } public static boolean IC2RecipesEnabled() { boolean defaultval = isIndustrialCraftLoaded() && (!isGregTechLoaded()); return Config.getConfig().get(Configuration.CATEGORY_GENERAL, "IndustrialCraft Recipes", defaultval).getBoolean(defaultval); } public static boolean GregTechRecipesEnabled() { boolean defaultval = isGregTechLoaded(); return Config.getConfig().get(Configuration.CATEGORY_GENERAL, "Gregtech Recipes", defaultval).getBoolean(defaultval); } public static boolean ThermalExpansionRecipesEnabled() { boolean defaultval = isThermalExpansionLoaded(); return Config.getConfig().get(Configuration.CATEGORY_GENERAL, "Thermal Expansion Recipes", defaultval).getBoolean(defaultval); } public static int joulesToEU(double joules) { return (int) (joules / getIC2Ratio()); } public static double joulesFromEU(int eu) { return getIC2Ratio() * eu; } public static double getUERatio() { return Config.getConfig().get(Configuration.CATEGORY_GENERAL, "Energy per UEJ", 1.0).getDouble(1.0); } public static double getIC2Ratio() { return Config.getConfig().get(Configuration.CATEGORY_GENERAL, "Energy per IC2 EU", 0.4).getDouble(0.4); } public static double getBCRatio() { return Config.getConfig().get(Configuration.CATEGORY_GENERAL, "Energy per MJ", 1.0).getDouble(1.0); } // These 2 elements are basically copied from IC2 api private static Class Ic2Items; public static ItemStack getIC2Item(String name) { try { if (Ic2Items == null) Ic2Items = Class.forName("ic2.core.Ic2Items"); Object ret = Ic2Items.getField(name).get(null); if (ret instanceof ItemStack) { return ((ItemStack) ret).copy(); } else { return null; } } catch (Exception e) { MuseLogger.logError("IC2 API: Call getItem failed for " + name); return null; } } public static ItemStack getGregtechItem(int aIndex, int aAmount, int aMeta) { try { return (ItemStack) Class.forName("gregtechmod.api.GregTech_API") .getMethod("getGregTechItem", new Class[]{Integer.TYPE, Integer.TYPE, Integer.TYPE}) .invoke(null, Integer.valueOf(aIndex), Integer.valueOf(aAmount), Integer.valueOf(aMeta)); } catch (Exception e) { } return null; } public static void registerModSpecificModules() { // Make the IC2 energy ratio show up in config file getBCRatio(); getIC2Ratio(); // Thaumcraft if (isThaumCraftLoaded() && enableThaumGogglesModule()) { ModuleManager.addModule(new ThaumGogglesModule(Collections.singletonList((IModularItem) ModularPowersuits.powerArmorHead))); } //IPowerModule module = new MultimeterModule(Collections.singletonList((IModularItem) ModularPowersuits.powerTool)); // Atomic Science if (isAtomicScienceLoaded()) { ModuleManager.addModule(new HazmatModule(Arrays.<IModularItem>asList(ModularPowersuits.powerArmorHead, ModularPowersuits.powerArmorTorso, ModularPowersuits.powerArmorLegs, ModularPowersuits.powerArmorFeet))); } // Galacticraft if (isGalacticraftLoaded()) { ModuleManager.addModule(new AirtightSealModule(Collections.singletonList((IModularItem) ModularPowersuits.powerArmorHead))); } // Forestry if (isForestryLoaded()) { ModuleManager.addModule(new GrafterModule(Collections.singletonList((IModularItem) ModularPowersuits.powerTool))); ModuleManager.addModule(new ApiaristArmorModule(Arrays.<IModularItem>asList(ModularPowersuits.powerArmorHead, ModularPowersuits.powerArmorTorso, ModularPowersuits.powerArmorLegs, ModularPowersuits.powerArmorFeet))); } try { ModuleManager.addModule(new MFFSFieldTeleporterModule(Collections.singletonList((IModularItem) ModularPowersuits.powerTool))); } catch (Throwable e) { MuseLogger.logError("Failed to get MFFS item!"); } } public static ItemStack getThermexItem(String name, int quantity) { try { ItemStack item = thermalexpansion.api.item.ItemRegistry.getItem(name, quantity); if (item != null) { return item; } } catch (Exception e) { } // thermalexpansion.api.item.ItemRegistry.printItemNames(); MuseLogger.logError("Failed to get Thermal Expansion item " + name); return null; } public static ItemStack getForestryItem(String name, int quantity) { try { ItemStack item = forestry.api.core.ItemInterface.getItem(name); if (item != null) { item.stackSize = quantity; return item; } } catch (Exception e) { } MuseLogger.logError("Failed to get Forestry item " + name); return null; } public static ItemStack getMFFSItem(String name, int quantity) throws ClassNotFoundException, NoSuchFieldException, IllegalAccessException { Object obj = Class.forName("mods.mffs.common.ModularForceFieldSystem").getField("MFFSitemFieldTeleporter").get(null); ItemStack stack = new ItemStack((Item) obj, quantity); return stack; } }
src/minecraft/net/machinemuse/powersuits/common/ModCompatability.java
package net.machinemuse.powersuits.common; import cpw.mods.fml.common.Loader; import net.machinemuse.api.IModularItem; import net.machinemuse.api.ModuleManager; import net.machinemuse.general.MuseLogger; import net.machinemuse.powersuits.powermodule.armor.ApiaristArmorModule; import net.machinemuse.powersuits.powermodule.armor.HazmatModule; import net.machinemuse.powersuits.powermodule.misc.AirtightSealModule; import net.machinemuse.powersuits.powermodule.misc.ThaumGogglesModule; import net.machinemuse.powersuits.powermodule.tool.GrafterModule; import net.machinemuse.powersuits.powermodule.tool.MFFSFieldTeleporterModule; import net.minecraft.item.Item; import net.minecraft.item.ItemStack; import net.minecraftforge.common.Configuration; import java.util.Arrays; import java.util.Collections; public class ModCompatability { public static boolean isGregTechLoaded() { return Loader.isModLoaded("GregTech_Addon"); } public static boolean isBasicComponentsLoaded() { return Loader.isModLoaded("BasicComponents"); } public static boolean isIndustrialCraftLoaded() { return Loader.isModLoaded("IC2"); } public static boolean isThaumCraftLoaded() { return Loader.isModLoaded("Thaumcraft"); } public static boolean isThermalExpansionLoaded() { return Loader.isModLoaded("ThermalExpansion"); } public static boolean isGalacticraftLoaded() { return Loader.isModLoaded("GalacticraftCore"); } public static boolean isForestryLoaded() { return Loader.isModLoaded("Forestry"); } public static boolean isOmniToolsLoaded() { return Loader.isModLoaded("OmniTools"); } public static boolean enableThaumGogglesModule() { boolean defaultval = isThaumCraftLoaded(); return Config.getConfig().get("Special Modules", "Thaumcraft Goggles Module", defaultval).getBoolean(defaultval); } public static boolean vanillaRecipesEnabled() { boolean defaultval = (!isBasicComponentsLoaded()) && (!isIndustrialCraftLoaded()); return Config.getConfig().get(Configuration.CATEGORY_GENERAL, "Vanilla Recipes", defaultval).getBoolean(defaultval); } private static boolean isAtomicScienceLoaded() { return Loader.isModLoaded("AtomicScience"); } public static boolean UERecipesEnabled() { boolean defaultval = isBasicComponentsLoaded(); return Config.getConfig().get(Configuration.CATEGORY_GENERAL, "Universal Electricity Recipes", defaultval).getBoolean(defaultval); } public static boolean IC2RecipesEnabled() { boolean defaultval = isIndustrialCraftLoaded() && (!isGregTechLoaded()); return Config.getConfig().get(Configuration.CATEGORY_GENERAL, "IndustrialCraft Recipes", defaultval).getBoolean(defaultval); } public static boolean GregTechRecipesEnabled() { boolean defaultval = isGregTechLoaded(); return Config.getConfig().get(Configuration.CATEGORY_GENERAL, "Gregtech Recipes", defaultval).getBoolean(defaultval); } public static boolean ThermalExpansionRecipesEnabled() { boolean defaultval = isThermalExpansionLoaded(); return Config.getConfig().get(Configuration.CATEGORY_GENERAL, "Thermal Expansion Recipes", defaultval).getBoolean(defaultval); } public static int joulesToEU(double joules) { return (int) (joules / getIC2Ratio()); } public static double joulesFromEU(int eu) { return getIC2Ratio() * eu; } public static double getUERatio() { return Config.getConfig().get(Configuration.CATEGORY_GENERAL, "Energy per UEJ", 1.0).getDouble(1.0); } public static double getIC2Ratio() { return Config.getConfig().get(Configuration.CATEGORY_GENERAL, "Energy per IC2 EU", 0.4).getDouble(0.4); } public static double getBCRatio() { return Config.getConfig().get(Configuration.CATEGORY_GENERAL, "Energy per MJ", 1.0).getDouble(1.0); } // These 2 elements are basically copied from IC2 api private static Class Ic2Items; public static ItemStack getIC2Item(String name) { try { if (Ic2Items == null) Ic2Items = Class.forName("ic2.core.Ic2Items"); Object ret = Ic2Items.getField(name).get(null); if (ret instanceof ItemStack) { return ((ItemStack) ret).copy(); } else { return null; } } catch (Exception e) { MuseLogger.logError("IC2 API: Call getItem failed for " + name); return null; } } public static ItemStack getGregtechItem(int aIndex, int aAmount, int aMeta) { try { return (ItemStack) Class.forName("gregtechmod.GT_Mod") .getMethod("getGregTechItem", new Class[]{Integer.TYPE, Integer.TYPE, Integer.TYPE}) .invoke(null, Integer.valueOf(aIndex), Integer.valueOf(aAmount), Integer.valueOf(aMeta)); } catch (Exception e) { } return null; } public static void registerModSpecificModules() { // Make the IC2 energy ratio show up in config file getBCRatio(); getIC2Ratio(); // Thaumcraft if (isThaumCraftLoaded() && enableThaumGogglesModule()) { ModuleManager.addModule(new ThaumGogglesModule(Collections.singletonList((IModularItem) ModularPowersuits.powerArmorHead))); } //IPowerModule module = new MultimeterModule(Collections.singletonList((IModularItem) ModularPowersuits.powerTool)); // Atomic Science if (isAtomicScienceLoaded()) { ModuleManager.addModule(new HazmatModule(Arrays.<IModularItem>asList(ModularPowersuits.powerArmorHead, ModularPowersuits.powerArmorTorso, ModularPowersuits.powerArmorLegs, ModularPowersuits.powerArmorFeet))); } // Galacticraft if (isGalacticraftLoaded()) { ModuleManager.addModule(new AirtightSealModule(Collections.singletonList((IModularItem) ModularPowersuits.powerArmorHead))); } // Forestry if (isForestryLoaded()) { ModuleManager.addModule(new GrafterModule(Collections.singletonList((IModularItem) ModularPowersuits.powerTool))); ModuleManager.addModule(new ApiaristArmorModule(Arrays.<IModularItem>asList(ModularPowersuits.powerArmorHead, ModularPowersuits.powerArmorTorso, ModularPowersuits.powerArmorLegs, ModularPowersuits.powerArmorFeet))); } try { ModuleManager.addModule(new MFFSFieldTeleporterModule(Collections.singletonList((IModularItem) ModularPowersuits.powerTool))); } catch (Throwable e) { MuseLogger.logError("Failed to get MFFS item!"); } } public static ItemStack getThermexItem(String name, int quantity) { try { ItemStack item = thermalexpansion.api.item.ItemRegistry.getItem(name, quantity); if (item != null) { return item; } } catch (Exception e) { } // thermalexpansion.api.item.ItemRegistry.printItemNames(); MuseLogger.logError("Failed to get Thermal Expansion item " + name); return null; } public static ItemStack getForestryItem(String name, int quantity) { try { ItemStack item = forestry.api.core.ItemInterface.getItem(name); if (item != null) { item.stackSize = quantity; return item; } } catch (Exception e) { } MuseLogger.logError("Failed to get Forestry item " + name); return null; } public static ItemStack getMFFSItem(String name, int quantity) throws ClassNotFoundException, NoSuchFieldException, IllegalAccessException { Object obj = Class.forName("mods.mffs.common.ModularForceFieldSystem").getField("MFFSitemFieldTeleporter").get(null); ItemStack stack = new ItemStack((Item) obj, quantity); return stack; } }
update to gregtech api call
src/minecraft/net/machinemuse/powersuits/common/ModCompatability.java
update to gregtech api call
Java
mit
20a69d80779d74994507651fc5a4d32a3de722c0
0
ofirl/HackerSwamp,ofirl/HackerSwamp
package interface_objects; import managers.Logger; import objects.*; import java.io.Console; import java.util.HashMap; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.LinkedTransferQueue; /** * general class for communication between the {@code processes.WebListener} and {@code worker} dyno * handles the {@link #parseQueue} and {@link #responseHashMap} */ public class Parser { /** * concurrent queue for transferring received commands from {@code processes.WebListener} to workers */ private static LinkedTransferQueue<CommandRequest> parseQueue = new LinkedTransferQueue<>(); /** * concurrent hash map for transferring responses back to the {@code processes.WebListener} */ private static ConcurrentHashMap<String, CommandRequest> responseHashMap = new ConcurrentHashMap<>(); /** * transfers {@code c} to a worker * @param c the element to enqueue */ public static void transferCommand(CommandRequest c) { try { parseQueue.transfer(c); } catch (Exception e) { } } /** * receives a command from the queue, blocking * @return the top command from the queue */ public static CommandRequest receiveCommand() { CommandRequest output = null; try { output = parseQueue.take(); } catch (Exception e) { } return output; } /** * safely enqueues {@code c} to {@link #parseQueue} * @param c the element to enqueue */ public static void responseEnqueue(CommandRequest c) { responseHashMap.put(c.getKey() ,c); } /** * add a response and notify the relevant object * @param key the key to add the response to * @param response the response */ public static void addResponse(String key, String response) { synchronized (responseHashMap.get(key)) { CommandRequest responseCommandRequest = responseHashMap.get(key); responseCommandRequest.response = response; responseCommandRequest.notify(); } } /** * safely dequeues an element from {@link #parseQueue} * @return the top element in {@link #parseQueue} if one exists, null if the queue is empty */ public static CommandRequest responseDequeue(String key) { return responseHashMap.get(key); } /** * returns when response is received for {@code key} * @param key the key to wait for response for * @return a response matching the key */ public static String waitForResponse(String key) { synchronized (responseHashMap.get(key)) { CommandRequest responseCommandRequest = responseHashMap.get(key); try { while (responseCommandRequest.response == null) responseCommandRequest.wait(); } catch (Exception e) { } responseHashMap.remove(key); return responseCommandRequest.response; } } /** * transfers a command to a worker * @param c the command to run * @return response for the command */ public static void addCommand(CommandRequest c) { responseEnqueue(c); transferCommand(c); } /** * requests and waits for response to {@code input} * {@code input} is similar to : authKey(32):JSD97843HJ0843&command(15):system.help * @param input the request received * @return a response */ public static String requestResponse(String input) { Logger.log("Parser.requestResponse", "got input : " + input); // replace '%3a' with ':' input = input.replaceAll("%3a", ":"); Logger.log("Parser.requestResponse", "after replace : " + input); HashMap<String,String> args = decodeArgumentsList(input); // validity check if (!args.containsKey("authKey")) return Parameters.parserErrorNoAuthKey; if (LoginHandler.getUsernameByKey(args.get("authKey")) == null) return Parameters.parserErrorBadAuthKey; if (!args.containsKey("command")) return Parameters.parserErrorInvalidArguments; ActiveUser activeUser = LoginHandler.getActiveUserByKey(args.get("authKey")); CommandContext context = new CommandContext(activeUser.username, activeUser.playerId, activeUser.getLocation()); CommandRequest c = new CommandRequest(args.get("command"), context); addCommand(c); Logger.log("Parser.requestResponse", "waiting for response for : " + c.command); return waitForResponse(c.getKey()); } /** * same as {@link #encodeArgumentList(HashMap)} but for single argument * @param name the name of the argument * @param value the value of the argument * @return encoded argument as string */ public static String encodeArgument(String name, String value) { HashMap<String, String> arg = new HashMap<>(); arg.put(name, value); return encodeArgumentList(arg); } /** * encodes a {@link HashMap} of key value pairs as string for adding to a response * @param args {@link HashMap} of the key value pairs to encode * @return encoded arguments as a string */ public static String encodeArgumentList(HashMap<String, String> args) { String output = ""; for (String key : args.keySet()) { String value = args.get(key); output += key.length() + ":" + value.length() + " " + key + ":" + value + "&"; // key_length:value_length key:value& } if (!output.equals("")) output = output.substring(0, output.length() - 1); return output; } /** * decodes received parameters (in the requestToHandle body) * received parameters are of the form : key_length:value_length key:value&... * @param args arguments string to decode- * @return {@link HashMap} of the keys and values extracted */ public static HashMap<String, String> decodeArgumentsList(String args) { HashMap<String, String> output = new HashMap<>(); int nameStart = 0; while (nameStart < args.length()) { int keyValueStart = args.indexOf(' ', nameStart); String[] lengths = args.substring(nameStart, keyValueStart).split(":"); // key_length:value_length int keyLength = Integer.parseInt(lengths[0]); int valueLength = Integer.parseInt(lengths[1]); String key = args.substring(keyValueStart + 1, keyValueStart + 1 + keyLength); int valueStartIndex = keyValueStart + 2 + keyLength; String value = args.substring(valueStartIndex, valueStartIndex + valueLength); output.put(key, value); nameStart = valueStartIndex + valueLength + 1; } return output; } }
src/main/java/interface_objects/Parser.java
package interface_objects; import managers.Logger; import objects.*; import java.io.Console; import java.util.HashMap; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.LinkedTransferQueue; /** * general class for communication between the {@code processes.WebListener} and {@code worker} dyno * handles the {@link #parseQueue} and {@link #responseHashMap} */ public class Parser { /** * concurrent queue for transferring received commands from {@code processes.WebListener} to workers */ private static LinkedTransferQueue<CommandRequest> parseQueue = new LinkedTransferQueue<>(); /** * concurrent hash map for transferring responses back to the {@code processes.WebListener} */ private static ConcurrentHashMap<String, CommandRequest> responseHashMap = new ConcurrentHashMap<>(); /** * transfers {@code c} to a worker * @param c the element to enqueue */ public static void transferCommand(CommandRequest c) { try { parseQueue.transfer(c); } catch (Exception e) { } } /** * receives a command from the queue, blocking * @return the top command from the queue */ public static CommandRequest receiveCommand() { CommandRequest output = null; try { output = parseQueue.take(); } catch (Exception e) { } return output; } /** * safely enqueues {@code c} to {@link #parseQueue} * @param c the element to enqueue */ public static void responseEnqueue(CommandRequest c) { responseHashMap.put(c.getKey() ,c); } /** * add a response and notify the relevant object * @param key the key to add the response to * @param response the response */ public static void addResponse(String key, String response) { synchronized (responseHashMap.get(key)) { CommandRequest responseCommandRequest = responseHashMap.get(key); responseCommandRequest.response = response; responseCommandRequest.notify(); } } /** * safely dequeues an element from {@link #parseQueue} * @return the top element in {@link #parseQueue} if one exists, null if the queue is empty */ public static CommandRequest responseDequeue(String key) { return responseHashMap.get(key); } /** * returns when response is received for {@code key} * @param key the key to wait for response for * @return a response matching the key */ public static String waitForResponse(String key) { synchronized (responseHashMap.get(key)) { CommandRequest responseCommandRequest = responseHashMap.get(key); try { while (responseCommandRequest.response == null) responseCommandRequest.wait(); } catch (Exception e) { } responseHashMap.remove(key); return responseCommandRequest.response; } } /** * transfers a command to a worker * @param c the command to run * @return response for the command */ public static void addCommand(CommandRequest c) { responseEnqueue(c); transferCommand(c); } /** * requests and waits for response to {@code input} * {@code input} is similar to : authKey(32):JSD97843HJ0843&command(15):system.help * @param input the request received * @return a response */ public static String requestResponse(String input) { Logger.log("Parser.requestResponse", "got input : " + input); HashMap<String,String> args = decodeArgumentsList(input); // validity check if (!args.containsKey("authKey")) return Parameters.parserErrorNoAuthKey; if (LoginHandler.getUsernameByKey(args.get("authKey")) == null) return Parameters.parserErrorBadAuthKey; if (!args.containsKey("command")) return Parameters.parserErrorInvalidArguments; ActiveUser activeUser = LoginHandler.getActiveUserByKey(args.get("authKey")); CommandContext context = new CommandContext(activeUser.username, activeUser.playerId, activeUser.getLocation()); CommandRequest c = new CommandRequest(args.get("command"), context); addCommand(c); Logger.log("Parser.requestResponse", "waiting for response for : " + c.command); return waitForResponse(c.getKey()); } /** * same as {@link #encodeArgumentList(HashMap)} but for single argument * @param name the name of the argument * @param value the value of the argument * @return encoded argument as string */ public static String encodeArgument(String name, String value) { HashMap<String, String> arg = new HashMap<>(); arg.put(name, value); return encodeArgumentList(arg); } /** * encodes a {@link HashMap} of key value pairs as string for adding to a response * @param args {@link HashMap} of the key value pairs to encode * @return encoded arguments as a string */ public static String encodeArgumentList(HashMap<String, String> args) { String output = ""; for (String key : args.keySet()) { String value = args.get(key); output += key.length() + ":" + value.length() + " " + key + ":" + value + "&"; // key_length:value_length key:value& } if (!output.equals("")) output = output.substring(0, output.length() - 1); return output; } /** * decodes received parameters (in the requestToHandle body) * received parameters are of the form : key_length:value_length key:value&... * @param args arguments string to decode- * @return {@link HashMap} of the keys and values extracted */ public static HashMap<String, String> decodeArgumentsList(String args) { HashMap<String, String> output = new HashMap<>(); int nameStart = 0; while (nameStart < args.length()) { int keyValueStart = args.indexOf(' ', nameStart); String[] lengths = args.substring(nameStart, keyValueStart).split(":"); // key_length:value_length int keyLength = Integer.parseInt(lengths[0]); int valueLength = Integer.parseInt(lengths[1]); String key = args.substring(keyValueStart + 1, keyValueStart + 1 + keyLength); int valueStartIndex = keyValueStart + 2 + keyLength; String value = args.substring(valueStartIndex, valueStartIndex + valueLength); output.put(key, value); nameStart = valueStartIndex + valueLength + 1; } return output; } }
html asci fixes
src/main/java/interface_objects/Parser.java
html asci fixes
Java
mit
4101ff2ce1d96a94a1257662149c75ce888a6bba
0
aartikov/Alligator
package me.aartikov.alligator.internal; import java.util.ArrayList; import java.util.List; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentTransaction; import me.aartikov.alligator.NavigationContext; import me.aartikov.alligator.TransitionAnimation; /** * Date: 19.03.2017 * Time: 13:46 * * @author Artur Artikov */ /** * Custom implementation of a fragment backstack with flexible animation control. */ public class FragmentStack { private static final String TAG_PREFIX = "me.aartikov.alligator.internal.FragmentStack.TAG_"; private FragmentManager mFragmentManager; private int mContainerId; public static FragmentStack from(NavigationContext navigationContext) { return new FragmentStack(navigationContext.getFragmentManager(), navigationContext.getContainerId()); } public FragmentStack(FragmentManager fragmentManager, int containerId) { if (fragmentManager == null) { throw new IllegalArgumentException("FragmentManager can't be null."); } if (containerId <= 0) { throw new IllegalArgumentException("ContainerId is not set."); } mFragmentManager = fragmentManager; mContainerId = containerId; } public List<Fragment> getFragments() { List<Fragment> result = new ArrayList<>(); int index = 0; while (true) { String tag = getFragmentTag(index); Fragment fragment = mFragmentManager.findFragmentByTag(tag); if (fragment == null) { break; } if (!fragment.isRemoving()) { result.add(fragment); } index++; } return result; } public int getFragmentCount() { return getFragments().size(); } public Fragment getCurrentFragment() { return mFragmentManager.findFragmentById(mContainerId); } public void pop(TransitionAnimation animation) { List<Fragment> fragments = getFragments(); int count = fragments.size(); if (count == 0) { throw new IllegalStateException("Can't pop fragment when stack is empty."); } Fragment currentFragment = fragments.get(count - 1); Fragment previousFragment = count > 1 ? fragments.get(count - 2) : null; FragmentTransaction transaction = mFragmentManager.beginTransaction(); if (previousFragment != null) { animation.applyBeforeFragmentTransactionExecuted(transaction, previousFragment, currentFragment); } transaction.remove(currentFragment); if (previousFragment != null) { transaction.attach(previousFragment); } transaction.commitNow(); if (previousFragment != null) { animation.applyAfterFragmentTransactionExecuted(previousFragment, currentFragment); } } public void popUntil(Fragment fragment, TransitionAnimation animation) { List<Fragment> fragments = getFragments(); int count = fragments.size(); int index = fragments.indexOf(fragment); if (index == -1) { throw new IllegalArgumentException("Fragment is not found."); } if (index == count - 1) { return; // nothing to do } FragmentTransaction transaction = mFragmentManager.beginTransaction(); for (int i = index + 1; i < count; i++) { if (i == count - 1) { animation.applyBeforeFragmentTransactionExecuted(transaction, fragment, fragments.get(i)); } transaction.remove(fragments.get(i)); } transaction.attach(fragment); transaction.commitNow(); animation.applyAfterFragmentTransactionExecuted(fragment, fragments.get(count - 1)); } public void push(Fragment fragment, TransitionAnimation animation) { Fragment currentFragment = getCurrentFragment(); FragmentTransaction transaction = mFragmentManager.beginTransaction(); if (currentFragment != null) { animation.applyBeforeFragmentTransactionExecuted(transaction, fragment, currentFragment); transaction.detach(currentFragment); } int index = getFragmentCount(); transaction.add(mContainerId, fragment, getFragmentTag(index)); transaction.commitNow(); if (currentFragment != null) { animation.applyAfterFragmentTransactionExecuted(fragment, currentFragment); } } public void replace(Fragment fragment, TransitionAnimation animation) { Fragment currentFragment = getCurrentFragment(); FragmentTransaction transaction = mFragmentManager.beginTransaction(); if (currentFragment != null) { animation.applyBeforeFragmentTransactionExecuted(transaction, currentFragment, fragment); transaction.remove(currentFragment); } int count = getFragmentCount(); int index = count == 0 ? 0 : count - 1; transaction.add(mContainerId, fragment, getFragmentTag(index)); transaction.commitNow(); if (currentFragment != null) { animation.applyAfterFragmentTransactionExecuted(fragment, currentFragment); } } public void reset(Fragment fragment, TransitionAnimation animation) { List<Fragment> fragments = getFragments(); int count = fragments.size(); FragmentTransaction transaction = mFragmentManager.beginTransaction(); for (int i = 0; i < count; i++) { if (i == count - 1) { animation.applyBeforeFragmentTransactionExecuted(transaction, fragment, fragments.get(i)); } transaction.remove(fragments.get(i)); } transaction.add(mContainerId, fragment, getFragmentTag(0)); transaction.commitNow(); if (count > 0) { animation.applyAfterFragmentTransactionExecuted(fragment, fragments.get(count - 1)); } } private String getFragmentTag(int index) { return TAG_PREFIX + mContainerId + "_" + index; } }
alligator/src/main/java/me/aartikov/alligator/internal/FragmentStack.java
package me.aartikov.alligator.internal; import java.util.ArrayList; import java.util.List; import android.support.v4.app.Fragment; import android.support.v4.app.FragmentManager; import android.support.v4.app.FragmentTransaction; import me.aartikov.alligator.NavigationContext; import me.aartikov.alligator.TransitionAnimation; /** * Date: 19.03.2017 * Time: 13:46 * * @author Artur Artikov */ /** * Custom implementation of a fragment backstack with flexible animation control. */ public class FragmentStack { private static final String TAG_PREFIX = "me.aartikov.alligator.internal.FragmentStack.TAG_"; private FragmentManager mFragmentManager; private int mContainerId; public static FragmentStack from(NavigationContext navigationContext) { return new FragmentStack(navigationContext.getFragmentManager(), navigationContext.getContainerId()); } public FragmentStack(FragmentManager fragmentManager, int containerId) { if (fragmentManager == null) { throw new IllegalArgumentException("FragmentManager can't be null."); } if (containerId <= 0) { throw new IllegalArgumentException("ContainerId is not set."); } mFragmentManager = fragmentManager; mContainerId = containerId; } public List<Fragment> getFragments() { List<Fragment> result = new ArrayList<>(); int index = 0; while (true) { String tag = getFragmentTag(index); Fragment fragment = mFragmentManager.findFragmentByTag(tag); if (fragment == null) { break; } if (!fragment.isRemoving()) { result.add(fragment); } index++; } return result; } public int getFragmentCount() { return getFragments().size(); } public Fragment getCurrentFragment() { return mFragmentManager.findFragmentById(mContainerId); } public void pop(TransitionAnimation animation) { List<Fragment> fragments = getFragments(); int count = fragments.size(); if (count == 0) { throw new IllegalStateException("Can't pop fragment when stack is empty."); } Fragment currentFragment = fragments.get(count - 1); Fragment previousFragment = count > 1 ? fragments.get(count - 2) : null; FragmentTransaction transaction = mFragmentManager.beginTransaction(); if (previousFragment != null) { animation.applyBeforeFragmentTransactionExecuted(transaction, previousFragment, currentFragment); } transaction.remove(currentFragment); if (previousFragment != null) { transaction.attach(previousFragment); } transaction.commitNow(); } public void popUntil(Fragment fragment, TransitionAnimation animation) { List<Fragment> fragments = getFragments(); int count = fragments.size(); int index = fragments.indexOf(fragment); if (index == -1) { throw new IllegalArgumentException("Fragment is not found."); } if (index == count - 1) { return; // nothing to do } FragmentTransaction transaction = mFragmentManager.beginTransaction(); for (int i = index + 1; i < count; i++) { if (i == count - 1) { animation.applyBeforeFragmentTransactionExecuted(transaction, fragment, fragments.get(i)); } transaction.remove(fragments.get(i)); } transaction.attach(fragment); transaction.commitNow(); } public void push(Fragment fragment, TransitionAnimation animation) { Fragment currentFragment = getCurrentFragment(); FragmentTransaction transaction = mFragmentManager.beginTransaction(); if (currentFragment != null) { animation.applyBeforeFragmentTransactionExecuted(transaction, fragment, currentFragment); transaction.detach(currentFragment); } int index = getFragmentCount(); transaction.add(mContainerId, fragment, getFragmentTag(index)); transaction.commitNow(); } public void replace(Fragment fragment, TransitionAnimation animation) { Fragment currentFragment = getCurrentFragment(); FragmentTransaction transaction = mFragmentManager.beginTransaction(); if (currentFragment != null) { animation.applyBeforeFragmentTransactionExecuted(transaction, currentFragment, fragment); transaction.remove(currentFragment); } int count = getFragmentCount(); int index = count == 0 ? 0 : count - 1; transaction.add(mContainerId, fragment, getFragmentTag(index)); transaction.commitNow(); } public void reset(Fragment fragment, TransitionAnimation animation) { List<Fragment> fragments = getFragments(); int count = fragments.size(); FragmentTransaction transaction = mFragmentManager.beginTransaction(); for (int i = 0; i < count; i++) { if (i == count - 1) { animation.applyBeforeFragmentTransactionExecuted(transaction, fragment, fragments.get(i)); } transaction.remove(fragments.get(i)); } transaction.add(mContainerId, fragment, getFragmentTag(0)); transaction.commitNow(); } private String getFragmentTag(int index) { return TAG_PREFIX + mContainerId + "_" + index; } }
[fixbug] Call applyAfterFragmentTransactionExecuted in FragmentStack
alligator/src/main/java/me/aartikov/alligator/internal/FragmentStack.java
[fixbug] Call applyAfterFragmentTransactionExecuted in FragmentStack
Java
mit
b55fb99162a66b2c93957abf1a8138ced21281bc
0
ngageoint/geopackage-core-java
package mil.nga.geopackage.extension.elevation; import mil.nga.geopackage.BoundingBox; import mil.nga.geopackage.tiles.TileBoundingBoxUtils; /** * Elevation request to retrieve elevation values for a point or bounding box * * @author osbornb * @since 1.2.1 */ public class ElevationRequest { /** * Bounding box */ private BoundingBox boundingBox; /** * Point flag, true when a single point request */ private boolean point; /** * Bounding box projected to the elevation tiles projection */ private BoundingBox projectedBoundingBox; /** * Constructor * * @param boundingBox * bounding box */ public ElevationRequest(BoundingBox boundingBox) { this.boundingBox = boundingBox; } /** * Constructor * * @param latitude * latitude coordinate * @param longitude * longitude coordinate */ public ElevationRequest(double latitude, double longitude) { this(new BoundingBox(longitude, longitude, latitude, latitude)); point = true; } /** * Get the bounding box * * @return bounding box */ public BoundingBox getBoundingBox() { return boundingBox; } /** * Is the request for a single point * * @return true if a point request */ public boolean isPoint() { return point; } /** * Get the projected bounding box * * @return projected bounding box */ public BoundingBox getProjectedBoundingBox() { return projectedBoundingBox; } /** * Set the projected bounding box * * @param projectedBoundingBox * projected bounding box */ public void setProjectedBoundingBox(BoundingBox projectedBoundingBox) { this.projectedBoundingBox = projectedBoundingBox; } /** * Get the bounding box overlap between the projected bounding box and the * elevation bounding box * * @param projectedElevation * projected elevation * @return overlap bounding box */ public BoundingBox overlap(BoundingBox projectedElevation) { BoundingBox overlap = null; if (point) { overlap = projectedBoundingBox; } else { overlap = TileBoundingBoxUtils.overlap(projectedBoundingBox, projectedElevation); } return overlap; } }
src/main/java/mil/nga/geopackage/extension/elevation/ElevationRequest.java
package mil.nga.geopackage.extension.elevation; import mil.nga.geopackage.BoundingBox; import mil.nga.geopackage.tiles.TileBoundingBoxUtils; /** * Elevation request to retrieve elevation values for a point or bounding box * * @author osbornb * @since 1.2.1 */ public class ElevationRequest { /** * Bounding box */ private BoundingBox boundingBox; /** * Point flag, true when a single point request */ private boolean point; /** * Bounding box projected to the elevation tiles projection */ private BoundingBox projectedBoundingBox; /** * Constructor * * @param boundingBox * bounding box */ public ElevationRequest(BoundingBox boundingBox) { this.boundingBox = boundingBox; } /** * Constructor * * @param latitude * latitude coordinate * @param longitude * longitude coordinate */ public ElevationRequest(double latitude, double longitude) { this(new BoundingBox(longitude, longitude, latitude, latitude)); point = true; } /** * Get the bounding box * * @return bounding box */ public BoundingBox getBoundingBox() { return boundingBox; } /** * Is the request for a single point * * @return true if a point request */ public boolean isPoint() { return point; } /** * Get the projected bounding box * * @return projected bounding box */ public BoundingBox getProjectedBoundingBox() { return projectedBoundingBox; } /** * Set the projected bounding box * * @param projectedBoundingBox * projected bounding box */ public void setProjectedBoundingBox(BoundingBox projectedBoundingBox) { this.projectedBoundingBox = projectedBoundingBox; } /** * Get the bounding box overlap between the projected bounding box and the * elevation bounding box * * @param projectedElevation * projected elevation * @return overlap bounding box */ public BoundingBox overlap(BoundingBox projectedElevation) { BoundingBox overlap = null; if (point) { if (projectedBoundingBox.getMinLatitude() >= projectedElevation .getMinLatitude() && projectedBoundingBox.getMaxLatitude() <= projectedElevation .getMaxLatitude() && projectedBoundingBox.getMinLongitude() >= projectedElevation .getMinLongitude() && projectedBoundingBox.getMaxLongitude() <= projectedElevation .getMaxLongitude()) { overlap = projectedBoundingBox; } } else { overlap = TileBoundingBoxUtils.overlap(projectedBoundingBox, projectedElevation); } return overlap; } }
for point queries use the query as the overlap request on tile borders
src/main/java/mil/nga/geopackage/extension/elevation/ElevationRequest.java
for point queries use the query as the overlap request on tile borders
Java
mit
b025c654fb42ae6396046b3d15d091a3f33d36c3
0
theblackarts/tba3,Art-Nava94/tba3,theblackarts/tba3,theblackarts/tba3
/* * "All that is gold does not glitter, * Not all those who wander are lost; * The old that is strong does not wither, * Deep roots are not reached by the frost. * From the ashes a fire shall be woken, * A light from the shadows shall spring; * Renewed shall be blade that was broken, * The crownless again shall be king." * --J. R. R. Tolkein * * glhf */ package core; import java.util.ArrayList; import java.util.InputMismatchException; import java.util.Scanner; public class Game { // Each game is played by two players private Player playerOne; private Player playerTwo; // Who's turn it is, is based on modulo 2 (0 is player 1's turn, 1 is player 2's turn) private int totalTurns = 0; // Deck Manager private Deck deckManager = new Deck(); // Turn Phases private boolean refreshPhase; private boolean drawPhase; private boolean attackPhase; private boolean minePhase; private boolean purchasePhase; private boolean endPhase; // FTW! boolean win = false; // Game Zones private ArrayList<Card> playerOneInPlayZone = new ArrayList<Card>(); private ArrayList<Card> playerTwoInPlayZone = new ArrayList<Card>(); private ArrayList<Card> playerOneDeadZone = new ArrayList<Card>(); private ArrayList<Card> playerTwoDeadZone = new ArrayList<Card>(); // UI input private Scanner input = new Scanner(System.in); private char decideYN; private int cardChoice; private Card card; // Constructors public Game(Player playerOne, Player playerTwo) { this.playerOne = playerOne; this.playerTwo = playerTwo; } /** * This method starts a game of The Black Arts. * Player's will alternate turns until one of them runs out of HP! * @param playerOne Player one * @param playerTwo Player two */ public void startGame(Player playerOne, Player playerTwo) { // Get each Player's Deck ArrayList<Card> deckOne = playerOne.getDeck(); ArrayList<Card> deckTwo = playerTwo.getDeck(); // Shuffle each Player's Deck deckManager.shuffleDeck(deckOne); deckManager.shuffleDeck(deckTwo); // Deal Each Player's starting hand playerOne.setHand(deckManager.dealSevenCards(deckOne)); playerTwo.setHand(deckManager.dealSevenCards(deckTwo)); // Create a reference variable for each Player's hand ArrayList<Card> handOne = playerOne.getHand(); ArrayList<Card> handTwo = playerTwo.getHand(); // Main game loop that allows players to take turns until one of them goes to 0 HP. gameloop: while (true) { // Display the turn number (e.g. first turn is 1, second turn is 2, and so on) System.out.println(":: Turn :: " + (totalTurns + 1)); if (totalTurns % 2 == 0) { // We know it is playerOne's turn // Announce that it is player one's turn System.out.println("It is " + playerOne.getFirstName() + "'s turn."); /* ======================================================== * PLAYER ONE'S TURN PHASES * ========================================================*/ // ********************* (1) Refresh ********************* startRefreshPhase(playerOneInPlayZone); // ********************* (2) Draw ************************ startDrawPhase(deckOne, handOne); // ********************* (3) Attack ********************** startAttackPhase(playerOneInPlayZone, playerTwoInPlayZone, playerOneDeadZone, playerTwoDeadZone, this.playerOne, this.playerTwo); if (win) break gameloop; // ********************* (4) Mine ************************ startMinePhase(handOne, playerOneInPlayZone); // ********************* (5) Purchase ******************** startPurchasePhase(handOne, playerOneInPlayZone, playerTwoInPlayZone, playerTwoDeadZone); // ********************* (6) End ************************* startEndPhase(); } else if (totalTurns % 2 == 1) { // We know it is playerTwo's turn System.out.println("It is " + playerTwo.getFirstName() + "'s turn."); /* ======================================================== * PLAYER TWO's TURN PHASES * ========================================================*/ // ********************* (1) Refresh ********************* startRefreshPhase(playerTwoInPlayZone); // ********************* (2) Draw ************************ startDrawPhase(deckTwo, handTwo); // ********************* (3) Attack ********************** startAttackPhase(playerTwoInPlayZone, playerOneInPlayZone, playerTwoDeadZone, playerOneDeadZone, this.playerTwo, this.playerOne); if (win) break gameloop; // ********************* (4) Mine ************************ startMinePhase(handTwo, playerTwoInPlayZone); // ********************* (5) Purchase ******************** startPurchasePhase(handTwo, playerTwoInPlayZone, playerOneInPlayZone, playerOneDeadZone); // ********************* (6) End ************************* startEndPhase(); } // Increment totalTurns nextTurn(); } } /* ======================================================== * UI METHODS * ========================================================*/ /** * Allow player to choose a card from his or her hand */ public void selectOneHandCard(ArrayList<Card> hand) { // Prompt the player which card to choose System.out.println("Which card would you like to select?"); System.out.println("HAND"); // Display all the cards with a number for (int i = 0, n = hand.size(); i < n; i++) { System.out.println((i + 1) + ": " + hand.get(i).getCardName()); } // Prompt player for selection System.out.print("Select (enter a number): "); // Get the selection int selection = input.nextInt(); // Get the card Card card = hand.get(selection - 1); // Check if the card is a Monster or not a Monster if (card instanceof Monster) { System.out.println("You picked " + card.getCardName()); System.out.println("It costs " + card.getGoldCost()); } else if (card instanceof Gold) { System.out.println("You picked " + card.getCardName()); } } /* ======================================================== * SETTER AND GETTER METHODS * ========================================================*/ public ArrayList<Card> getPlayerOneInPlayZone() { return playerOneInPlayZone; } public ArrayList<Card> getPlayerTwoInPlayZone() { return playerTwoInPlayZone; } public int getTotalTurns() { return totalTurns; } public void setTotalTurns(int totalTurns) { this.totalTurns = totalTurns; } public Player getPlayerOne() { return playerOne; } public void setPlayerOne(Player playerOne) { this.playerOne = playerOne; } public Player getPlayerTwo() { return playerTwo; } public void setPlayerTwo(Player playerTwo) { this.playerTwo = playerTwo; } /* ======================================================== * GAME UTILITY METHODS * ========================================================*/ /** * Announce the game! */ public void announceGame() { System.out.println("** Welcome to The Dark Arts game **\n"); } /** * Count the amount of unused gold a player has in their play zone */ public int calculateAmountOfUnusedGold(ArrayList<Card> inPlayZone) { int amountOfUnusedGold = 0; for (int i = 0, n = inPlayZone.size(); i < n; i++) { if (inPlayZone.get(i) instanceof Gold) { if (((Gold) inPlayZone.get(i)).isUsed() == false) { amountOfUnusedGold++; } } } return amountOfUnusedGold; } /** * Remove one select card from player one's in play zone */ public void removeOneFromPlayerOneInPlayZone(int card) { this.playerOneInPlayZone.remove(card); } /** * Remove one select card from player two's in play zone */ public void removeOneFromPlayerTwoInPlayZone(int card) { this.playerTwoInPlayZone.remove(card); } /** * Add one card to player one's dead zone */ public void addCardToPlayerOneDeadZone(Card card) { this.playerOneDeadZone.add(card); } /** * Add one card to player two's dead zone */ public void addCardToPlayerTwoDeadZone(Card card) { this.playerTwoDeadZone.add(card); } /* ======================================================== * PHASE METHODS * ========================================================*/ /** * Start the refresh phase for a player's turn. * @param inPlayZone * Refresh Phase: */ public void startRefreshPhase(ArrayList<Card> inPlayZone) { refreshPhase = true; System.out.println("Start [REFRESH PHASE]"); // For each Gold card that playerOne owns, it should go from used to unused // For each Monster it should go from attacked to not attacked for (Card c : inPlayZone) { if (c instanceof Gold) { ((Gold) c).setUsed(false); } else if (c instanceof Monster) { ((Monster) c).setIsAttacked(false); } } // For each card that has a game mechanic that is triggered by Refresh, // it should have it's behavior here refreshPhase = false; // end refresh phase System.out.println("End [REFRESH PHASE]"); } /** * Start a player's draw phase * @param deck * @param hand */ public void startDrawPhase(ArrayList<Card> deck, ArrayList<Card> hand) { drawPhase = true; // begin draw phase System.out.println("Start [DRAW PHASE]"); if (totalTurns != 0) { // if it is not the first turn then deal one card to the Player Card dealtCard = deckManager.dealOneCard(deck); hand.add(dealtCard); System.out.println("You drew a " + dealtCard.getCardName()); } drawPhase = false; // end draw phase // TODO: Implement method to shuffle Dead Zone cards back into the deck when there are no more cards // to draw from a Player's deck. System.out.println("End [DRAW PHASE]"); } /** * Start the attack phase * @param attackerInPlayZone * @param defenderInPlayZone * @param attackerDeadZone * @param defenderDeadZone * @param playerAttack * @param playerDefend */ public void startAttackPhase(ArrayList<Card> attackerInPlayZone, ArrayList<Card> defenderInPlayZone, ArrayList<Card> attackerDeadZone, ArrayList<Card> defenderDeadZone, Player playerAttack, Player playerDefend) { attackPhase = true; System.out.println("Start [ATTACK PHASE]"); Scanner input = new Scanner(System.in); ArrayList<Monster> attackers = new ArrayList<Monster>(); ArrayList<Monster> availableDefenders = new ArrayList<Monster>(); /* Check that there is at least one Monster in play for the attacker * If there is not at least one Monster, skip the Attack phase */ for (int i = 0, n = attackerInPlayZone.size(); i < n; i++) { if (attackerInPlayZone.get(i) instanceof Monster) { // We know there is at least one monster System.out.println("Do you wish to attack (Y/N)"); decideYN = input.next().charAt(0); if (decideYN == 'Y' || decideYN == 'y') { /* ======================================================== * ATTACK PORTION OF ATTACK PHASE * ========================================================*/ // Prompt the attacker to select the Monsters he or she would like to attack with int newHP = 0; int attackValue = 0; System.out.println("Select a set of Monsters to attack with (ex. 1,2; no spaces)"); // Display attacker's monsters that he or she could attack with // TODO: Make this a helper method for (int j = 0; j < n; j++) if (attackerInPlayZone.get(j) instanceof Monster) System.out.println((j + 1) + ": " + attackerInPlayZone.get(j).getCardName()); // Get the input String attackSelectsStr = input.next(); // Parse the input String[] attackSelects = attackSelectsStr.split(","); for (String str : attackSelects) { // Toggle isAttacked for each selected monster from false to true ((Monster)attackerInPlayZone.get(Integer.parseInt(str) - 1)).setIsAttacked(true); // Display which Monsters attacked System.out.println("You attacked with " + attackerInPlayZone.get(Integer.parseInt(str) - 1).getCardName()); // Add the Monsters to the attackers ArrayList Monster myAttackMonster = ((Monster) attackerInPlayZone.get(Integer.parseInt(str) - 1)); attackers.add(myAttackMonster); } /* ======================================================== * DEFENSE PORTION OF ATTACK PHASE * ========================================================*/ /* check that there is at least one monster in play for the defender If there is not at least one monster, skip the defense portion of the attack phase */ for (Card card1 : defenderInPlayZone) { if (card1 instanceof Monster) { for (Card card2 : defenderInPlayZone) { if (card2 instanceof Monster) { Monster myDefendMonster = ((Monster) card2); availableDefenders.add(myDefendMonster); } } /* ------------------------------------------------------------------------ * Print out the columns of the Attackers and Potential Defenders * ------------------------------------------------------------------------ */ // Print the header System.out.format("%-20s%s\n", "Attackers:", "Avail. Defenders:"); // Determine which of the two ArrayLists are longer if (attackers.size() > availableDefenders.size()) { // attackers is the bigger ArrayList for (int j = 0, as = attackers.size(); j < as; j++) { System.out.format("%s %-18s", j, attackers.get(j).getCardName()); if (j >= 0 && j < availableDefenders.size()) { System.out.println((j + 1) + " " + availableDefenders.get(j).getCardName()); } else { System.out.println(); } } } else { // availableDefenders is the bigger ArrayList for (int k = 0, ads = availableDefenders.size(); k < ads; k++) { if (k >= 0 && k < attackers.size()) { System.out.format("%s %-18s", k + 1, attackers.get(k).getCardName()); } else { System.out.format("%-20s", ""); } System.out.println((k + 1) + " " + availableDefenders.get(k).getCardName()); } } break; // since we found one monster for the defender, break out of this loop as // it has served its purpose } } ArrayList<String> myDefendString = new ArrayList<String>(); String defendString; String defendChoice=""; System.out.println("Do you wish to defend (Y/N)"); decideYN = input.next().charAt(0); if (decideYN == 'Y' || decideYN == 'y' ) { System.out.println("How do you want to Defend Ex: (1,2)(2,1)"); defendString = input.next(); for (i = 0; i < defendString.length(); i++) { if (Character.isDigit(defendString.charAt(i)) || defendString.charAt(i) == ',') { defendChoice += defendString.charAt(i); } if (defendString.charAt(i)==')') { myDefendString.add(defendChoice); defendChoice = ""; } } ArrayList<Integer> myDefendIntegerArray = new ArrayList<Integer>(); String strToInt =""; for (int j = 0; j < myDefendString.size(); j++ ) { for (int t = 0; t < myDefendString.get(j).length(); t++) { if (Character.isDigit(myDefendString.get(j).charAt(t))) { strToInt += myDefendString.get(j).charAt(t); } if (myDefendString.get(j).charAt(t) == ',' || t == (myDefendString.get(j).length() - 1)) { myDefendIntegerArray.add(Integer.parseInt(strToInt)); strToInt = ""; } } /* ======================================================== * DAMAGE PORTION OF ATTACK PHASE * ========================================================*/ int currentAttack = attackers.get(myDefendIntegerArray.get(0) - 1).getAttack(); int currentAttackHP = attackers.get(myDefendIntegerArray.get(0) - 1).getHitPoints(); int currentDefenseHP; int currentDefenseAttack; for (int f = 1; f < myDefendIntegerArray.size(); f++) { currentDefenseAttack = availableDefenders.get(myDefendIntegerArray.get(f) - 1).getAttack(); currentDefenseHP = availableDefenders.get(myDefendIntegerArray.get(f) - 1).getHitPoints(); currentAttackHP -= currentDefenseAttack; currentDefenseHP -= currentAttack; // kill the defending monster that ran out of HP if (currentDefenseHP <= 0) { defenderInPlayZone.remove(availableDefenders.get(myDefendIntegerArray.get(f) - 1)); defenderDeadZone.add(availableDefenders.get(myDefendIntegerArray.get(f) - 1)); } currentAttack = (-1 * currentDefenseHP); } // kill the attacking monster that ran out of HP if (currentAttackHP <= 0) { attackerInPlayZone.remove(attackers.get(myDefendIntegerArray.get(0) - 1)); attackerDeadZone.add(attackers.get(myDefendIntegerArray.get(0) - 1)); } attackers.remove(myDefendIntegerArray.get(0) - 1); myDefendIntegerArray.clear(); System.out.println(""); } } else System.out.println("Defend phase over"); for (int f = 0; f < attackers.size(); f++) { attackValue += attackers.get(f).getAttack(); } newHP = playerDefend.getHitPoints() - attackValue; // Win condition if (newHP <= 0) { // Attacking player wins! System.out.println(playerAttack.getFirstName() + " wins!"); win = true; break; } else { playerDefend.setHitPoints(newHP); System.out.println("Damage has been assigned, " + playerDefend.getFirstName() + " you are now at " + newHP + " HP"); break; // Since we found one monster for the attacker, break out of this loop as // it has served its purpose } } } System.out.println(attackerInPlayZone); System.out.println(defenderInPlayZone); System.out.println(attackerDeadZone); System.out.println(defenderDeadZone); attackPhase = false; System.out.println("End [ATTACK PHASE]"); } } /** * Start the mine phase for a player, allowing that player to play one gold card (if they have one) from their * hand into their play zone. * @param hand * @param inPlayZone */ public void startMinePhase(ArrayList<Card> hand, ArrayList<Card> inPlayZone) { Card card; // used for storing a card selected by a player (remove the card, add the card, print the card) System.out.println("Start [MINE PHASE]"); /* For each card in Player One's hand, is there at least one Gold card? * If yes, give the Player an option to play it and stop checking for Gold cards * NOTE: There is no way to bluff using this system */ for (int i = 0, n = hand.size(); i < n; i++) { card = hand.get(i); if (card instanceof Gold) { System.out.println("You have a Gold card to play, would you like to play it? Y/N: "); decideYN = input.next().charAt(0); if (decideYN == 'Y' || decideYN == 'y') { // Remove the card from a Player's hand hand.remove(i); // Add the card to play zone inPlayZone.add(card); break; } } } minePhase = false; // end minePhase System.out.println("End [MINE PHASE]"); } /** * Start the purchase phase for a player, allowing them to purchase an * arbitrary number of cards, subject to the amount of unused gold they have available. * @param hand * @param inPlayZone */ public void startPurchasePhase(ArrayList<Card> hand, ArrayList<Card> inPlayZone, ArrayList<Card> inSelectPlayZone, ArrayList<Card> deadZone) { /* * We need a way to handle the purchase of Action cards differently than * Monster cards. Action cards are basically an object with a method that * affects the state of the game -- for instance killing a monster * If purchase action card, and that action card is say execute, * we need to run the kill monster method. * * Also, this code is getting really procedural, it would be easier to read * if we refactored the parts of it that repeat. */ purchasePhase = true; System.out.println("Start [PURCHASE PHASE]"); // Get the amount of gold that the player has at the start of his // or her purchase phase int amountOfUnusedGold = calculateAmountOfUnusedGold(inPlayZone); int cardCost; boolean isAtLeastOneAffordable = false; // While player has at least 1 or more unused Gold in play run through the purchase loop while (amountOfUnusedGold > 0) { // Check that there is at least one affordable card for (Card card : hand) if (card.getGoldCost() <= amountOfUnusedGold && !(card instanceof Gold)) { // Added <= instead of <, can revert back if this doesnt work isAtLeastOneAffordable = true; break; // Added this break to make it a lazy evaluation } // If there is at least one affordable purchasebale card allow player to purchase it if (isAtLeastOneAffordable) { /* For each card in Player's hand that has a cost (Monster, Action, Accessory), * display it along with an integer value that will act as an affordance to select * and pay for it, allowing the player to bring a card into play. */ for (int i = 0, n = hand.size(); i < n; i++) { if (hand.get(i) instanceof Monster || hand.get(i) instanceof Accessory || hand.get(i) instanceof Execute) { System.out.println((i + 1) + ": " + hand.get(i).getCardName() + ", " + hand.get(i).getGoldCost()); } } // Check if there are no purchaseable (Monster, Action, Accessory cards in hand, break if (!isAPurchaseableInHand(hand)) { break; } // Prompt the user for input System.out.print("Pick a card by typing the associated integer value: "); // Get the Player's card choice boolean check = false; while (check == false) { try { cardChoice = input.nextInt(); check = true; } catch (InputMismatchException e) { System.out.println("Please input an integer value"); input.nextLine(); } } // Store the card in a variable that the Player selected card = hand.get(cardChoice - 1); // Subtract one to account for 0 index // Store the card cost cardCost = card.getGoldCost(); // Does the player have enough unused gold to purchase the selected card? if (cardCost <= amountOfUnusedGold) { // Hey, you owe the game some GOLD! Pay this off!! int unpaidAmount = cardCost; // Pay for the card for (int i = 0, n = inPlayZone.size(); i < n && unpaidAmount != 0; i++) { if (inPlayZone.get(i) instanceof Gold) { if (!((Gold) inPlayZone.get(i)).isUsed()) { // Set the gold card from used is false to used is true ((Gold) inPlayZone.get(i)).setUsed(true); unpaidAmount--; // Now you owe us less, does your wallet feel lighter? } } } // Update value of amountOfUnusedGold (important for while loop to work) amountOfUnusedGold = calculateAmountOfUnusedGold(inPlayZone); // Remove the paid for card from the player's hand hand.remove(card); // Case when it is a monster if (card instanceof Monster) { // Add the paid for card to the player's play zone inPlayZone.add(card); // We'll need to check again, now that we have purchased something if we can still afford anything isAtLeastOneAffordable = false; System.out.println("You played a " + card.getCardName() + " to your play zone."); // Case when it is an Action Card Execute } else if (card instanceof Execute) { ((Execute) card).killSelectMonster(inSelectPlayZone, deadZone); } } else { // Player does not have enough unused gold to pay for the card System.out.println("You do not have enough unused gold to pay for " + card.getCardName()); break; } } else { break; } } // Print all cards in the player's play zone System.out.println("The following are the cards you have in play: "); for (Card card : inPlayZone) System.out.println(card.getCardName()); // End the purchase phase purchasePhase = false; System.out.println("End [PURCHASE PHASE]"); } /** * Helper method for startPurchasePhase method * @param hand * @return */ private boolean isAPurchaseableInHand(ArrayList<Card> hand) { boolean yesAtLeastOnePurchaseable = false; for (int i = 0, n = hand.size(); i < n; i++) if (hand.get(i) instanceof Monster || hand.get(i) instanceof Execute || hand.get(i) instanceof Accessory) yesAtLeastOnePurchaseable = true; return yesAtLeastOnePurchaseable; } /** * Start the end phase for a player's turn. */ public void startEndPhase() { endPhase = true; // TODO: Implement a mechanism to allow player to discard down to 7 cards (this is the max hand size for a game) System.out.println("Start [END PHASE]"); // Give player one the option to pass his or her turn do { System.out.print("Would you like to pass your turn? (Y/N):"); decideYN = input.next().charAt(0); // VALIDATE that this is working as intended, getting one char } while (!(decideYN == 'Y' || decideYN == 'y')); endPhase = false; System.out.println("End [END PHASE]"); } /** * Increment the number of total turns for a game */ public void nextTurn() { this.totalTurns++; } }
TheBlackArts/src/core/Game.java
/* * "All that is gold does not glitter, * Not all those who wander are lost; * The old that is strong does not wither, * Deep roots are not reached by the frost. * From the ashes a fire shall be woken, * A light from the shadows shall spring; * Renewed shall be blade that was broken, * The crownless again shall be king." * --J. R. R. Tolkein * * glhf */ package core; import java.util.ArrayList; import java.util.InputMismatchException; import java.util.Scanner; public class Game { // Each game is played by two players private Player playerOne; private Player playerTwo; // Who's turn it is, is based on modulo 2 (0 is player 1's turn, 1 is player 2's turn) private int totalTurns = 0; // Deck Manager private Deck deckManager = new Deck(); // Turn Phases private boolean refreshPhase; private boolean drawPhase; private boolean attackPhase; private boolean minePhase; private boolean purchasePhase; private boolean endPhase; // FTW! boolean win = false; // Game Zones private ArrayList<Card> playerOneInPlayZone = new ArrayList<Card>(); private ArrayList<Card> playerTwoInPlayZone = new ArrayList<Card>(); private ArrayList<Card> playerOneDeadZone = new ArrayList<Card>(); private ArrayList<Card> playerTwoDeadZone = new ArrayList<Card>(); // UI input private Scanner input = new Scanner(System.in); private char decideYN; private int cardChoice; private Card card; // Constructors public Game(Player playerOne, Player playerTwo) { this.playerOne = playerOne; this.playerTwo = playerTwo; } /** * This method starts a game of The Black Arts. * Player's will alternate turns until one of them runs out of HP! * @param playerOne Player one * @param playerTwo Player two */ public void startGame(Player playerOne, Player playerTwo) { // Get each Player's Deck ArrayList<Card> deckOne = playerOne.getDeck(); ArrayList<Card> deckTwo = playerTwo.getDeck(); // Shuffle each Player's Deck deckManager.shuffleDeck(deckOne); deckManager.shuffleDeck(deckTwo); // Deal Each Player's starting hand playerOne.setHand(deckManager.dealSevenCards(deckOne)); playerTwo.setHand(deckManager.dealSevenCards(deckTwo)); // Create a reference variable for each Player's hand ArrayList<Card> handOne = playerOne.getHand(); ArrayList<Card> handTwo = playerTwo.getHand(); // Main game loop that allows players to take turns until one of them goes to 0 HP. gameloop: while (true) { // Display the turn number (e.g. first turn is 1, second turn is 2, and so on) System.out.println(":: Turn :: " + (totalTurns + 1)); if (totalTurns % 2 == 0) { // We know it is playerOne's turn // Announce that it is player one's turn System.out.println("It is " + playerOne.getFirstName() + "'s turn."); /* ======================================================== * PLAYER ONE'S TURN PHASES * ========================================================*/ // ********************* (1) Refresh ********************* startRefreshPhase(playerOneInPlayZone); // ********************* (2) Draw ************************ startDrawPhase(deckOne, handOne); // ********************* (3) Attack ********************** startAttackPhase(playerOneInPlayZone, playerTwoInPlayZone, playerOneDeadZone, playerTwoDeadZone, this.playerOne, this.playerTwo); if (win) break gameloop; // ********************* (4) Mine ************************ startMinePhase(handOne, playerOneInPlayZone); // ********************* (5) Purchase ******************** startPurchasePhase(handOne, playerOneInPlayZone, playerTwoInPlayZone, playerTwoDeadZone); // ********************* (6) End ************************* startEndPhase(); } else if (totalTurns % 2 == 1) { // We know it is playerTwo's turn System.out.println("It is " + playerTwo.getFirstName() + "'s turn."); /* ======================================================== * PLAYER TWO's TURN PHASES * ========================================================*/ // ********************* (1) Refresh ********************* startRefreshPhase(playerTwoInPlayZone); // ********************* (2) Draw ************************ startDrawPhase(deckTwo, handTwo); // ********************* (3) Attack ********************** startAttackPhase(playerTwoInPlayZone, playerOneInPlayZone, playerTwoDeadZone, playerOneDeadZone, this.playerTwo, this.playerOne); if (win) break gameloop; // ********************* (4) Mine ************************ startMinePhase(handTwo, playerTwoInPlayZone); // ********************* (5) Purchase ******************** startPurchasePhase(handTwo, playerTwoInPlayZone, playerOneInPlayZone, playerOneDeadZone); // ********************* (6) End ************************* startEndPhase(); } // Increment totalTurns nextTurn(); } } /* ======================================================== * UI METHODS * ========================================================*/ /** * Allow player to choose a card from his or her hand */ public void selectOneHandCard(ArrayList<Card> hand) { // Prompt the player which card to choose System.out.println("Which card would you like to select?"); System.out.println("HAND"); // Display all the cards with a number for (int i = 0, n = hand.size(); i < n; i++) { System.out.println((i + 1) + ": " + hand.get(i).getCardName()); } // Prompt player for selection System.out.print("Select (enter a number): "); // Get the selection int selection = input.nextInt(); // Get the card Card card = hand.get(selection - 1); // Check if the card is a Monster or not a Monster if (card instanceof Monster) { System.out.println("You picked " + card.getCardName()); System.out.println("It costs " + card.getGoldCost()); } else if (card instanceof Gold) { System.out.println("You picked " + card.getCardName()); } } /* ======================================================== * SETTER AND GETTER METHODS * ========================================================*/ public ArrayList<Card> getPlayerOneInPlayZone() { return playerOneInPlayZone; } public ArrayList<Card> getPlayerTwoInPlayZone() { return playerTwoInPlayZone; } public int getTotalTurns() { return totalTurns; } public void setTotalTurns(int totalTurns) { this.totalTurns = totalTurns; } public Player getPlayerOne() { return playerOne; } public void setPlayerOne(Player playerOne) { this.playerOne = playerOne; } public Player getPlayerTwo() { return playerTwo; } public void setPlayerTwo(Player playerTwo) { this.playerTwo = playerTwo; } /* ======================================================== * GAME UTILITY METHODS * ========================================================*/ /** * Announce the game! */ public void announceGame() { System.out.println("** Welcome to The Dark Arts game **\n"); } /** * Count the amount of unused gold a player has in their play zone */ public int calculateAmountOfUnusedGold(ArrayList<Card> inPlayZone) { int amountOfUnusedGold = 0; for (int i = 0, n = inPlayZone.size(); i < n; i++) { if (inPlayZone.get(i) instanceof Gold) { if (((Gold) inPlayZone.get(i)).isUsed() == false) { amountOfUnusedGold++; } } } return amountOfUnusedGold; } /** * Remove one select card from player one's in play zone */ public void removeOneFromPlayerOneInPlayZone(int card) { this.playerOneInPlayZone.remove(card); } /** * Remove one select card from player two's in play zone */ public void removeOneFromPlayerTwoInPlayZone(int card) { this.playerTwoInPlayZone.remove(card); } /** * Add one card to player one's dead zone */ public void addCardToPlayerOneDeadZone(Card card) { this.playerOneDeadZone.add(card); } /** * Add one card to player two's dead zone */ public void addCardToPlayerTwoDeadZone(Card card) { this.playerTwoDeadZone.add(card); } /* ======================================================== * PHASE METHODS * ========================================================*/ /** * Start the refresh phase for a player's turn. * @param inPlayZone * Refresh Phase: */ public void startRefreshPhase(ArrayList<Card> inPlayZone) { refreshPhase = true; System.out.println("Start [REFRESH PHASE]"); // For each Gold card that playerOne owns, it should go from used to unused // For each Monster it should go from attacked to not attacked for (Card c : inPlayZone) { if (c instanceof Gold) { ((Gold) c).setUsed(false); } else if (c instanceof Monster) { ((Monster) c).setIsAttacked(false); } } // For each card that has a game mechanic that is triggered by Refresh, // it should have it's behavior here refreshPhase = false; // end refresh phase System.out.println("End [REFRESH PHASE]"); } /** * Start a player's draw phase * @param deck * @param hand */ public void startDrawPhase(ArrayList<Card> deck, ArrayList<Card> hand) { drawPhase = true; // begin draw phase System.out.println("Start [DRAW PHASE]"); if (totalTurns != 0) { // if it is not the first turn then deal one card to the Player Card dealtCard = deckManager.dealOneCard(deck); hand.add(dealtCard); System.out.println("You drew a " + dealtCard.getCardName()); } drawPhase = false; // end draw phase // TODO: Implement method to shuffle Dead Zone cards back into the deck when there are no more cards // to draw from a Player's deck. System.out.println("End [DRAW PHASE]"); } /** * Start the attack phase * @param attackerInPlayZone * @param defenderInPlayZone * @param attackerDeadZone * @param defenderDeadZone * @param playerAttack * @param playerDefend */ public void startAttackPhase(ArrayList<Card> attackerInPlayZone, ArrayList<Card> defenderInPlayZone, ArrayList<Card> attackerDeadZone, ArrayList<Card> defenderDeadZone, Player playerAttack, Player playerDefend) { attackPhase = true; System.out.println("Start [ATTACK PHASE]"); Scanner input = new Scanner(System.in); ArrayList<Monster> attackers = new ArrayList<Monster>(); ArrayList<Monster> availableDefenders = new ArrayList<Monster>(); /* Check that there is at least one Monster in play for the attacker * If there is not at least one Monster, skip the Attack phase */ for (int i = 0, n = attackerInPlayZone.size(); i < n; i++) { if (attackerInPlayZone.get(i) instanceof Monster) { // We know there is at least one monster System.out.println("Do you wish to attack (Y/N)"); decideYN = input.next().charAt(0); if (decideYN == 'Y' || decideYN == 'y') { /* ======================================================== * ATTACK PORTION OF ATTACK PHASE * ========================================================*/ // Prompt the attacker to select the Monsters he or she would like to attack with int newHP = 0; int attackValue = 0; System.out.println("Select a set of Monsters to attack with (ex. 1,2; no spaces)"); // Display attacker's monsters that he or she could attack with // TODO: Make this a helper method for (int j = 0; j < n; j++) if (attackerInPlayZone.get(j) instanceof Monster) System.out.println((j + 1) + ": " + attackerInPlayZone.get(j).getCardName()); // Get the input String attackSelectsStr = input.next(); // Parse the input String[] attackSelects = attackSelectsStr.split(","); for (String str : attackSelects) { // Toggle isAttacked for each selected monster from false to true ((Monster)attackerInPlayZone.get(Integer.parseInt(str) - 1)).setIsAttacked(true); // Display which Monsters attacked System.out.println("You attacked with " + attackerInPlayZone.get(Integer.parseInt(str) - 1).getCardName()); // Add the Monsters to the attackers ArrayList Monster myAttackMonster = ((Monster) attackerInPlayZone.get(Integer.parseInt(str) - 1)); attackers.add(myAttackMonster); } /* ======================================================== * DEFENSE PORTION OF ATTACK PHASE * ========================================================*/ /* check that there is at least one monster in play for the defender If there is not at least one monster, skip the defense portion of the attack phase */ for (Card card1 : defenderInPlayZone) { if (card1 instanceof Monster) { for (Card card2 : defenderInPlayZone) { if (card2 instanceof Monster) { Monster myDefendMonster = ((Monster) card2); availableDefenders.add(myDefendMonster); } } /* ------------------------------------------------------------------------ * Print out the columns of the Attackers and Potential Defenders * ------------------------------------------------------------------------ */ // Print the header System.out.format("%-20s%s\n", "Attackers:", "Avail. Defenders:"); // Determine which of the two ArrayLists are longer if (attackers.size() > availableDefenders.size()) { // attackers is the bigger ArrayList for (int j = 0, as = attackers.size(); j < as; j++) { System.out.format("%s %-18s", j, attackers.get(j).getCardName()); if (j >= 0 && j < availableDefenders.size()) { System.out.println((j + 1) + " " + availableDefenders.get(j).getCardName()); } else { System.out.println(); } } } else { // availableDefenders is the bigger ArrayList for (int k = 0, ads = availableDefenders.size(); k < ads; k++) { if (k >= 0 && k < attackers.size()) { System.out.format("%s %-18s", k + 1, attackers.get(k).getCardName()); } else { System.out.format("%-20s", ""); } System.out.println((k + 1) + " " + availableDefenders.get(k).getCardName()); } } break; // since we found one monster for the defender, break out of this loop as // it has served its purpose } } ArrayList<String> myDefendString = new ArrayList<String>(); String defendString; String defendChoice=""; System.out.println("Do you wish to defend (Y/N)"); decideYN = input.next().charAt(0); if (decideYN == 'Y' || decideYN == 'y' ) { System.out.println("How do you want to Defend Ex: (1,2)(2,1)"); defendString = input.next(); for (i = 0; i < defendString.length(); i++) { if (Character.isDigit(defendString.charAt(i)) || defendString.charAt(i) == ',') { defendChoice += defendString.charAt(i); } if (defendString.charAt(i)==')') { myDefendString.add(defendChoice); defendChoice = ""; } } ArrayList<Integer> myDefendIntegerArray = new ArrayList<Integer>(); String strToInt =""; for (int j = 0; j < myDefendString.size(); j++ ) { for (int t = 0; t < myDefendString.get(j).length(); t++) { if (Character.isDigit(myDefendString.get(j).charAt(t))) { strToInt += myDefendString.get(j).charAt(t); } if (myDefendString.get(j).charAt(t) == ',' || t == (myDefendString.get(j).length() - 1)) { myDefendIntegerArray.add(Integer.parseInt(strToInt)); strToInt = ""; } } /* ======================================================== * DAMAGE PORTION OF ATTACK PHASE * ========================================================*/ int currentAttack = attackers.get(myDefendIntegerArray.get(0) - 1).getAttack(); int currentAttackHP = attackers.get(myDefendIntegerArray.get(0) - 1).getHitPoints(); int currentDefenseHP; int currentDefenseAttack; for (int f = 1; f < myDefendIntegerArray.size(); f++) { currentDefenseAttack = availableDefenders.get(myDefendIntegerArray.get(f) - 1).getAttack(); currentDefenseHP = availableDefenders.get(myDefendIntegerArray.get(f) - 1).getHitPoints(); currentAttackHP -= currentDefenseAttack; currentDefenseHP -= currentAttack; // kill the defending monster that ran out of HP if (currentDefenseHP <= 0) { defenderInPlayZone.remove(availableDefenders.get(myDefendIntegerArray.get(f) - 1)); defenderDeadZone.add(availableDefenders.get(myDefendIntegerArray.get(f) - 1)); } currentAttack = (-1 * currentDefenseHP); } // kill the attacking monster that ran out of HP if (currentAttackHP <= 0) { attackerInPlayZone.remove(attackers.get(myDefendIntegerArray.get(0) - 1)); attackerDeadZone.add(attackers.get(myDefendIntegerArray.get(0) - 1)); } attackers.remove(myDefendIntegerArray.get(0) - 1); myDefendIntegerArray.clear(); System.out.println(""); } } else System.out.println("Defend phase over"); for (int f = 0; f < attackers.size(); f++) { attackValue += attackers.get(f).getAttack(); } newHP = playerDefend.getHitPoints() - attackValue; // Win condition if (newHP <= 0) { // Attacking player wins! System.out.println(playerAttack.getFirstName() + " wins!"); win = true; } else { playerDefend.setHitPoints(newHP); System.out.println("Damage has been assigned, " + playerDefend.getFirstName() + " you are now at " + newHP + " HP"); break; // Since we found one monster for the attacker, break out of this loop as // it has served its purpose } } } System.out.println(attackerInPlayZone); System.out.println(defenderInPlayZone); System.out.println(attackerDeadZone); System.out.println(defenderDeadZone); attackPhase = false; System.out.println("End [ATTACK PHASE]"); } } /** * Start the mine phase for a player, allowing that player to play one gold card (if they have one) from their * hand into their play zone. * @param hand * @param inPlayZone */ public void startMinePhase(ArrayList<Card> hand, ArrayList<Card> inPlayZone) { Card card; // used for storing a card selected by a player (remove the card, add the card, print the card) System.out.println("Start [MINE PHASE]"); /* For each card in Player One's hand, is there at least one Gold card? * If yes, give the Player an option to play it and stop checking for Gold cards * NOTE: There is no way to bluff using this system */ for (int i = 0, n = hand.size(); i < n; i++) { card = hand.get(i); if (card instanceof Gold) { System.out.println("You have a Gold card to play, would you like to play it? Y/N: "); decideYN = input.next().charAt(0); if (decideYN == 'Y' || decideYN == 'y') { // Remove the card from a Player's hand hand.remove(i); // Add the card to play zone inPlayZone.add(card); break; } } } minePhase = false; // end minePhase System.out.println("End [MINE PHASE]"); } /** * Start the purchase phase for a player, allowing them to purchase an * arbitrary number of cards, subject to the amount of unused gold they have available. * @param hand * @param inPlayZone */ public void startPurchasePhase(ArrayList<Card> hand, ArrayList<Card> inPlayZone, ArrayList<Card> inSelectPlayZone, ArrayList<Card> deadZone) { /* * We need a way to handle the purchase of Action cards differently than * Monster cards. Action cards are basically an object with a method that * affects the state of the game -- for instance killing a monster * If purchase action card, and that action card is say execute, * we need to run the kill monster method. * * Also, this code is getting really procedural, it would be easier to read * if we refactored the parts of it that repeat. */ purchasePhase = true; System.out.println("Start [PURCHASE PHASE]"); // Get the amount of gold that the player has at the start of his // or her purchase phase int amountOfUnusedGold = calculateAmountOfUnusedGold(inPlayZone); int cardCost; boolean isAtLeastOneAffordable = false; // While player has at least 1 or more unused Gold in play run through the purchase loop while (amountOfUnusedGold > 0) { // Check that there is at least one affordable card for (Card card : hand) if (card.getGoldCost() <= amountOfUnusedGold && !(card instanceof Gold)) { // Added <= instead of <, can revert back if this doesnt work isAtLeastOneAffordable = true; break; // Added this break to make it a lazy evaluation } // If there is at least one affordable purchasebale card allow player to purchase it if (isAtLeastOneAffordable) { /* For each card in Player's hand that has a cost (Monster, Action, Accessory), * display it along with an integer value that will act as an affordance to select * and pay for it, allowing the player to bring a card into play. */ for (int i = 0, n = hand.size(); i < n; i++) { if (hand.get(i) instanceof Monster || hand.get(i) instanceof Accessory || hand.get(i) instanceof Execute) { System.out.println((i + 1) + ": " + hand.get(i).getCardName() + ", " + hand.get(i).getGoldCost()); } } // Check if there are no purchaseable (Monster, Action, Accessory cards in hand, break if (!isAPurchaseableInHand(hand)) { break; } // Prompt the user for input System.out.print("Pick a card by typing the associated integer value: "); // Get the Player's card choice boolean check = false; while (check == false) { try { cardChoice = input.nextInt(); check = true; } catch (InputMismatchException e) { System.out.println("Please input an integer value"); input.nextLine(); } } // Store the card in a variable that the Player selected card = hand.get(cardChoice - 1); // Subtract one to account for 0 index // Store the card cost cardCost = card.getGoldCost(); // Does the player have enough unused gold to purchase the selected card? if (cardCost <= amountOfUnusedGold) { // Hey, you owe the game some GOLD! Pay this off!! int unpaidAmount = cardCost; // Pay for the card for (int i = 0, n = inPlayZone.size(); i < n && unpaidAmount != 0; i++) { if (inPlayZone.get(i) instanceof Gold) { if (!((Gold) inPlayZone.get(i)).isUsed()) { // Set the gold card from used is false to used is true ((Gold) inPlayZone.get(i)).setUsed(true); unpaidAmount--; // Now you owe us less, does your wallet feel lighter? } } } // Update value of amountOfUnusedGold (important for while loop to work) amountOfUnusedGold = calculateAmountOfUnusedGold(inPlayZone); // Remove the paid for card from the player's hand hand.remove(card); // Case when it is a monster if (card instanceof Monster) { // Add the paid for card to the player's play zone inPlayZone.add(card); // We'll need to check again, now that we have purchased something if we can still afford anything isAtLeastOneAffordable = false; System.out.println("You played a " + card.getCardName() + " to your play zone."); // Case when it is an Action Card Execute } else if (card instanceof Execute) { ((Execute) card).killSelectMonster(inSelectPlayZone, deadZone); } } else { // Player does not have enough unused gold to pay for the card System.out.println("You do not have enough unused gold to pay for " + card.getCardName()); break; } } else { break; } } // Print all cards in the player's play zone System.out.println("The following are the cards you have in play: "); for (Card card : inPlayZone) System.out.println(card.getCardName()); // End the purchase phase purchasePhase = false; System.out.println("End [PURCHASE PHASE]"); } /** * Helper method for startPurchasePhase method * @param hand * @return */ private boolean isAPurchaseableInHand(ArrayList<Card> hand) { boolean yesAtLeastOnePurchaseable = false; for (int i = 0, n = hand.size(); i < n; i++) if (hand.get(i) instanceof Monster || hand.get(i) instanceof Execute || hand.get(i) instanceof Accessory) yesAtLeastOnePurchaseable = true; return yesAtLeastOnePurchaseable; } /** * Start the end phase for a player's turn. */ public void startEndPhase() { endPhase = true; // TODO: Implement a mechanism to allow player to discard down to 7 cards (this is the max hand size for a game) System.out.println("Start [END PHASE]"); // Give player one the option to pass his or her turn do { System.out.print("Would you like to pass your turn? (Y/N):"); decideYN = input.next().charAt(0); // VALIDATE that this is working as intended, getting one char } while (!(decideYN == 'Y' || decideYN == 'y')); endPhase = false; System.out.println("End [END PHASE]"); } /** * Increment the number of total turns for a game */ public void nextTurn() { this.totalTurns++; } }
Implement win condition
TheBlackArts/src/core/Game.java
Implement win condition
Java
mit
38b3b19c3a6bad80e204fe8299958ca885b985ac
0
msimonides/homerplayer
package com.studio4plus.homerplayer.ui; import android.annotation.SuppressLint; import android.app.AlertDialog; import android.content.SharedPreferences; import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.preference.ListPreference; import android.preference.Preference; import android.preference.PreferenceFragment; import android.preference.PreferenceManager; import android.preference.SwitchPreference; import android.view.View; import android.view.WindowManager; import android.widget.Toast; import com.google.common.base.Preconditions; import com.studio4plus.homerplayer.BuildConfig; import com.studio4plus.homerplayer.GlobalSettings; import com.studio4plus.homerplayer.HomerPlayerApplication; import com.studio4plus.homerplayer.HomerPlayerDeviceAdmin; import com.studio4plus.homerplayer.R; import com.studio4plus.homerplayer.events.DeviceAdminChangeEvent; import com.studio4plus.homerplayer.events.SettingsEnteredEvent; import com.studio4plus.homerplayer.model.AudioBookManager; import javax.inject.Inject; import de.greenrobot.event.EventBus; public class SettingsActivity extends BaseActivity { // Pseudo preferences that don't change any preference values directly. private static final String KEY_KIOSK_MODE_SCREEN = "kiosk_mode_screen"; private static final String KEY_UNREGISTER_DEVICE_OWNER = "unregister_device_owner_preference"; private static final String KEY_RESET_ALL_BOOK_PROGRESS = "reset_all_book_progress_preference"; private static final String KEY_VERSION = "version_preference"; private static final int BLOCK_TIME_MS = 500; private Handler mainThreadHandler; private Runnable unblockEventsTask; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // Display the fragment as the main content. getFragmentManager().beginTransaction() .replace(android.R.id.content, new SettingsFragment()) .commit(); mainThreadHandler = new Handler(getMainLooper()); } @Override protected void onStart() { super.onStart(); blockEventsOnStart(); eventBus.post(new SettingsEnteredEvent()); } @Override protected void onStop() { super.onStop(); cancelBlockEventOnStart(); } @Override protected String getScreenName() { return "Settings"; } @Override public void onWindowFocusChanged(boolean hasFocus) { super.onWindowFocusChanged(hasFocus); if (hasFocus) { getWindow().getDecorView().setSystemUiVisibility( View.SYSTEM_UI_FLAG_LAYOUT_STABLE); } } public static class SettingsFragment extends PreferenceFragment implements SharedPreferences.OnSharedPreferenceChangeListener { @Inject public AudioBookManager audioBookManager; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); HomerPlayerApplication.getComponent(getActivity()).inject(this); addPreferencesFromResource(R.xml.preferences); SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(getActivity()); updateScreenOrientationSummary(sharedPreferences); updateJumpBackSummary(sharedPreferences); if (Build.VERSION.SDK_INT < 21) { Preference kioskModePreference = findPreference(GlobalSettings.KEY_KIOSK_MODE); kioskModePreference.setEnabled(false); } if (Build.VERSION.SDK_INT < 19) { Preference simpleKioskModePreference = findPreference(GlobalSettings.KEY_SIMPLE_KIOSK_MODE); simpleKioskModePreference.setEnabled(false); } updateKioskModeSummaries(); ConfirmDialogPreference preferenceUnregisterDeviceOwner = (ConfirmDialogPreference) findPreference(KEY_UNREGISTER_DEVICE_OWNER); if (Build.VERSION.SDK_INT >= 21) { preferenceUnregisterDeviceOwner.setOnConfirmListener( new ConfirmDialogPreference.OnConfirmListener() { @Override public void onConfirmed() { disableDeviceOwner(); } }); updateUnregisterDeviceOwner(HomerPlayerDeviceAdmin.isDeviceOwner(getActivity())); } else { getPreferenceScreen().removePreference(preferenceUnregisterDeviceOwner); } ConfirmDialogPreference preferenceResetProgress = (ConfirmDialogPreference) findPreference(KEY_RESET_ALL_BOOK_PROGRESS); preferenceResetProgress.setOnConfirmListener(new ConfirmDialogPreference.OnConfirmListener() { @Override public void onConfirmed() { audioBookManager.resetAllBookProgress(); Toast.makeText( getActivity(), R.string.pref_reset_all_book_progress_done, Toast.LENGTH_SHORT).show(); } }); updateVersionSummary(); } @Override public void onStart() { super.onStart(); SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(getActivity()); sharedPreferences.registerOnSharedPreferenceChangeListener(this); EventBus.getDefault().register(this); // A fix for the action bar covering the first preference. Preconditions.checkNotNull(getView()); getView().setFitsSystemWindows(true); } @Override public void onStop() { super.onStop(); SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(getActivity()); sharedPreferences.unregisterOnSharedPreferenceChangeListener(this); EventBus.getDefault().unregister(this); } @Override public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) { switch (key) { case GlobalSettings.KEY_KIOSK_MODE: onKioskModeSwitched(sharedPreferences); break; case GlobalSettings.KEY_SIMPLE_KIOSK_MODE: onAnyKioskModeSwitched(); break; case GlobalSettings.KEY_JUMP_BACK: updateJumpBackSummary(sharedPreferences); break; case GlobalSettings.KEY_SCREEN_ORIENTATION: updateScreenOrientationSummary(sharedPreferences); break; } } @SuppressWarnings("UnusedDeclaration") public void onEvent(DeviceAdminChangeEvent deviceAdminChangeEvent) { updateUnregisterDeviceOwner(deviceAdminChangeEvent.isEnabled); } private void updateScreenOrientationSummary(SharedPreferences sharedPreferences) { String stringValue = sharedPreferences.getString( GlobalSettings.KEY_SCREEN_ORIENTATION, getString(R.string.pref_screen_orientation_default_value)); ListPreference preference = (ListPreference) findPreference(GlobalSettings.KEY_SCREEN_ORIENTATION); int index = preference.findIndexOfValue(stringValue); preference.setSummary(preference.getEntries()[index]); } private void updateJumpBackSummary(SharedPreferences sharedPreferences) { String stringValue = sharedPreferences.getString( GlobalSettings.KEY_JUMP_BACK, getString(R.string.pref_jump_back_default_value)); int value = Integer.parseInt(stringValue); Preference preference = findPreference(GlobalSettings.KEY_JUMP_BACK); if (value == 0) { preference.setSummary(R.string.pref_jump_back_entry_disabled); } else { preference.setSummary(String.format( getString(R.string.pref_jump_back_summary), value)); } } private void updateKioskModeSummaries() { SwitchPreference fullModePreference = (SwitchPreference) findPreference(GlobalSettings.KEY_KIOSK_MODE); { int summaryStringId; if (Build.VERSION.SDK_INT < 21) { summaryStringId = R.string.pref_kiosk_mode_full_summary_old_version; } else { summaryStringId = fullModePreference.isChecked() ? R.string.pref_kiosk_mode_any_summary_on : R.string.pref_kiosk_mode_any_summary_off; } fullModePreference.setSummary(summaryStringId); } SwitchPreference simpleModePreference = (SwitchPreference) findPreference(GlobalSettings.KEY_SIMPLE_KIOSK_MODE); { int summaryStringId; if (Build.VERSION.SDK_INT < 19) { summaryStringId = R.string.pref_kiosk_mode_simple_summary_old_version; } else { summaryStringId = simpleModePreference.isChecked() ? R.string.pref_kiosk_mode_any_summary_on : R.string.pref_kiosk_mode_any_summary_off; } simpleModePreference.setSummary(summaryStringId); simpleModePreference.setEnabled(!fullModePreference.isChecked()); } } private void updateUnregisterDeviceOwner(boolean isEnabled) { Preference preference = findPreference(KEY_UNREGISTER_DEVICE_OWNER); preference.setEnabled(isEnabled); preference.setSummary(getString(isEnabled ? R.string.pref_kiosk_mode_unregister_device_owner_summary_on : R.string.pref_kiosk_mode_unregister_device_owner_summary_off)); } private void updateVersionSummary() { Preference preference = findPreference(KEY_VERSION); preference.setSummary(BuildConfig.VERSION_NAME); } private void disableDeviceOwner() { SwitchPreference kioskModePreference = (SwitchPreference) findPreference(GlobalSettings.KEY_KIOSK_MODE); kioskModePreference.setChecked(false); HomerPlayerDeviceAdmin.clearDeviceOwner(getActivity()); } @SuppressLint("CommitPrefEdits") private void onKioskModeSwitched(SharedPreferences sharedPreferences) { boolean isTaskLocked = ApplicationLocker.isTaskLocked(getActivity()); boolean newKioskModeEnabled = sharedPreferences.getBoolean(GlobalSettings.KEY_KIOSK_MODE, false); if (newKioskModeEnabled && !isTaskLocked) { boolean isLocked = ApplicationLocker.lockApplication(getActivity()); if (!isLocked) { AlertDialog dialog = new AlertDialog.Builder(getActivity()) .setMessage(getResources().getString( R.string.settings_device_owner_required_alert)) .setNeutralButton(android.R.string.ok, null) .create(); dialog.show(); SharedPreferences.Editor editor = sharedPreferences.edit(); editor.putBoolean(GlobalSettings.KEY_KIOSK_MODE, false); editor.commit(); SwitchPreference switchPreference = (SwitchPreference) findPreference(GlobalSettings.KEY_KIOSK_MODE); switchPreference.setChecked(false); } } else if (!newKioskModeEnabled && isTaskLocked) { ApplicationLocker.unlockApplication(getActivity()); } onAnyKioskModeSwitched(); } private void onAnyKioskModeSwitched() { updateKioskModeSummaries(); } } private void blockEventsOnStart() { getWindow().setFlags(WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE, WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE); unblockEventsTask = new Runnable() { @Override public void run() { getWindow().clearFlags(WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE); unblockEventsTask = null; } }; mainThreadHandler.postDelayed(unblockEventsTask, BLOCK_TIME_MS); } private void cancelBlockEventOnStart() { if (unblockEventsTask != null) mainThreadHandler.removeCallbacks(unblockEventsTask); } }
app/src/main/java/com/studio4plus/homerplayer/ui/SettingsActivity.java
package com.studio4plus.homerplayer.ui; import android.annotation.SuppressLint; import android.app.AlertDialog; import android.content.SharedPreferences; import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.preference.ListPreference; import android.preference.Preference; import android.preference.PreferenceFragment; import android.preference.PreferenceManager; import android.preference.SwitchPreference; import android.view.View; import android.view.WindowManager; import android.widget.Toast; import com.studio4plus.homerplayer.BuildConfig; import com.studio4plus.homerplayer.GlobalSettings; import com.studio4plus.homerplayer.HomerPlayerApplication; import com.studio4plus.homerplayer.HomerPlayerDeviceAdmin; import com.studio4plus.homerplayer.R; import com.studio4plus.homerplayer.events.DeviceAdminChangeEvent; import com.studio4plus.homerplayer.events.SettingsEnteredEvent; import com.studio4plus.homerplayer.model.AudioBookManager; import javax.inject.Inject; import de.greenrobot.event.EventBus; public class SettingsActivity extends BaseActivity { // Pseudo preferences that don't change any preference values directly. private static final String KEY_KIOSK_MODE_SCREEN = "kiosk_mode_screen"; private static final String KEY_UNREGISTER_DEVICE_OWNER = "unregister_device_owner_preference"; private static final String KEY_RESET_ALL_BOOK_PROGRESS = "reset_all_book_progress_preference"; private static final String KEY_VERSION = "version_preference"; private static final int BLOCK_TIME_MS = 500; private Handler mainThreadHandler; private Runnable unblockEventsTask; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // Display the fragment as the main content. getFragmentManager().beginTransaction() .replace(android.R.id.content, new SettingsFragment()) .commit(); mainThreadHandler = new Handler(getMainLooper()); } @Override protected void onStart() { super.onStart(); blockEventsOnStart(); eventBus.post(new SettingsEnteredEvent()); } @Override protected void onStop() { super.onStop(); cancelBlockEventOnStart(); } @Override protected String getScreenName() { return "Settings"; } @Override public void onWindowFocusChanged(boolean hasFocus) { super.onWindowFocusChanged(hasFocus); if (hasFocus) { getWindow().getDecorView().setSystemUiVisibility( View.SYSTEM_UI_FLAG_LAYOUT_STABLE); } } public static class SettingsFragment extends PreferenceFragment implements SharedPreferences.OnSharedPreferenceChangeListener { @Inject public AudioBookManager audioBookManager; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); HomerPlayerApplication.getComponent(getActivity()).inject(this); addPreferencesFromResource(R.xml.preferences); SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(getActivity()); updateScreenOrientationSummary(sharedPreferences); updateJumpBackSummary(sharedPreferences); if (Build.VERSION.SDK_INT < 21) { Preference kioskModePreference = findPreference(GlobalSettings.KEY_KIOSK_MODE); kioskModePreference.setEnabled(false); } if (Build.VERSION.SDK_INT < 19) { Preference simpleKioskModePreference = findPreference(GlobalSettings.KEY_SIMPLE_KIOSK_MODE); simpleKioskModePreference.setEnabled(false); } updateKioskModeSummaries(); ConfirmDialogPreference preferenceUnregisterDeviceOwner = (ConfirmDialogPreference) findPreference(KEY_UNREGISTER_DEVICE_OWNER); if (Build.VERSION.SDK_INT >= 21) { preferenceUnregisterDeviceOwner.setOnConfirmListener( new ConfirmDialogPreference.OnConfirmListener() { @Override public void onConfirmed() { disableDeviceOwner(); } }); updateUnregisterDeviceOwner(HomerPlayerDeviceAdmin.isDeviceOwner(getActivity())); } else { getPreferenceScreen().removePreference(preferenceUnregisterDeviceOwner); } ConfirmDialogPreference preferenceResetProgress = (ConfirmDialogPreference) findPreference(KEY_RESET_ALL_BOOK_PROGRESS); preferenceResetProgress.setOnConfirmListener(new ConfirmDialogPreference.OnConfirmListener() { @Override public void onConfirmed() { audioBookManager.resetAllBookProgress(); Toast.makeText( getActivity(), R.string.pref_reset_all_book_progress_done, Toast.LENGTH_SHORT).show(); } }); updateVersionSummary(); } @Override public void onStart() { super.onStart(); SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(getActivity()); sharedPreferences.registerOnSharedPreferenceChangeListener(this); EventBus.getDefault().register(this); } @Override public void onStop() { super.onStop(); SharedPreferences sharedPreferences = PreferenceManager.getDefaultSharedPreferences(getActivity()); sharedPreferences.unregisterOnSharedPreferenceChangeListener(this); EventBus.getDefault().unregister(this); } @Override public void onSharedPreferenceChanged(SharedPreferences sharedPreferences, String key) { switch (key) { case GlobalSettings.KEY_KIOSK_MODE: onKioskModeSwitched(sharedPreferences); break; case GlobalSettings.KEY_SIMPLE_KIOSK_MODE: onAnyKioskModeSwitched(); break; case GlobalSettings.KEY_JUMP_BACK: updateJumpBackSummary(sharedPreferences); break; case GlobalSettings.KEY_SCREEN_ORIENTATION: updateScreenOrientationSummary(sharedPreferences); break; } } @SuppressWarnings("UnusedDeclaration") public void onEvent(DeviceAdminChangeEvent deviceAdminChangeEvent) { updateUnregisterDeviceOwner(deviceAdminChangeEvent.isEnabled); } private void updateScreenOrientationSummary(SharedPreferences sharedPreferences) { String stringValue = sharedPreferences.getString( GlobalSettings.KEY_SCREEN_ORIENTATION, getString(R.string.pref_screen_orientation_default_value)); ListPreference preference = (ListPreference) findPreference(GlobalSettings.KEY_SCREEN_ORIENTATION); int index = preference.findIndexOfValue(stringValue); preference.setSummary(preference.getEntries()[index]); } private void updateJumpBackSummary(SharedPreferences sharedPreferences) { String stringValue = sharedPreferences.getString( GlobalSettings.KEY_JUMP_BACK, getString(R.string.pref_jump_back_default_value)); int value = Integer.parseInt(stringValue); Preference preference = findPreference(GlobalSettings.KEY_JUMP_BACK); if (value == 0) { preference.setSummary(R.string.pref_jump_back_entry_disabled); } else { preference.setSummary(String.format( getString(R.string.pref_jump_back_summary), value)); } } private void updateKioskModeSummaries() { SwitchPreference fullModePreference = (SwitchPreference) findPreference(GlobalSettings.KEY_KIOSK_MODE); { int summaryStringId; if (Build.VERSION.SDK_INT < 21) { summaryStringId = R.string.pref_kiosk_mode_full_summary_old_version; } else { summaryStringId = fullModePreference.isChecked() ? R.string.pref_kiosk_mode_any_summary_on : R.string.pref_kiosk_mode_any_summary_off; } fullModePreference.setSummary(summaryStringId); } SwitchPreference simpleModePreference = (SwitchPreference) findPreference(GlobalSettings.KEY_SIMPLE_KIOSK_MODE); { int summaryStringId; if (Build.VERSION.SDK_INT < 19) { summaryStringId = R.string.pref_kiosk_mode_simple_summary_old_version; } else { summaryStringId = simpleModePreference.isChecked() ? R.string.pref_kiosk_mode_any_summary_on : R.string.pref_kiosk_mode_any_summary_off; } simpleModePreference.setSummary(summaryStringId); simpleModePreference.setEnabled(!fullModePreference.isChecked()); } } private void updateUnregisterDeviceOwner(boolean isEnabled) { Preference preference = findPreference(KEY_UNREGISTER_DEVICE_OWNER); preference.setEnabled(isEnabled); preference.setSummary(getString(isEnabled ? R.string.pref_kiosk_mode_unregister_device_owner_summary_on : R.string.pref_kiosk_mode_unregister_device_owner_summary_off)); } private void updateVersionSummary() { Preference preference = findPreference(KEY_VERSION); preference.setSummary(BuildConfig.VERSION_NAME); } private void disableDeviceOwner() { SwitchPreference kioskModePreference = (SwitchPreference) findPreference(GlobalSettings.KEY_KIOSK_MODE); kioskModePreference.setChecked(false); HomerPlayerDeviceAdmin.clearDeviceOwner(getActivity()); } @SuppressLint("CommitPrefEdits") private void onKioskModeSwitched(SharedPreferences sharedPreferences) { boolean isTaskLocked = ApplicationLocker.isTaskLocked(getActivity()); boolean newKioskModeEnabled = sharedPreferences.getBoolean(GlobalSettings.KEY_KIOSK_MODE, false); if (newKioskModeEnabled && !isTaskLocked) { boolean isLocked = ApplicationLocker.lockApplication(getActivity()); if (!isLocked) { AlertDialog dialog = new AlertDialog.Builder(getActivity()) .setMessage(getResources().getString( R.string.settings_device_owner_required_alert)) .setNeutralButton(android.R.string.ok, null) .create(); dialog.show(); SharedPreferences.Editor editor = sharedPreferences.edit(); editor.putBoolean(GlobalSettings.KEY_KIOSK_MODE, false); editor.commit(); SwitchPreference switchPreference = (SwitchPreference) findPreference(GlobalSettings.KEY_KIOSK_MODE); switchPreference.setChecked(false); } } else if (!newKioskModeEnabled && isTaskLocked) { ApplicationLocker.unlockApplication(getActivity()); } onAnyKioskModeSwitched(); } private void onAnyKioskModeSwitched() { updateKioskModeSummaries(); } } private void blockEventsOnStart() { getWindow().setFlags(WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE, WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE); unblockEventsTask = new Runnable() { @Override public void run() { getWindow().clearFlags(WindowManager.LayoutParams.FLAG_NOT_TOUCHABLE); unblockEventsTask = null; } }; mainThreadHandler.postDelayed(unblockEventsTask, BLOCK_TIME_MS); } private void cancelBlockEventOnStart() { if (unblockEventsTask != null) mainThreadHandler.removeCallbacks(unblockEventsTask); } }
Fix action bar covering the first preference.
app/src/main/java/com/studio4plus/homerplayer/ui/SettingsActivity.java
Fix action bar covering the first preference.
Java
mit
ece6161a491b923746208774b365ab845ea67612
0
Col-E/Recaf,Col-E/Recaf
package me.coley.recaf.decompile.fernflower; import me.coley.recaf.decompile.Decompiler; import me.coley.recaf.workspace.Workspace; import org.jetbrains.java.decompiler.main.extern.IBytecodeProvider; import org.jetbrains.java.decompiler.main.extern.IFernflowerPreferences; import java.io.*; import java.util.*; /** * FernFlower decompiler implementation. * * @author Matt */ public class FernFlowerDecompiler extends Decompiler<Object> { @Override protected Map<String, Object> generateDefaultOptions() { Map<String, Object> map = new HashMap<>(IFernflowerPreferences.getDefaults()); map.put("ind", "\t"); return map; } @Override public String decompile(Workspace workspace, String name) { FernFlowerLogger logger = new FernFlowerLogger(); DummyCollector collector = new DummyCollector(); IBytecodeProvider provider = (externalPath, internalPath) -> { if(internalPath != null) { String className = internalPath.substring(0, internalPath.indexOf(".class")); return workspace.getRawClass(className); } throw new IllegalStateException("Provider should only receive internal names."+ "Got external name: " + externalPath); }; FernFlowerAccessor decompiler = new FernFlowerAccessor(provider, collector, getOptions(), logger); try { decompiler.addWorkspace(workspace); } catch(IOException ex) { throw new IllegalStateException("Failed to load inputs for FernFlower!", ex); } catch(ReflectiveOperationException ex) { throw new IllegalStateException("Failed to setup FernFlower!", ex); } decompiler.analyze(); return decompiler.decompile(name); } }
src/main/java/me/coley/recaf/decompile/fernflower/FernFlowerDecompiler.java
package me.coley.recaf.decompile.fernflower; import me.coley.recaf.decompile.Decompiler; import me.coley.recaf.workspace.Workspace; import org.jetbrains.java.decompiler.main.extern.IBytecodeProvider; import org.jetbrains.java.decompiler.main.extern.IFernflowerPreferences; import java.io.*; import java.util.*; /** * FernFlower decompiler implementation. * * @author Matt */ public class FernFlowerDecompiler extends Decompiler<Object> { @Override protected Map<String, Object> generateDefaultOptions() { Map<String, Object> map = new HashMap<>(IFernflowerPreferences.getDefaults()); map.put("ind", "\t"); return map; } @Override public String decompile(Workspace workspace, String name) { FernFlowerLogger logger = new FernFlowerLogger(); DummyCollector collector = new DummyCollector(); IBytecodeProvider provider = (externalPath, internalPath) -> { if(internalPath != null) { String className = internalPath.substring(0, internalPath.indexOf(".class")); return workspace.getRawClass(className); } throw new IllegalStateException("Provider should only receive internal names."+ "Got external name: " + externalPath); }; FernFlowerAccessor decompiler = new FernFlowerAccessor(provider, collector, getOptions(), logger); try { decompiler.addWorkspace(workspace); } catch(IOException e) { e.printStackTrace(); } catch(ReflectiveOperationException e) { e.printStackTrace(); } decompiler.analyze(); return decompiler.decompile(name); } }
Throws for FernFlower failures
src/main/java/me/coley/recaf/decompile/fernflower/FernFlowerDecompiler.java
Throws for FernFlower failures
Java
apache-2.0
9ffb12fe4d058bf42423307d19c9ab8d54ded4ab
0
allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.testFramework; import com.intellij.ide.startup.impl.StartupManagerImpl; import com.intellij.lang.*; import com.intellij.lang.impl.PsiBuilderFactoryImpl; import com.intellij.lang.injection.InjectedLanguageManager; import com.intellij.lang.injection.MultiHostInjector; import com.intellij.mock.*; import com.intellij.openapi.application.ex.PathManagerEx; import com.intellij.openapi.editor.EditorFactory; import com.intellij.openapi.extensions.DefaultPluginDescriptor; import com.intellij.openapi.extensions.ExtensionPoint; import com.intellij.openapi.extensions.ExtensionPointName; import com.intellij.openapi.extensions.PluginDescriptor; import com.intellij.openapi.extensions.impl.ExtensionPointImpl; import com.intellij.openapi.extensions.impl.ExtensionsAreaImpl; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.fileEditor.impl.FileDocumentManagerImpl; import com.intellij.openapi.fileEditor.impl.LoadTextUtil; import com.intellij.openapi.fileTypes.FileTypeFactory; import com.intellij.openapi.fileTypes.FileTypeManager; import com.intellij.openapi.options.SchemeManagerFactory; import com.intellij.openapi.progress.EmptyProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.progress.impl.ProgressManagerImpl; import com.intellij.openapi.startup.StartupManager; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.LineColumn; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.CharsetToolkit; import com.intellij.pom.PomModel; import com.intellij.pom.core.impl.PomModelImpl; import com.intellij.pom.tree.TreeAspect; import com.intellij.psi.*; import com.intellij.psi.impl.*; import com.intellij.psi.impl.source.resolve.reference.ReferenceProvidersRegistry; import com.intellij.psi.impl.source.resolve.reference.ReferenceProvidersRegistryImpl; import com.intellij.psi.impl.source.tree.injected.InjectedLanguageManagerImpl; import com.intellij.psi.util.CachedValuesManager; import com.intellij.util.CachedValuesManagerImpl; import com.intellij.util.KeyedLazyInstance; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.messages.MessageBus; import org.jetbrains.annotations.NotNull; import org.picocontainer.ComponentAdapter; import org.picocontainer.MutablePicoContainer; import java.io.File; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.*; /** @noinspection JUnitTestCaseWithNonTrivialConstructors*/ public abstract class ParsingTestCase extends UsefulTestCase { private PluginDescriptor myPluginDescriptor; private MockApplication myApp; protected MockProjectEx myProject; protected String myFilePrefix = ""; protected String myFileExt; protected final String myFullDataPath; protected PsiFile myFile; private MockPsiManager myPsiManager; private PsiFileFactoryImpl myFileFactory; protected Language myLanguage; private final ParserDefinition[] myDefinitions; private final boolean myLowercaseFirstLetter; private ExtensionPointImpl<KeyedLazyInstance<ParserDefinition>> myLangParserDefinition; protected ParsingTestCase(@NotNull String dataPath, @NotNull String fileExt, @NotNull ParserDefinition... definitions) { this(dataPath, fileExt, false, definitions); } protected ParsingTestCase(@NotNull String dataPath, @NotNull String fileExt, boolean lowercaseFirstLetter, @NotNull ParserDefinition... definitions) { myDefinitions = definitions; myFullDataPath = getTestDataPath() + "/" + dataPath; myFileExt = fileExt; myLowercaseFirstLetter = lowercaseFirstLetter; } @NotNull protected MockApplication getApplication() { return myApp; } @Override protected void setUp() throws Exception { super.setUp(); MockApplication app = MockApplication.setUp(getTestRootDisposable()); myApp = app; MutablePicoContainer appContainer = app.getPicoContainer(); ComponentAdapter component = appContainer.getComponentAdapter(ProgressManager.class.getName()); if (component == null) { appContainer.registerComponentInstance(ProgressManager.class.getName(), new ProgressManagerImpl()); } myProject = new MockProjectEx(getTestRootDisposable()); myPsiManager = new MockPsiManager(myProject); myFileFactory = new PsiFileFactoryImpl(myPsiManager); appContainer.registerComponentInstance(MessageBus.class, app.getMessageBus()); appContainer.registerComponentInstance(SchemeManagerFactory.class, new MockSchemeManagerFactory()); MockEditorFactory editorFactory = new MockEditorFactory(); appContainer.registerComponentInstance(EditorFactory.class, editorFactory); appContainer.registerComponentInstance(FileDocumentManager.class, new MockFileDocumentManagerImpl(charSequence -> { return editorFactory.createDocument(charSequence); }, FileDocumentManagerImpl.HARD_REF_TO_DOCUMENT_KEY)); appContainer.registerComponentInstance(PsiDocumentManager.class, new MockPsiDocumentManager()); app.registerService(PsiBuilderFactory.class, new PsiBuilderFactoryImpl()); app.registerService(DefaultASTFactory.class, new DefaultASTFactoryImpl()); app.registerService(ReferenceProvidersRegistry.class, new ReferenceProvidersRegistryImpl()); myProject.registerService(CachedValuesManager.class, new CachedValuesManagerImpl(myProject, new PsiCachedValuesFactory(myPsiManager))); myProject.registerService(PsiManager.class, myPsiManager); myProject.registerService(StartupManager.class, new StartupManagerImpl(myProject)); registerExtensionPoint(app.getExtensionArea(), FileTypeFactory.FILE_TYPE_FACTORY_EP, FileTypeFactory.class); registerExtensionPoint(app.getExtensionArea(), MetaLanguage.EP_NAME, MetaLanguage.class); myLangParserDefinition = app.getExtensionArea().registerFakeBeanPoint(LanguageParserDefinitions.INSTANCE.getName(), getPluginDescriptor()); if (myDefinitions.length > 0) { configureFromParserDefinition(myDefinitions[0], myFileExt); // first definition is registered by configureFromParserDefinition for (int i = 1, length = myDefinitions.length; i < length; i++) { registerParserDefinition(myDefinitions[i]); } } // That's for reparse routines PomModelImpl pomModel = new PomModelImpl(myProject); myProject.registerService(PomModel.class, pomModel); new TreeAspect(pomModel); } protected final void registerParserDefinition(@NotNull ParserDefinition definition) { final Language language = definition.getFileNodeType().getLanguage(); myLangParserDefinition.registerExtension(new KeyedLazyInstance<ParserDefinition>() { @Override public String getKey() { return language.getID(); } @NotNull @Override public ParserDefinition getInstance() { return definition; } }); LanguageParserDefinitions.INSTANCE.clearCache(language); disposeOnTearDown(() -> LanguageParserDefinitions.INSTANCE.clearCache(language)); } public void configureFromParserDefinition(@NotNull ParserDefinition definition, String extension) { myLanguage = definition.getFileNodeType().getLanguage(); myFileExt = extension; registerParserDefinition(definition); myApp.getPicoContainer().registerComponentInstance(FileTypeManager.class, new MockFileTypeManager(new MockLanguageFileType(myLanguage, myFileExt))); } protected final <T> void registerExtension(@NotNull ExtensionPointName<T> name, @NotNull T extension) { //noinspection unchecked registerExtensions(name, (Class<T>)extension.getClass(), Collections.singletonList(extension)); } protected final <T> void registerExtensions(@NotNull ExtensionPointName<T> name, @NotNull Class<T> extensionClass, @NotNull List<T> extensions) { ExtensionsAreaImpl area = myApp.getExtensionArea(); ExtensionPoint<T> point = area.getExtensionPointIfRegistered(name.getName()); if (point == null) { point = registerExtensionPoint(area, name, extensionClass); } for (T extension : extensions) { // no need to specify disposable because ParsingTestCase in any case clean area for each test //noinspection deprecation point.registerExtension(extension); } } protected final <T> void addExplicitExtension(@NotNull LanguageExtension<T> collector, @NotNull Language language, @NotNull T object) { ExtensionsAreaImpl area = myApp.getExtensionArea(); if (!area.hasExtensionPoint(collector.getName())) { area.registerFakeBeanPoint(collector.getName(), getPluginDescriptor()); } ExtensionTestUtil.addExtension(area, collector, language, object); } protected final <T> void registerExtensionPoint(@NotNull ExtensionPointName<T> extensionPointName, @NotNull Class<T> aClass) { registerExtensionPoint(myApp.getExtensionArea(), extensionPointName, aClass); } protected <T> ExtensionPointImpl<T> registerExtensionPoint(@NotNull ExtensionsAreaImpl extensionArea, @NotNull ExtensionPointName<T> extensionPointName, @NotNull Class<T> extensionClass) { // todo get rid of it - registerExtensionPoint should be not called several times String name = extensionPointName.getName(); if (extensionArea.hasExtensionPoint(name)) { return extensionArea.getExtensionPoint(name); } else { return extensionArea.registerPoint(name, extensionClass, getPluginDescriptor()); } } @NotNull // easy debug of not disposed extension private PluginDescriptor getPluginDescriptor() { PluginDescriptor pluginDescriptor = myPluginDescriptor; if (pluginDescriptor == null) { pluginDescriptor = new DefaultPluginDescriptor(getClass().getName() + "." + getName()); myPluginDescriptor = pluginDescriptor; } return pluginDescriptor; } @NotNull public MockProjectEx getProject() { return myProject; } public MockPsiManager getPsiManager() { return myPsiManager; } @Override protected void tearDown() throws Exception { myFile = null; myProject = null; myPsiManager = null; myFileFactory = null; super.tearDown(); } protected String getTestDataPath() { return PathManagerEx.getTestDataPath(); } @NotNull public final String getTestName() { return getTestName(myLowercaseFirstLetter); } protected boolean includeRanges() { return false; } protected boolean skipSpaces() { return false; } protected boolean checkAllPsiRoots() { return true; } /* Sanity check against thoughtlessly copy-pasting actual test results as the expected test data. */ protected void ensureNoErrorElements() { myFile.accept(new PsiRecursiveElementVisitor() { private static final int TAB_WIDTH = 8; @Override public void visitErrorElement(PsiErrorElement element) { // Very dump approach since a corresponding Document is not available. String text = myFile.getText(); String[] lines = StringUtil.splitByLinesKeepSeparators(text); int offset = element.getTextOffset(); LineColumn position = StringUtil.offsetToLineColumn(text, offset); int lineNumber = position != null ? position.line : -1; int column = position != null ? position.column : 0; String line = StringUtil.trimTrailing(lines[lineNumber]); // Sanitize: expand indentation tabs, replace the rest with a single space int numIndentTabs = StringUtil.countChars(line.subSequence(0, column), '\t', 0, true); int indentedColumn = column + numIndentTabs * (TAB_WIDTH - 1); String lineWithNoTabs = StringUtil.repeat(" ", numIndentTabs * TAB_WIDTH) + line.substring(numIndentTabs).replace('\t', ' '); String errorUnderline = StringUtil.repeat(" ", indentedColumn) + StringUtil.repeat("^", Math.max(1, element.getTextLength())); fail(String.format("Unexpected error element: %s:%d:%d\n\n%s\n%s\n%s", myFile.getName(), lineNumber + 1, column, lineWithNoTabs, errorUnderline, element.getErrorDescription())); } }); } protected void doTest(boolean checkResult) { doTest(checkResult, false); } protected void doTest(boolean checkResult, boolean ensureNoErrorElements) { String name = getTestName(); try { String text = loadFile(name + "." + myFileExt); myFile = createPsiFile(name, text); ensureParsed(myFile); assertEquals("light virtual file text mismatch", text, ((LightVirtualFile)myFile.getVirtualFile()).getContent().toString()); assertEquals("virtual file text mismatch", text, LoadTextUtil.loadText(myFile.getVirtualFile())); assertEquals("doc text mismatch", text, Objects.requireNonNull(myFile.getViewProvider().getDocument()).getText()); assertEquals("psi text mismatch", text, myFile.getText()); ensureCorrectReparse(myFile); if (checkResult) { checkResult(name, myFile); if (ensureNoErrorElements) { ensureNoErrorElements(); } } else { toParseTreeText(myFile, skipSpaces(), includeRanges()); } } catch (IOException e) { throw new RuntimeException(e); } } protected void doTest(String suffix) throws IOException { String name = getTestName(); String text = loadFile(name + "." + myFileExt); myFile = createPsiFile(name, text); ensureParsed(myFile); assertEquals(text, myFile.getText()); checkResult(name + suffix, myFile); } protected void doCodeTest(@NotNull String code) throws IOException { String name = getTestName(); myFile = createPsiFile("a", code); ensureParsed(myFile); assertEquals(code, myFile.getText()); checkResult(myFilePrefix + name, myFile); } protected PsiFile createPsiFile(@NotNull String name, @NotNull String text) { return createFile(name + "." + myFileExt, text); } protected PsiFile createFile(@NotNull String name, @NotNull String text) { LightVirtualFile virtualFile = new LightVirtualFile(name, myLanguage, text); virtualFile.setCharset(StandardCharsets.UTF_8); return createFile(virtualFile); } protected PsiFile createFile(@NotNull LightVirtualFile virtualFile) { return myFileFactory.trySetupPsiForFile(virtualFile, myLanguage, true, false); } protected void checkResult(@NotNull @TestDataFile String targetDataName, @NotNull PsiFile file) throws IOException { doCheckResult(myFullDataPath, file, checkAllPsiRoots(), targetDataName, skipSpaces(), includeRanges(), allTreesInSingleFile()); } protected boolean allTreesInSingleFile() { return false; } public static void doCheckResult(@NotNull String testDataDir, @NotNull PsiFile file, boolean checkAllPsiRoots, @NotNull String targetDataName, boolean skipSpaces, boolean printRanges) { doCheckResult(testDataDir, file, checkAllPsiRoots, targetDataName, skipSpaces, printRanges, false); } public static void doCheckResult(@NotNull String testDataDir, @NotNull PsiFile file, boolean checkAllPsiRoots, @NotNull String targetDataName, boolean skipSpaces, boolean printRanges, boolean allTreesInSingleFile) { FileViewProvider provider = file.getViewProvider(); Set<Language> languages = provider.getLanguages(); if (!checkAllPsiRoots || languages.size() == 1) { doCheckResult(testDataDir, targetDataName + ".txt", toParseTreeText(file, skipSpaces, printRanges).trim()); return; } if (allTreesInSingleFile) { String expectedName = targetDataName + ".txt"; StringBuilder sb = new StringBuilder(); List<Language> languagesList = new ArrayList<>(languages); ContainerUtil.sort(languagesList, Comparator.comparing(Language::getID)); for (Language language : languagesList) { sb.append("Subtree: ").append(language.getDisplayName()).append(" (").append(language.getID()).append(")").append("\n") .append(toParseTreeText(provider.getPsi(language), skipSpaces, printRanges).trim()) .append("\n").append(StringUtil.repeat("-", 80)).append("\n"); } doCheckResult(testDataDir, expectedName, sb.toString()); } else { for (Language language : languages) { PsiFile root = provider.getPsi(language); assertNotNull("FileViewProvider " + provider + " didn't return PSI root for language " + language.getID(), root); String expectedName = targetDataName + "." + language.getID() + ".txt"; doCheckResult(testDataDir, expectedName, toParseTreeText(root, skipSpaces, printRanges).trim()); } } } protected void checkResult(@NotNull String actual) { String name = getTestName(); doCheckResult(myFullDataPath, myFilePrefix + name + ".txt", actual); } protected void checkResult(@NotNull @TestDataFile String targetDataName, @NotNull String actual) { doCheckResult(myFullDataPath, targetDataName, actual); } public static void doCheckResult(@NotNull String fullPath, @NotNull String targetDataName, @NotNull String actual) { String expectedFileName = fullPath + File.separatorChar + targetDataName; UsefulTestCase.assertSameLinesWithFile(expectedFileName, actual); } protected static String toParseTreeText(@NotNull PsiElement file, boolean skipSpaces, boolean printRanges) { return DebugUtil.psiToString(file, skipSpaces, printRanges); } protected String loadFile(@NotNull @TestDataFile String name) throws IOException { return loadFileDefault(myFullDataPath, name); } public static String loadFileDefault(@NotNull String dir, @NotNull String name) throws IOException { return FileUtil.loadFile(new File(dir, name), CharsetToolkit.UTF8, true).trim(); } public static void ensureParsed(@NotNull PsiFile file) { file.accept(new PsiElementVisitor() { @Override public void visitElement(PsiElement element) { element.acceptChildren(this); } }); } public static void ensureCorrectReparse(@NotNull final PsiFile file) { final String psiToStringDefault = DebugUtil.psiToString(file, false, false); DebugUtil.performPsiModification("ensureCorrectReparse", () -> { final String fileText = file.getText(); final DiffLog diffLog = new BlockSupportImpl(file.getProject()).reparseRange( file, file.getNode(), TextRange.allOf(fileText), fileText, new EmptyProgressIndicator(), fileText); diffLog.performActualPsiChange(file); }); assertEquals(psiToStringDefault, DebugUtil.psiToString(file, false, false)); } public void registerMockInjectedLanguageManager() { registerExtensionPoint(myProject.getExtensionArea(), MultiHostInjector.MULTIHOST_INJECTOR_EP_NAME, MultiHostInjector.class); registerExtensionPoint(myApp.getExtensionArea(), LanguageInjector.EXTENSION_POINT_NAME, LanguageInjector.class); myProject.registerService(InjectedLanguageManager.class, new InjectedLanguageManagerImpl(myProject, new MockDumbService(myProject))); } }
platform/testFramework/src/com/intellij/testFramework/ParsingTestCase.java
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file. package com.intellij.testFramework; import com.intellij.ide.startup.impl.StartupManagerImpl; import com.intellij.lang.*; import com.intellij.lang.impl.PsiBuilderFactoryImpl; import com.intellij.lang.injection.InjectedLanguageManager; import com.intellij.lang.injection.MultiHostInjector; import com.intellij.mock.*; import com.intellij.openapi.application.ex.PathManagerEx; import com.intellij.openapi.editor.EditorFactory; import com.intellij.openapi.extensions.DefaultPluginDescriptor; import com.intellij.openapi.extensions.ExtensionPoint; import com.intellij.openapi.extensions.ExtensionPointName; import com.intellij.openapi.extensions.PluginDescriptor; import com.intellij.openapi.extensions.impl.ExtensionPointImpl; import com.intellij.openapi.extensions.impl.ExtensionsAreaImpl; import com.intellij.openapi.fileEditor.FileDocumentManager; import com.intellij.openapi.fileEditor.impl.FileDocumentManagerImpl; import com.intellij.openapi.fileEditor.impl.LoadTextUtil; import com.intellij.openapi.fileTypes.FileTypeFactory; import com.intellij.openapi.fileTypes.FileTypeManager; import com.intellij.openapi.options.SchemeManagerFactory; import com.intellij.openapi.progress.EmptyProgressIndicator; import com.intellij.openapi.progress.ProgressManager; import com.intellij.openapi.progress.impl.ProgressManagerImpl; import com.intellij.openapi.startup.StartupManager; import com.intellij.openapi.util.TextRange; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.LineColumn; import com.intellij.openapi.util.text.StringUtil; import com.intellij.openapi.vfs.CharsetToolkit; import com.intellij.pom.PomModel; import com.intellij.pom.core.impl.PomModelImpl; import com.intellij.pom.tree.TreeAspect; import com.intellij.psi.*; import com.intellij.psi.impl.*; import com.intellij.psi.impl.source.resolve.reference.ReferenceProvidersRegistry; import com.intellij.psi.impl.source.resolve.reference.ReferenceProvidersRegistryImpl; import com.intellij.psi.impl.source.tree.injected.InjectedLanguageManagerImpl; import com.intellij.psi.util.CachedValuesManager; import com.intellij.util.CachedValuesManagerImpl; import com.intellij.util.KeyedLazyInstance; import com.intellij.util.containers.ContainerUtil; import com.intellij.util.messages.MessageBus; import org.jetbrains.annotations.NotNull; import org.picocontainer.ComponentAdapter; import org.picocontainer.MutablePicoContainer; import java.io.File; import java.io.IOException; import java.nio.charset.StandardCharsets; import java.util.*; /** @noinspection JUnitTestCaseWithNonTrivialConstructors*/ public abstract class ParsingTestCase extends UsefulTestCase { private PluginDescriptor myPluginDescriptor; private MockApplication myApp; protected MockProjectEx myProject; protected String myFilePrefix = ""; protected String myFileExt; protected final String myFullDataPath; protected PsiFile myFile; private MockPsiManager myPsiManager; private PsiFileFactoryImpl myFileFactory; protected Language myLanguage; private final ParserDefinition[] myDefinitions; private final boolean myLowercaseFirstLetter; private ExtensionPointImpl<KeyedLazyInstance<ParserDefinition>> myLangParserDefinition; protected ParsingTestCase(@NotNull String dataPath, @NotNull String fileExt, @NotNull ParserDefinition... definitions) { this(dataPath, fileExt, false, definitions); } protected ParsingTestCase(@NotNull String dataPath, @NotNull String fileExt, boolean lowercaseFirstLetter, @NotNull ParserDefinition... definitions) { myDefinitions = definitions; myFullDataPath = getTestDataPath() + "/" + dataPath; myFileExt = fileExt; myLowercaseFirstLetter = lowercaseFirstLetter; } @NotNull protected MockApplication getApplication() { return myApp; } @Override protected void setUp() throws Exception { super.setUp(); MockApplication app = MockApplication.setUp(getTestRootDisposable()); myApp = app; MutablePicoContainer appContainer = app.getPicoContainer(); ComponentAdapter component = appContainer.getComponentAdapter(ProgressManager.class.getName()); if (component == null) { appContainer.registerComponentInstance(ProgressManager.class.getName(), new ProgressManagerImpl()); } myProject = new MockProjectEx(getTestRootDisposable()); myPsiManager = new MockPsiManager(myProject); myFileFactory = new PsiFileFactoryImpl(myPsiManager); appContainer.registerComponentInstance(MessageBus.class, app.getMessageBus()); appContainer.registerComponentInstance(SchemeManagerFactory.class, new MockSchemeManagerFactory()); MockEditorFactory editorFactory = new MockEditorFactory(); appContainer.registerComponentInstance(EditorFactory.class, editorFactory); appContainer.registerComponentInstance(FileDocumentManager.class, new MockFileDocumentManagerImpl(charSequence -> { return editorFactory.createDocument(charSequence); }, FileDocumentManagerImpl.HARD_REF_TO_DOCUMENT_KEY)); appContainer.registerComponentInstance(PsiDocumentManager.class, new MockPsiDocumentManager()); app.registerService(PsiBuilderFactory.class, new PsiBuilderFactoryImpl()); app.registerService(DefaultASTFactory.class, new DefaultASTFactoryImpl()); app.registerService(ReferenceProvidersRegistry.class, new ReferenceProvidersRegistryImpl()); myProject.registerService(CachedValuesManager.class, new CachedValuesManagerImpl(myProject, new PsiCachedValuesFactory(myPsiManager))); myProject.registerService(PsiManager.class, myPsiManager); myProject.registerService(StartupManager.class, new StartupManagerImpl(myProject)); registerExtensionPoint(app.getExtensionArea(), FileTypeFactory.FILE_TYPE_FACTORY_EP, FileTypeFactory.class); registerExtensionPoint(app.getExtensionArea(), MetaLanguage.EP_NAME, MetaLanguage.class); myLangParserDefinition = app.getExtensionArea().registerFakeBeanPoint(LanguageParserDefinitions.INSTANCE.getName(), getPluginDescriptor()); if (myDefinitions.length > 0) { configureFromParserDefinition(myDefinitions[0], myFileExt); // first definition is registered by configureFromParserDefinition for (int i = 1, length = myDefinitions.length; i < length; i++) { registerParserDefinition(myDefinitions[i]); } } // That's for reparse routines PomModelImpl pomModel = new PomModelImpl(myProject); myProject.registerService(PomModel.class, pomModel); new TreeAspect(pomModel); } protected final void registerParserDefinition(@NotNull ParserDefinition definition) { final Language language = definition.getFileNodeType().getLanguage(); myLangParserDefinition.registerExtension(new KeyedLazyInstance<ParserDefinition>() { @Override public String getKey() { return language.getID(); } @NotNull @Override public ParserDefinition getInstance() { return definition; } }); LanguageParserDefinitions.INSTANCE.clearCache(language); } public void configureFromParserDefinition(@NotNull ParserDefinition definition, String extension) { myLanguage = definition.getFileNodeType().getLanguage(); myFileExt = extension; registerParserDefinition(definition); myApp.getPicoContainer().registerComponentInstance(FileTypeManager.class, new MockFileTypeManager(new MockLanguageFileType(myLanguage, myFileExt))); } protected final <T> void registerExtension(@NotNull ExtensionPointName<T> name, @NotNull T extension) { //noinspection unchecked registerExtensions(name, (Class<T>)extension.getClass(), Collections.singletonList(extension)); } protected final <T> void registerExtensions(@NotNull ExtensionPointName<T> name, @NotNull Class<T> extensionClass, @NotNull List<T> extensions) { ExtensionsAreaImpl area = myApp.getExtensionArea(); ExtensionPoint<T> point = area.getExtensionPointIfRegistered(name.getName()); if (point == null) { point = registerExtensionPoint(area, name, extensionClass); } for (T extension : extensions) { // no need to specify disposable because ParsingTestCase in any case clean area for each test //noinspection deprecation point.registerExtension(extension); } } protected final <T> void addExplicitExtension(@NotNull LanguageExtension<T> collector, @NotNull Language language, @NotNull T object) { ExtensionsAreaImpl area = myApp.getExtensionArea(); if (!area.hasExtensionPoint(collector.getName())) { area.registerFakeBeanPoint(collector.getName(), getPluginDescriptor()); } ExtensionTestUtil.addExtension(area, collector, language, object); } protected final <T> void registerExtensionPoint(@NotNull ExtensionPointName<T> extensionPointName, @NotNull Class<T> aClass) { registerExtensionPoint(myApp.getExtensionArea(), extensionPointName, aClass); } protected <T> ExtensionPointImpl<T> registerExtensionPoint(@NotNull ExtensionsAreaImpl extensionArea, @NotNull ExtensionPointName<T> extensionPointName, @NotNull Class<T> extensionClass) { // todo get rid of it - registerExtensionPoint should be not called several times String name = extensionPointName.getName(); if (extensionArea.hasExtensionPoint(name)) { return extensionArea.getExtensionPoint(name); } else { return extensionArea.registerPoint(name, extensionClass, getPluginDescriptor()); } } @NotNull // easy debug of not disposed extension private PluginDescriptor getPluginDescriptor() { PluginDescriptor pluginDescriptor = myPluginDescriptor; if (pluginDescriptor == null) { pluginDescriptor = new DefaultPluginDescriptor(getClass().getName() + "." + getName()); myPluginDescriptor = pluginDescriptor; } return pluginDescriptor; } @NotNull public MockProjectEx getProject() { return myProject; } public MockPsiManager getPsiManager() { return myPsiManager; } @Override protected void tearDown() throws Exception { myFile = null; myProject = null; myPsiManager = null; myFileFactory = null; super.tearDown(); } protected String getTestDataPath() { return PathManagerEx.getTestDataPath(); } @NotNull public final String getTestName() { return getTestName(myLowercaseFirstLetter); } protected boolean includeRanges() { return false; } protected boolean skipSpaces() { return false; } protected boolean checkAllPsiRoots() { return true; } /* Sanity check against thoughtlessly copy-pasting actual test results as the expected test data. */ protected void ensureNoErrorElements() { myFile.accept(new PsiRecursiveElementVisitor() { private static final int TAB_WIDTH = 8; @Override public void visitErrorElement(PsiErrorElement element) { // Very dump approach since a corresponding Document is not available. String text = myFile.getText(); String[] lines = StringUtil.splitByLinesKeepSeparators(text); int offset = element.getTextOffset(); LineColumn position = StringUtil.offsetToLineColumn(text, offset); int lineNumber = position != null ? position.line : -1; int column = position != null ? position.column : 0; String line = StringUtil.trimTrailing(lines[lineNumber]); // Sanitize: expand indentation tabs, replace the rest with a single space int numIndentTabs = StringUtil.countChars(line.subSequence(0, column), '\t', 0, true); int indentedColumn = column + numIndentTabs * (TAB_WIDTH - 1); String lineWithNoTabs = StringUtil.repeat(" ", numIndentTabs * TAB_WIDTH) + line.substring(numIndentTabs).replace('\t', ' '); String errorUnderline = StringUtil.repeat(" ", indentedColumn) + StringUtil.repeat("^", Math.max(1, element.getTextLength())); fail(String.format("Unexpected error element: %s:%d:%d\n\n%s\n%s\n%s", myFile.getName(), lineNumber + 1, column, lineWithNoTabs, errorUnderline, element.getErrorDescription())); } }); } protected void doTest(boolean checkResult) { doTest(checkResult, false); } protected void doTest(boolean checkResult, boolean ensureNoErrorElements) { String name = getTestName(); try { String text = loadFile(name + "." + myFileExt); myFile = createPsiFile(name, text); ensureParsed(myFile); assertEquals("light virtual file text mismatch", text, ((LightVirtualFile)myFile.getVirtualFile()).getContent().toString()); assertEquals("virtual file text mismatch", text, LoadTextUtil.loadText(myFile.getVirtualFile())); assertEquals("doc text mismatch", text, Objects.requireNonNull(myFile.getViewProvider().getDocument()).getText()); assertEquals("psi text mismatch", text, myFile.getText()); ensureCorrectReparse(myFile); if (checkResult) { checkResult(name, myFile); if (ensureNoErrorElements) { ensureNoErrorElements(); } } else { toParseTreeText(myFile, skipSpaces(), includeRanges()); } } catch (IOException e) { throw new RuntimeException(e); } } protected void doTest(String suffix) throws IOException { String name = getTestName(); String text = loadFile(name + "." + myFileExt); myFile = createPsiFile(name, text); ensureParsed(myFile); assertEquals(text, myFile.getText()); checkResult(name + suffix, myFile); } protected void doCodeTest(@NotNull String code) throws IOException { String name = getTestName(); myFile = createPsiFile("a", code); ensureParsed(myFile); assertEquals(code, myFile.getText()); checkResult(myFilePrefix + name, myFile); } protected PsiFile createPsiFile(@NotNull String name, @NotNull String text) { return createFile(name + "." + myFileExt, text); } protected PsiFile createFile(@NotNull String name, @NotNull String text) { LightVirtualFile virtualFile = new LightVirtualFile(name, myLanguage, text); virtualFile.setCharset(StandardCharsets.UTF_8); return createFile(virtualFile); } protected PsiFile createFile(@NotNull LightVirtualFile virtualFile) { return myFileFactory.trySetupPsiForFile(virtualFile, myLanguage, true, false); } protected void checkResult(@NotNull @TestDataFile String targetDataName, @NotNull PsiFile file) throws IOException { doCheckResult(myFullDataPath, file, checkAllPsiRoots(), targetDataName, skipSpaces(), includeRanges(), allTreesInSingleFile()); } protected boolean allTreesInSingleFile() { return false; } public static void doCheckResult(@NotNull String testDataDir, @NotNull PsiFile file, boolean checkAllPsiRoots, @NotNull String targetDataName, boolean skipSpaces, boolean printRanges) { doCheckResult(testDataDir, file, checkAllPsiRoots, targetDataName, skipSpaces, printRanges, false); } public static void doCheckResult(@NotNull String testDataDir, @NotNull PsiFile file, boolean checkAllPsiRoots, @NotNull String targetDataName, boolean skipSpaces, boolean printRanges, boolean allTreesInSingleFile) { FileViewProvider provider = file.getViewProvider(); Set<Language> languages = provider.getLanguages(); if (!checkAllPsiRoots || languages.size() == 1) { doCheckResult(testDataDir, targetDataName + ".txt", toParseTreeText(file, skipSpaces, printRanges).trim()); return; } if (allTreesInSingleFile) { String expectedName = targetDataName + ".txt"; StringBuilder sb = new StringBuilder(); List<Language> languagesList = new ArrayList<>(languages); ContainerUtil.sort(languagesList, Comparator.comparing(Language::getID)); for (Language language : languagesList) { sb.append("Subtree: ").append(language.getDisplayName()).append(" (").append(language.getID()).append(")").append("\n") .append(toParseTreeText(provider.getPsi(language), skipSpaces, printRanges).trim()) .append("\n").append(StringUtil.repeat("-", 80)).append("\n"); } doCheckResult(testDataDir, expectedName, sb.toString()); } else { for (Language language : languages) { PsiFile root = provider.getPsi(language); assertNotNull("FileViewProvider " + provider + " didn't return PSI root for language " + language.getID(), root); String expectedName = targetDataName + "." + language.getID() + ".txt"; doCheckResult(testDataDir, expectedName, toParseTreeText(root, skipSpaces, printRanges).trim()); } } } protected void checkResult(@NotNull String actual) { String name = getTestName(); doCheckResult(myFullDataPath, myFilePrefix + name + ".txt", actual); } protected void checkResult(@NotNull @TestDataFile String targetDataName, @NotNull String actual) { doCheckResult(myFullDataPath, targetDataName, actual); } public static void doCheckResult(@NotNull String fullPath, @NotNull String targetDataName, @NotNull String actual) { String expectedFileName = fullPath + File.separatorChar + targetDataName; UsefulTestCase.assertSameLinesWithFile(expectedFileName, actual); } protected static String toParseTreeText(@NotNull PsiElement file, boolean skipSpaces, boolean printRanges) { return DebugUtil.psiToString(file, skipSpaces, printRanges); } protected String loadFile(@NotNull @TestDataFile String name) throws IOException { return loadFileDefault(myFullDataPath, name); } public static String loadFileDefault(@NotNull String dir, @NotNull String name) throws IOException { return FileUtil.loadFile(new File(dir, name), CharsetToolkit.UTF8, true).trim(); } public static void ensureParsed(@NotNull PsiFile file) { file.accept(new PsiElementVisitor() { @Override public void visitElement(PsiElement element) { element.acceptChildren(this); } }); } public static void ensureCorrectReparse(@NotNull final PsiFile file) { final String psiToStringDefault = DebugUtil.psiToString(file, false, false); DebugUtil.performPsiModification("ensureCorrectReparse", () -> { final String fileText = file.getText(); final DiffLog diffLog = new BlockSupportImpl(file.getProject()).reparseRange( file, file.getNode(), TextRange.allOf(fileText), fileText, new EmptyProgressIndicator(), fileText); diffLog.performActualPsiChange(file); }); assertEquals(psiToStringDefault, DebugUtil.psiToString(file, false, false)); } public void registerMockInjectedLanguageManager() { registerExtensionPoint(myProject.getExtensionArea(), MultiHostInjector.MULTIHOST_INJECTOR_EP_NAME, MultiHostInjector.class); registerExtensionPoint(myApp.getExtensionArea(), LanguageInjector.EXTENSION_POINT_NAME, LanguageInjector.class); myProject.registerService(InjectedLanguageManager.class, new InjectedLanguageManagerImpl(myProject, new MockDumbService(myProject))); } }
tests: clear ParserDefinition stored in Language's user data when ParsingTestCase finished After 87a77b47af748 ParsingTestCase doesn't use addExplicitExtension so we need to explicitly clear the value cached in UserData of Language instance when a test finishes, otherwise tests which run after it may get incorrect instance (e.g. PyRedundantParenthesesInspectionTest will fail if run after IPythonConsoleParsingTest). GitOrigin-RevId: ddf0e34df3c1366e212a8d424ccafc549bbbe4c1
platform/testFramework/src/com/intellij/testFramework/ParsingTestCase.java
tests: clear ParserDefinition stored in Language's user data when ParsingTestCase finished
Java
apache-2.0
98f67f00d6c6614f989f3373474fefd202629cee
0
this/carbon-uuf-maven-tools,sajithar/carbon-uuf-maven-plugin,this/carbon-uuf-maven-tools,this/carbon-uuf-maven-tools
/* * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.uuf.maven.util; import org.apache.commons.io.IOUtils; import org.apache.maven.plugin.MojoExecutionException; import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.nio.file.FileAlreadyExistsException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.util.Arrays; import java.util.stream.Collectors; /** * Utility class that creates various configuration files needed by the UUF project creation Mojo's. * * @since 1.0.0 */ public class ConfigFileCreator { private static final String FILE_OSGI_IMPORTS = "osgi-imports"; private static final String FILE_CONFIG_YAML = "config.yaml"; private static final String FILE_DEPENDENCY_TREE = "dependency-tree.yaml"; private static final String FILE_FEATURE_PROPERTIES = "feature.properties"; private static final String FILE_P2_INF = "p2.inf"; public static void createOsgiImports(String osgiImportsContent, String outputDirectoryPath) throws MojoExecutionException { if ((osgiImportsContent == null) || osgiImportsContent.isEmpty()) { return; } try { String osgiImports = Arrays.stream(osgiImportsContent.trim().split(",")) .map(String::trim) .filter(s -> !s.isEmpty()) .collect(Collectors.joining("\n")); String content = applyTemplate(FILE_OSGI_IMPORTS, osgiImports); writeFile(Paths.get(outputDirectoryPath, FILE_OSGI_IMPORTS), content); } catch (IOException e) { throw new MojoExecutionException( "Cannot create '" + FILE_OSGI_IMPORTS + "' file in '" + outputDirectoryPath + "'. " + e.getMessage(), e); } } public static void createConfigYaml(String configYamlContent, String outputDirectoryPath) throws MojoExecutionException { try { writeFile(Paths.get(outputDirectoryPath, FILE_CONFIG_YAML), configYamlContent); } catch (IOException e) { throw new MojoExecutionException("Cannot create '" + FILE_CONFIG_YAML + "' file in '" + outputDirectoryPath + "'. " + e.getMessage(), e); } } public static void createDependencyTree(String dependencyTreeContent, String outputDirectoryPath) throws MojoExecutionException { try { writeFile(Paths.get(outputDirectoryPath, FILE_DEPENDENCY_TREE), dependencyTreeContent); } catch (IOException e) { throw new MojoExecutionException( "Cannot create '" + FILE_DEPENDENCY_TREE + "' file in '" + outputDirectoryPath + "'. " + e.getMessage(), e); } } public static String createFeatureProperties(String outputDirectoryPath) throws MojoExecutionException { Path outputDirectory = Paths.get(outputDirectoryPath); Path buildPropertiesFile = outputDirectory.resolve(FILE_FEATURE_PROPERTIES); try { createDirectory(outputDirectory); String content = readTemplate(FILE_FEATURE_PROPERTIES); writeFile(buildPropertiesFile, content); return buildPropertiesFile.toString(); } catch (IOException e) { throw new MojoExecutionException("Cannot create file '" + buildPropertiesFile + "'. " + e.getMessage(), e); } } public static void createP2Inf(String featureName, String outputDirectoryPath) throws MojoExecutionException { Path outputDirectory = Paths.get(outputDirectoryPath); Path p2InfFile = outputDirectory.resolve(FILE_P2_INF); try { createDirectory(outputDirectory); String content = applyTemplate(FILE_P2_INF, featureName, featureName); writeFile(p2InfFile, content); } catch (IOException e) { throw new MojoExecutionException("Cannot create file '" + p2InfFile + "'. " + e.getMessage(), e); } } private static void createDirectory(Path directory) throws IOException { try { Files.createDirectories(directory); } catch (FileAlreadyExistsException e) { throw new IOException("Cannot create directory '" + directory + "' as a file already exists in there.", e); } catch (IOException e) { throw new IOException("Cannot create directory '" + directory + "'.", e); } } private static String readTemplate(String template) throws IOException { try (InputStream featureProperties = ConfigFileCreator.class.getResourceAsStream("/templates/" + template)) { if (featureProperties == null) { throw new IOException("Cannot find template file '" + template + "' in classpath resources."); } return IOUtils.toString(featureProperties, StandardCharsets.UTF_8.name()); } catch (IOException e) { throw new IOException("Cannot read template file '" + template + "' from classpath resources.", e); } } private static String applyTemplate(String template, String... variables) throws IOException { return String.format(readTemplate(template), variables); } private static void writeFile(Path file, String content) throws IOException { try { Files.write(file, content.getBytes(StandardCharsets.UTF_8)); } catch (IOException e) { throw new IOException("Cannot write to file '" + file + "'.", e); } } }
plugin/src/main/java/org/wso2/carbon/uuf/maven/util/ConfigFileCreator.java
/* * Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved. * * WSO2 Inc. licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except * in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.wso2.carbon.uuf.maven.util; import org.apache.commons.io.IOUtils; import org.apache.maven.plugin.MojoExecutionException; import java.io.IOException; import java.io.InputStream; import java.nio.charset.StandardCharsets; import java.nio.file.FileAlreadyExistsException; import java.nio.file.Files; import java.nio.file.Path; import java.util.Arrays; import java.util.stream.Collectors; /** * Utility class that creates various configuration files needed by the UUF project creation Mojo's. * * @since 1.0.0 */ public class ConfigFileCreator { private static final String FILE_OSGI_IMPORTS = "osgi-imports"; private static final String FILE_FEATURE_PROPERTIES = "feature.properties"; private static final String FILE_P2_INF = "p2.inf"; public static void createOsgiImports(String osgiImportsConfig, Path outputDirectory) throws MojoExecutionException { if ((osgiImportsConfig == null) || osgiImportsConfig.isEmpty()) { return; } Path osgiImportsFile = outputDirectory.resolve(FILE_OSGI_IMPORTS); try { createDirectory(outputDirectory); String osgiImports = Arrays.stream(osgiImportsConfig.trim().split(",")) .map(String::trim) .filter(s -> !s.isEmpty()) .collect(Collectors.joining("\n")); String content = applyTemplate(FILE_OSGI_IMPORTS, osgiImports); writeFile(osgiImportsFile, content); } catch (IOException e) { throw new MojoExecutionException("Cannot create file '" + osgiImportsFile + "'. " + e.getMessage(), e); } } public static Path createFeatureProperties(Path outputDirectory) throws MojoExecutionException { Path buildPropertiesFile = outputDirectory.resolve(FILE_FEATURE_PROPERTIES); try { createDirectory(outputDirectory); String content = readTemplate(FILE_FEATURE_PROPERTIES); writeFile(buildPropertiesFile, content); return buildPropertiesFile; } catch (IOException e) { throw new MojoExecutionException("Cannot create file '" + buildPropertiesFile + "'. " + e.getMessage(), e); } } public static Path createP2Inf(String featureName, Path outputDirectory) throws MojoExecutionException { Path p2InfFile = outputDirectory.resolve(FILE_P2_INF); try { createDirectory(outputDirectory); String content = applyTemplate(FILE_P2_INF, featureName, featureName); writeFile(p2InfFile, content); return p2InfFile; } catch (IOException e) { throw new MojoExecutionException("Cannot create file '" + p2InfFile + "'. " + e.getMessage(), e); } } private static void createDirectory(Path directory) throws IOException { try { Files.createDirectories(directory); } catch (FileAlreadyExistsException e) { throw new IOException("Cannot create directory '" + directory + "' as a file already exists in there.", e); } catch (IOException e) { throw new IOException("Cannot create directory '" + directory + "'.", e); } } private static String readTemplate(String template) throws IOException { try (InputStream featureProperties = ConfigFileCreator.class.getResourceAsStream("/templates/" + template)) { if (featureProperties == null) { throw new IOException("Cannot find template file '" + template + "' in classpath resources."); } return IOUtils.toString(featureProperties, StandardCharsets.UTF_8.name()); } catch (IOException e) { throw new IOException("Cannot read template file '" + template + "' from classpath resources.", e); } } private static String applyTemplate(String template, String... variables) throws IOException { return String.format(readTemplate(template), variables); } private static void writeFile(Path file, String content) throws IOException { try { Files.write(file, content.getBytes(StandardCharsets.UTF_8)); } catch (IOException e) { throw new IOException("Cannot write to file '" + file + "'.", e); } } }
added 'createConfigYaml', 'createDependencyTree' static methods to ConfigFileCreator class
plugin/src/main/java/org/wso2/carbon/uuf/maven/util/ConfigFileCreator.java
added 'createConfigYaml', 'createDependencyTree' static methods to ConfigFileCreator class
Java
apache-2.0
779ad277cc946606c206694a8212dd22f6080096
0
vherilier/jmeter,ThiagoGarciaAlves/jmeter,ubikfsabbe/jmeter,ra0077/jmeter,DoctorQ/jmeter,vherilier/jmeter,fj11/jmeter,ra0077/jmeter,tuanhq/jmeter,hizhangqi/jmeter-1,DoctorQ/jmeter,d0k1/jmeter,etnetera/jmeter,max3163/jmeter,liwangbest/jmeter,DoctorQ/jmeter,ra0077/jmeter,ThiagoGarciaAlves/jmeter,ThiagoGarciaAlves/jmeter,kyroskoh/jmeter,hemikak/jmeter,hizhangqi/jmeter-1,thomsonreuters/jmeter,kyroskoh/jmeter,ubikfsabbe/jmeter,kschroeder/jmeter,vherilier/jmeter,max3163/jmeter,max3163/jmeter,ra0077/jmeter,ubikloadpack/jmeter,kschroeder/jmeter,liwangbest/jmeter,ubikfsabbe/jmeter,hemikak/jmeter,vherilier/jmeter,kschroeder/jmeter,thomsonreuters/jmeter,fj11/jmeter,d0k1/jmeter,ubikfsabbe/jmeter,hemikak/jmeter,etnetera/jmeter,d0k1/jmeter,hizhangqi/jmeter-1,tuanhq/jmeter,irfanah/jmeter,etnetera/jmeter,tuanhq/jmeter,ubikloadpack/jmeter,irfanah/jmeter,irfanah/jmeter,kyroskoh/jmeter,liwangbest/jmeter,ubikloadpack/jmeter,thomsonreuters/jmeter,d0k1/jmeter,ubikloadpack/jmeter,etnetera/jmeter,fj11/jmeter,etnetera/jmeter,hemikak/jmeter,max3163/jmeter
/* * Copyright 2003-2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.jmeter.protocol.http.parser; import java.io.ByteArrayInputStream; import java.net.MalformedURLException; import java.net.URL; import java.util.Iterator; import org.apache.jorphan.logging.LoggingManager; import org.apache.log.Logger; import org.w3c.dom.Document; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.w3c.tidy.Tidy; import org.xml.sax.SAXException; /** * HtmlParser implementation using JTidy. * */ class JTidyHTMLParser extends HTMLParser { private static final Logger log = LoggingManager.getLoggerForClass(); protected JTidyHTMLParser() { super(); } protected boolean isReusable() { return true; } /* * (non-Javadoc) * * @see org.apache.jmeter.protocol.http.parser.HTMLParser#getEmbeddedResourceURLs(byte[], * java.net.URL) */ public Iterator getEmbeddedResourceURLs(byte[] html, URL baseUrl, URLCollection urls) throws HTMLParseException { Document dom = null; try { dom = (Document) getDOM(html); } catch (SAXException se) { throw new HTMLParseException(se); } // Now parse the DOM tree scanNodes(dom, urls, baseUrl); return urls.iterator(); } /** * Scan nodes recursively, looking for embedded resources * * @param node - * initial node * @param urls - * container for URLs * @param baseUrl - * used to create absolute URLs * * @return new base URL */ private URL scanNodes(Node node, URLCollection urls, URL baseUrl) throws HTMLParseException { if (node == null) { return baseUrl; } String name = node.getNodeName(); int type = node.getNodeType(); switch (type) { case Node.DOCUMENT_NODE: scanNodes(((Document) node).getDocumentElement(), urls, baseUrl); break; case Node.ELEMENT_NODE: NamedNodeMap attrs = node.getAttributes(); if (name.equalsIgnoreCase(TAG_BASE)) { String tmp = getValue(attrs, ATT_HREF); if (tmp != null) try { baseUrl = new URL(baseUrl, tmp); } catch (MalformedURLException e) { throw new HTMLParseException(e); } break; } if (name.equalsIgnoreCase(TAG_IMAGE) || name.equalsIgnoreCase(TAG_EMBED)) { urls.addURL(getValue(attrs, ATT_SRC), baseUrl); break; } if (name.equalsIgnoreCase(TAG_APPLET)) { urls.addURL(getValue(attrs, "code"), baseUrl); break; } if (name.equalsIgnoreCase(TAG_INPUT)) { String src = getValue(attrs, ATT_SRC); String typ = getValue(attrs, ATT_TYPE); if ((src != null) && (typ.equalsIgnoreCase(ATT_IS_IMAGE))) { urls.addURL(src, baseUrl); } break; } if (name.equalsIgnoreCase(TAG_LINK) && getValue(attrs, ATT_REL).equalsIgnoreCase(STYLESHEET)) { urls.addURL(getValue(attrs, ATT_HREF), baseUrl); break; } if (name.equalsIgnoreCase(TAG_SCRIPT)) { urls.addURL(getValue(attrs, ATT_SRC), baseUrl); break; } if (name.equalsIgnoreCase(TAG_FRAME)) { urls.addURL(getValue(attrs, ATT_SRC), baseUrl); break; } String back = getValue(attrs, ATT_BACKGROUND); if (back != null) { urls.addURL(back, baseUrl); } if (name.equalsIgnoreCase(TAG_BGSOUND)) { urls.addURL(getValue(attrs, ATT_SRC), baseUrl); break; } NodeList children = node.getChildNodes(); if (children != null) { int len = children.getLength(); for (int i = 0; i < len; i++) { baseUrl = scanNodes(children.item(i), urls, baseUrl); } } break; // case Node.TEXT_NODE: // break; } return baseUrl; } /* * Helper method to get an attribute value, if it exists @param attrs list * of attributs @param attname attribute name @return */ private String getValue(NamedNodeMap attrs, String attname) { String v = null; Node n = attrs.getNamedItem(attname); if (n != null) v = n.getNodeValue(); return v; } /** * Returns <code>tidy</code> as HTML parser. * * @return a <code>tidy</code> HTML parser */ private static Tidy getTidyParser() { log.debug("Start : getParser"); Tidy tidy = new Tidy(); tidy.setCharEncoding(org.w3c.tidy.Configuration.UTF8); tidy.setQuiet(true); tidy.setShowWarnings(false); if (log.isDebugEnabled()) { log.debug("getParser : tidy parser created - " + tidy); } log.debug("End : getParser"); return tidy; } /** * Returns a node representing a whole xml given an xml document. * * @param text * an xml document (as a byte array) * @return a node representing a whole xml * * @throws SAXException * indicates an error parsing the xml document */ private static Node getDOM(byte[] text) throws SAXException { log.debug("Start : getDOM"); Node node = getTidyParser().parseDOM(new ByteArrayInputStream(text), null); if (log.isDebugEnabled()) { log.debug("node : " + node); } log.debug("End : getDOM"); return node; } }
src/protocol/http/org/apache/jmeter/protocol/http/parser/JTidyHTMLParser.java
/* * Copyright 2003-2004 The Apache Software Foundation. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.apache.jmeter.protocol.http.parser; import java.io.ByteArrayInputStream; import java.net.MalformedURLException; import java.net.URL; import java.util.Iterator; import org.apache.jorphan.logging.LoggingManager; import org.apache.log.Logger; import org.w3c.dom.Document; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.w3c.tidy.Tidy; import org.xml.sax.SAXException; /** * HtmlParser implementation using JTidy. * */ class JTidyHTMLParser extends HTMLParser { private static final Logger log = LoggingManager.getLoggerForClass(); protected JTidyHTMLParser() { super(); } protected boolean isReusable() { return true; } /* * (non-Javadoc) * * @see org.apache.jmeter.protocol.http.parser.HTMLParser#getEmbeddedResourceURLs(byte[], * java.net.URL) */ public Iterator getEmbeddedResourceURLs(byte[] html, URL baseUrl, URLCollection urls) throws HTMLParseException { Document dom = null; try { dom = (Document) getDOM(html); } catch (SAXException se) { throw new HTMLParseException(se); } // Now parse the DOM tree scanNodes(dom, urls, baseUrl); return urls.iterator(); } /** * Scan nodes recursively, looking for embedded resources * * @param node - * initial node * @param urls - * container for URLs * @param baseUrl - * used to create absolute URLs * * @return new base URL */ private URL scanNodes(Node node, URLCollection urls, URL baseUrl) throws HTMLParseException { if (node == null) { return baseUrl; } String name = node.getNodeName(); int type = node.getNodeType(); switch (type) { case Node.DOCUMENT_NODE: scanNodes(((Document) node).getDocumentElement(), urls, baseUrl); break; case Node.ELEMENT_NODE: NamedNodeMap attrs = node.getAttributes(); if (name.equalsIgnoreCase(TAG_BASE)) { String tmp = getValue(attrs, ATT_HREF); if (tmp != null) try { baseUrl = new URL(baseUrl, tmp); } catch (MalformedURLException e) { throw new HTMLParseException(e); } break; } if (name.equalsIgnoreCase(TAG_IMAGE) || name.equalsIgnoreCase(TAG_EMBED)) { urls.addURL(getValue(attrs, ATT_SRC), baseUrl); break; } if (name.equalsIgnoreCase(TAG_APPLET)) { urls.addURL(getValue(attrs, "code"), baseUrl); break; } if (name.equalsIgnoreCase(TAG_INPUT)) { String src = getValue(attrs, ATT_SRC); String typ = getValue(attrs, ATT_TYPE); if ((src != null) && (typ.equalsIgnoreCase(ATT_IS_IMAGE))) { urls.addURL(src, baseUrl); } break; } if (name.equalsIgnoreCase(TAG_LINK) && getValue(attrs, ATT_REL).equalsIgnoreCase(STYLESHEET)) { urls.addURL(getValue(attrs, ATT_HREF), baseUrl); break; } if (name.equalsIgnoreCase(TAG_SCRIPT)) { urls.addURL(getValue(attrs, ATT_SRC), baseUrl); break; } if (name.equalsIgnoreCase(TAG_FRAME)) { urls.addURL(getValue(attrs, ATT_SRC), baseUrl); break; } String back = getValue(attrs, ATT_BACKGROUND); if (back != null) { urls.addURL(back, baseUrl); break; } if (name.equalsIgnoreCase(TAG_BGSOUND)) { urls.addURL(getValue(attrs, ATT_SRC), baseUrl); break; } NodeList children = node.getChildNodes(); if (children != null) { int len = children.getLength(); for (int i = 0; i < len; i++) { baseUrl = scanNodes(children.item(i), urls, baseUrl); } } break; // case Node.TEXT_NODE: // break; } return baseUrl; } /* * Helper method to get an attribute value, if it exists @param attrs list * of attributs @param attname attribute name @return */ private String getValue(NamedNodeMap attrs, String attname) { String v = null; Node n = attrs.getNamedItem(attname); if (n != null) v = n.getNodeValue(); return v; } /** * Returns <code>tidy</code> as HTML parser. * * @return a <code>tidy</code> HTML parser */ private static Tidy getTidyParser() { log.debug("Start : getParser"); Tidy tidy = new Tidy(); tidy.setCharEncoding(org.w3c.tidy.Configuration.UTF8); tidy.setQuiet(true); tidy.setShowWarnings(false); if (log.isDebugEnabled()) { log.debug("getParser : tidy parser created - " + tidy); } log.debug("End : getParser"); return tidy; } /** * Returns a node representing a whole xml given an xml document. * * @param text * an xml document (as a byte array) * @return a node representing a whole xml * * @throws SAXException * indicates an error parsing the xml document */ private static Node getDOM(byte[] text) throws SAXException { log.debug("Start : getDOM"); Node node = getTidyParser().parseDOM(new ByteArrayInputStream(text), null); if (log.isDebugEnabled()) { log.debug("node : " + node); } log.debug("End : getDOM"); return node; } }
Need to continue after finding a background attribute - otherwise body with background is not scanned git-svn-id: 52ad764cdf1b64a6e804f4e5ad13917d3c4b2253@392539 13f79535-47bb-0310-9956-ffa450edef68
src/protocol/http/org/apache/jmeter/protocol/http/parser/JTidyHTMLParser.java
Need to continue after finding a background attribute - otherwise body with background is not scanned
Java
apache-2.0
63b5c8be369123a5b95695f5b73ec74e15c60c15
0
liamgh/liamgreenhughes-sl4a-tf101,olapaola/olapaola-android-scripting,olapaola/olapaola-android-scripting,olapaola/olapaola-android-scripting,olapaola/olapaola-android-scripting,vlinhd11/vlinhd11-android-scripting,cristiana214/cristianachavez214-cristianachavez,cristiana214/cristianachavez214-cristianachavez,liamgh/liamgreenhughes-sl4a-tf101,liamgh/liamgreenhughes-sl4a-tf101,vlinhd11/vlinhd11-android-scripting,olapaola/olapaola-android-scripting,vlinhd11/vlinhd11-android-scripting,cristiana214/cristianachavez214-cristianachavez,cristiana214/cristianachavez214-cristianachavez,olapaola/olapaola-android-scripting,liamgh/liamgreenhughes-sl4a-tf101,cristiana214/cristianachavez214-cristianachavez,cristiana214/cristianachavez214-cristianachavez,cristiana214/cristianachavez214-cristianachavez,liamgh/liamgreenhughes-sl4a-tf101,vlinhd11/vlinhd11-android-scripting,cristiana214/cristianachavez214-cristianachavez,liamgh/liamgreenhughes-sl4a-tf101,vlinhd11/vlinhd11-android-scripting,vlinhd11/vlinhd11-android-scripting,cristiana214/cristianachavez214-cristianachavez,cristiana214/cristianachavez214-cristianachavez,liamgh/liamgreenhughes-sl4a-tf101,olapaola/olapaola-android-scripting,olapaola/olapaola-android-scripting,liamgh/liamgreenhughes-sl4a-tf101,olapaola/olapaola-android-scripting,vlinhd11/vlinhd11-android-scripting,vlinhd11/vlinhd11-android-scripting,vlinhd11/vlinhd11-android-scripting,liamgh/liamgreenhughes-sl4a-tf101,vlinhd11/vlinhd11-android-scripting,liamgh/liamgreenhughes-sl4a-tf101,olapaola/olapaola-android-scripting,cristiana214/cristianachavez214-cristianachavez,liamgh/liamgreenhughes-sl4a-tf101,olapaola/olapaola-android-scripting,vlinhd11/vlinhd11-android-scripting
/* * Copyright (C) 2010 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.google.ase.facade; import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; import android.app.Service; import android.content.Context; import android.os.Bundle; import com.google.ase.jsonrpc.RpcReceiver; import com.google.ase.rpc.Rpc; /** * This facade exposes the functionality to read from the event queue as an RPC, and the * functionality to write to the event queue as a pure java function. * * @author Felix Arends ([email protected]) * */ public class EventFacade implements RpcReceiver { /** * The maximum length of the event queue. Old events will be discarded when this limit is * exceeded. */ private static final int MAX_QUEUE_SIZE = 1024; final Queue<Bundle> mEventQueue = new ConcurrentLinkedQueue<Bundle>(); final Context mService; public EventFacade(final Service service) { mService = service; } @Rpc(description = "Receives the most recent event (i.e. location or sensor update, etc.)", returns = "Map of event properties.") public Bundle receiveEvent() { return mEventQueue.poll(); } /** * Posts an event on the event queue. */ void postEvent(String name, Bundle bundle) { Bundle event = new Bundle(bundle); event.putString("name", name); mEventQueue.add(event); if (mEventQueue.size() > MAX_QUEUE_SIZE) { mEventQueue.remove(); } } /** * Posts a simple event to the event queue. */ void postEvent(String name, String message) { Bundle event = new Bundle(); event.putString("name", name); event.putString("message", message); mEventQueue.add(event); } @Override public void shutdown() { } }
android/AndroidScriptingEnvironment/src/com/google/ase/facade/EventFacade.java
/* * Copyright (C) 2010 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.google.ase.facade; import java.util.Queue; import java.util.concurrent.ConcurrentLinkedQueue; import android.app.Service; import android.content.Context; import android.os.Bundle; import com.google.ase.jsonrpc.RpcReceiver; import com.google.ase.rpc.Rpc; /** * This facade exposes the functionality to read from the event queue as an RPC, and the * functionality to write to the event queue as a pure java function. * * @author Felix Arends ([email protected]) * */ public class EventFacade implements RpcReceiver { final Queue<Bundle> mEventQueue = new ConcurrentLinkedQueue<Bundle>(); final Context mService; public EventFacade(final Service service) { mService = service; } @Rpc(description = "Receives the most recent event (i.e. location or sensor update, etc.)", returns = "Map of event properties.") public Bundle receiveEvent() { return mEventQueue.poll(); } /** * Posts an event on the event queue. */ void postEvent(String name, Bundle bundle) { Bundle event = new Bundle(bundle); event.putString("name", name); mEventQueue.add(event); } /** * Posts a simple event to the event queue. */ void postEvent(String name, String message) { Bundle event = new Bundle(); event.putString("name", name); event.putString("message", message); mEventQueue.add(event); } @Override public void shutdown() { } }
Fixed potential memory leak in EventFacade: the queue fills up indefinitely if the events aren't polled.
android/AndroidScriptingEnvironment/src/com/google/ase/facade/EventFacade.java
Fixed potential memory leak in EventFacade: the queue fills up indefinitely if the events aren't polled.
Java
apache-2.0
7998adf5b5e485c3489212b429aa6bc902016108
0
fitermay/intellij-community,signed/intellij-community,hurricup/intellij-community,retomerz/intellij-community,SerCeMan/intellij-community,idea4bsd/idea4bsd,ol-loginov/intellij-community,supersven/intellij-community,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,lucafavatella/intellij-community,allotria/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,ftomassetti/intellij-community,Distrotech/intellij-community,ThiagoGarciaAlves/intellij-community,Lekanich/intellij-community,robovm/robovm-studio,hurricup/intellij-community,supersven/intellij-community,amith01994/intellij-community,vvv1559/intellij-community,dslomov/intellij-community,alphafoobar/intellij-community,petteyg/intellij-community,ibinti/intellij-community,caot/intellij-community,blademainer/intellij-community,vladmm/intellij-community,MichaelNedzelsky/intellij-community,Lekanich/intellij-community,da1z/intellij-community,ftomassetti/intellij-community,holmes/intellij-community,TangHao1987/intellij-community,kdwink/intellij-community,tmpgit/intellij-community,FHannes/intellij-community,apixandru/intellij-community,MER-GROUP/intellij-community,ivan-fedorov/intellij-community,ryano144/intellij-community,ahb0327/intellij-community,vladmm/intellij-community,holmes/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,caot/intellij-community,mglukhikh/intellij-community,kdwink/intellij-community,ThiagoGarciaAlves/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,semonte/intellij-community,diorcety/intellij-community,retomerz/intellij-community,robovm/robovm-studio,caot/intellij-community,FHannes/intellij-community,diorcety/intellij-community,asedunov/intellij-community,ernestp/consulo,gnuhub/intellij-community,MichaelNedzelsky/intellij-community,slisson/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,TangHao1987/intellij-community,caot/intellij-community,ol-loginov/intellij-community,fengbaicanhe/intellij-community,diorcety/intellij-community,vladmm/intellij-community,Lekanich/intellij-community,adedayo/intellij-community,jagguli/intellij-community,petteyg/intellij-community,asedunov/intellij-community,mglukhikh/intellij-community,amith01994/intellij-community,orekyuu/intellij-community,diorcety/intellij-community,caot/intellij-community,Lekanich/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,salguarnieri/intellij-community,diorcety/intellij-community,fengbaicanhe/intellij-community,holmes/intellij-community,asedunov/intellij-community,SerCeMan/intellij-community,fnouama/intellij-community,idea4bsd/idea4bsd,SerCeMan/intellij-community,robovm/robovm-studio,adedayo/intellij-community,MichaelNedzelsky/intellij-community,blademainer/intellij-community,idea4bsd/idea4bsd,apixandru/intellij-community,caot/intellij-community,adedayo/intellij-community,gnuhub/intellij-community,ernestp/consulo,robovm/robovm-studio,lucafavatella/intellij-community,akosyakov/intellij-community,orekyuu/intellij-community,izonder/intellij-community,vvv1559/intellij-community,adedayo/intellij-community,ahb0327/intellij-community,ibinti/intellij-community,akosyakov/intellij-community,SerCeMan/intellij-community,adedayo/intellij-community,signed/intellij-community,retomerz/intellij-community,michaelgallacher/intellij-community,vladmm/intellij-community,fnouama/intellij-community,caot/intellij-community,robovm/robovm-studio,holmes/intellij-community,signed/intellij-community,orekyuu/intellij-community,tmpgit/intellij-community,da1z/intellij-community,orekyuu/intellij-community,dslomov/intellij-community,apixandru/intellij-community,semonte/intellij-community,Distrotech/intellij-community,fitermay/intellij-community,petteyg/intellij-community,amith01994/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,slisson/intellij-community,ibinti/intellij-community,semonte/intellij-community,ftomassetti/intellij-community,slisson/intellij-community,ahb0327/intellij-community,FHannes/intellij-community,orekyuu/intellij-community,Distrotech/intellij-community,lucafavatella/intellij-community,kool79/intellij-community,MER-GROUP/intellij-community,kool79/intellij-community,ryano144/intellij-community,fengbaicanhe/intellij-community,wreckJ/intellij-community,blademainer/intellij-community,fengbaicanhe/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,izonder/intellij-community,ahb0327/intellij-community,michaelgallacher/intellij-community,pwoodworth/intellij-community,adedayo/intellij-community,idea4bsd/idea4bsd,amith01994/intellij-community,signed/intellij-community,samthor/intellij-community,suncycheng/intellij-community,salguarnieri/intellij-community,holmes/intellij-community,fengbaicanhe/intellij-community,lucafavatella/intellij-community,alphafoobar/intellij-community,dslomov/intellij-community,allotria/intellij-community,xfournet/intellij-community,amith01994/intellij-community,SerCeMan/intellij-community,fengbaicanhe/intellij-community,ThiagoGarciaAlves/intellij-community,kool79/intellij-community,samthor/intellij-community,akosyakov/intellij-community,ol-loginov/intellij-community,youdonghai/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,nicolargo/intellij-community,lucafavatella/intellij-community,kool79/intellij-community,muntasirsyed/intellij-community,retomerz/intellij-community,ahb0327/intellij-community,FHannes/intellij-community,vladmm/intellij-community,da1z/intellij-community,fnouama/intellij-community,supersven/intellij-community,muntasirsyed/intellij-community,blademainer/intellij-community,retomerz/intellij-community,alphafoobar/intellij-community,kool79/intellij-community,mglukhikh/intellij-community,muntasirsyed/intellij-community,ahb0327/intellij-community,FHannes/intellij-community,nicolargo/intellij-community,holmes/intellij-community,idea4bsd/idea4bsd,petteyg/intellij-community,vvv1559/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,amith01994/intellij-community,muntasirsyed/intellij-community,clumsy/intellij-community,hurricup/intellij-community,nicolargo/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,fnouama/intellij-community,salguarnieri/intellij-community,vladmm/intellij-community,gnuhub/intellij-community,dslomov/intellij-community,akosyakov/intellij-community,ryano144/intellij-community,suncycheng/intellij-community,kool79/intellij-community,SerCeMan/intellij-community,holmes/intellij-community,blademainer/intellij-community,supersven/intellij-community,allotria/intellij-community,ahb0327/intellij-community,ol-loginov/intellij-community,kdwink/intellij-community,kdwink/intellij-community,supersven/intellij-community,izonder/intellij-community,salguarnieri/intellij-community,akosyakov/intellij-community,samthor/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,holmes/intellij-community,fengbaicanhe/intellij-community,nicolargo/intellij-community,MER-GROUP/intellij-community,fengbaicanhe/intellij-community,ahb0327/intellij-community,petteyg/intellij-community,da1z/intellij-community,alphafoobar/intellij-community,fitermay/intellij-community,signed/intellij-community,adedayo/intellij-community,suncycheng/intellij-community,MER-GROUP/intellij-community,muntasirsyed/intellij-community,tmpgit/intellij-community,joewalnes/idea-community,hurricup/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,diorcety/intellij-community,Lekanich/intellij-community,FHannes/intellij-community,supersven/intellij-community,orekyuu/intellij-community,apixandru/intellij-community,ol-loginov/intellij-community,salguarnieri/intellij-community,dslomov/intellij-community,joewalnes/idea-community,blademainer/intellij-community,da1z/intellij-community,fitermay/intellij-community,retomerz/intellij-community,Distrotech/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,ibinti/intellij-community,fitermay/intellij-community,ftomassetti/intellij-community,suncycheng/intellij-community,dslomov/intellij-community,suncycheng/intellij-community,petteyg/intellij-community,mglukhikh/intellij-community,joewalnes/idea-community,ivan-fedorov/intellij-community,clumsy/intellij-community,suncycheng/intellij-community,samthor/intellij-community,MichaelNedzelsky/intellij-community,MichaelNedzelsky/intellij-community,ernestp/consulo,izonder/intellij-community,lucafavatella/intellij-community,michaelgallacher/intellij-community,ryano144/intellij-community,suncycheng/intellij-community,fnouama/intellij-community,xfournet/intellij-community,ivan-fedorov/intellij-community,nicolargo/intellij-community,FHannes/intellij-community,signed/intellij-community,fitermay/intellij-community,pwoodworth/intellij-community,suncycheng/intellij-community,Lekanich/intellij-community,TangHao1987/intellij-community,MER-GROUP/intellij-community,signed/intellij-community,consulo/consulo,asedunov/intellij-community,da1z/intellij-community,fnouama/intellij-community,diorcety/intellij-community,izonder/intellij-community,gnuhub/intellij-community,akosyakov/intellij-community,slisson/intellij-community,ivan-fedorov/intellij-community,vvv1559/intellij-community,fitermay/intellij-community,alphafoobar/intellij-community,kdwink/intellij-community,gnuhub/intellij-community,petteyg/intellij-community,izonder/intellij-community,joewalnes/idea-community,izonder/intellij-community,supersven/intellij-community,amith01994/intellij-community,vvv1559/intellij-community,wreckJ/intellij-community,kdwink/intellij-community,hurricup/intellij-community,ryano144/intellij-community,kdwink/intellij-community,gnuhub/intellij-community,consulo/consulo,clumsy/intellij-community,salguarnieri/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,MER-GROUP/intellij-community,ol-loginov/intellij-community,ftomassetti/intellij-community,allotria/intellij-community,slisson/intellij-community,asedunov/intellij-community,samthor/intellij-community,blademainer/intellij-community,TangHao1987/intellij-community,Distrotech/intellij-community,michaelgallacher/intellij-community,xfournet/intellij-community,Lekanich/intellij-community,muntasirsyed/intellij-community,diorcety/intellij-community,amith01994/intellij-community,gnuhub/intellij-community,ibinti/intellij-community,MER-GROUP/intellij-community,samthor/intellij-community,mglukhikh/intellij-community,robovm/robovm-studio,muntasirsyed/intellij-community,apixandru/intellij-community,ibinti/intellij-community,wreckJ/intellij-community,gnuhub/intellij-community,idea4bsd/idea4bsd,tmpgit/intellij-community,caot/intellij-community,MichaelNedzelsky/intellij-community,retomerz/intellij-community,ivan-fedorov/intellij-community,FHannes/intellij-community,semonte/intellij-community,consulo/consulo,kool79/intellij-community,michaelgallacher/intellij-community,vladmm/intellij-community,FHannes/intellij-community,jagguli/intellij-community,ftomassetti/intellij-community,diorcety/intellij-community,da1z/intellij-community,akosyakov/intellij-community,semonte/intellij-community,allotria/intellij-community,vvv1559/intellij-community,slisson/intellij-community,ahb0327/intellij-community,slisson/intellij-community,holmes/intellij-community,wreckJ/intellij-community,joewalnes/idea-community,jagguli/intellij-community,semonte/intellij-community,clumsy/intellij-community,kdwink/intellij-community,ernestp/consulo,caot/intellij-community,wreckJ/intellij-community,caot/intellij-community,semonte/intellij-community,slisson/intellij-community,Distrotech/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,MichaelNedzelsky/intellij-community,blademainer/intellij-community,ryano144/intellij-community,dslomov/intellij-community,joewalnes/idea-community,hurricup/intellij-community,kdwink/intellij-community,vladmm/intellij-community,Distrotech/intellij-community,pwoodworth/intellij-community,akosyakov/intellij-community,signed/intellij-community,youdonghai/intellij-community,izonder/intellij-community,samthor/intellij-community,fitermay/intellij-community,wreckJ/intellij-community,FHannes/intellij-community,pwoodworth/intellij-community,joewalnes/idea-community,mglukhikh/intellij-community,blademainer/intellij-community,gnuhub/intellij-community,da1z/intellij-community,SerCeMan/intellij-community,signed/intellij-community,MichaelNedzelsky/intellij-community,ivan-fedorov/intellij-community,mglukhikh/intellij-community,fengbaicanhe/intellij-community,orekyuu/intellij-community,ivan-fedorov/intellij-community,asedunov/intellij-community,retomerz/intellij-community,Distrotech/intellij-community,samthor/intellij-community,ivan-fedorov/intellij-community,apixandru/intellij-community,blademainer/intellij-community,consulo/consulo,TangHao1987/intellij-community,wreckJ/intellij-community,amith01994/intellij-community,orekyuu/intellij-community,tmpgit/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,FHannes/intellij-community,pwoodworth/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,nicolargo/intellij-community,petteyg/intellij-community,asedunov/intellij-community,izonder/intellij-community,suncycheng/intellij-community,ol-loginov/intellij-community,kdwink/intellij-community,izonder/intellij-community,semonte/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,MER-GROUP/intellij-community,fnouama/intellij-community,alphafoobar/intellij-community,retomerz/intellij-community,semonte/intellij-community,nicolargo/intellij-community,Distrotech/intellij-community,apixandru/intellij-community,alphafoobar/intellij-community,wreckJ/intellij-community,retomerz/intellij-community,pwoodworth/intellij-community,wreckJ/intellij-community,supersven/intellij-community,petteyg/intellij-community,Lekanich/intellij-community,holmes/intellij-community,apixandru/intellij-community,Distrotech/intellij-community,idea4bsd/idea4bsd,alphafoobar/intellij-community,idea4bsd/idea4bsd,lucafavatella/intellij-community,adedayo/intellij-community,xfournet/intellij-community,retomerz/intellij-community,adedayo/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,asedunov/intellij-community,MichaelNedzelsky/intellij-community,slisson/intellij-community,retomerz/intellij-community,pwoodworth/intellij-community,tmpgit/intellij-community,salguarnieri/intellij-community,michaelgallacher/intellij-community,pwoodworth/intellij-community,petteyg/intellij-community,fnouama/intellij-community,caot/intellij-community,SerCeMan/intellij-community,youdonghai/intellij-community,fnouama/intellij-community,blademainer/intellij-community,pwoodworth/intellij-community,TangHao1987/intellij-community,ibinti/intellij-community,ftomassetti/intellij-community,orekyuu/intellij-community,michaelgallacher/intellij-community,allotria/intellij-community,tmpgit/intellij-community,nicolargo/intellij-community,MichaelNedzelsky/intellij-community,semonte/intellij-community,dslomov/intellij-community,clumsy/intellij-community,allotria/intellij-community,clumsy/intellij-community,kool79/intellij-community,ahb0327/intellij-community,MER-GROUP/intellij-community,kdwink/intellij-community,ol-loginov/intellij-community,orekyuu/intellij-community,clumsy/intellij-community,michaelgallacher/intellij-community,dslomov/intellij-community,allotria/intellij-community,Lekanich/intellij-community,holmes/intellij-community,dslomov/intellij-community,salguarnieri/intellij-community,muntasirsyed/intellij-community,idea4bsd/idea4bsd,youdonghai/intellij-community,petteyg/intellij-community,TangHao1987/intellij-community,vvv1559/intellij-community,ftomassetti/intellij-community,apixandru/intellij-community,nicolargo/intellij-community,akosyakov/intellij-community,ivan-fedorov/intellij-community,vladmm/intellij-community,Lekanich/intellij-community,pwoodworth/intellij-community,hurricup/intellij-community,semonte/intellij-community,adedayo/intellij-community,salguarnieri/intellij-community,SerCeMan/intellij-community,supersven/intellij-community,hurricup/intellij-community,ryano144/intellij-community,ol-loginov/intellij-community,robovm/robovm-studio,ibinti/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,fengbaicanhe/intellij-community,retomerz/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,youdonghai/intellij-community,lucafavatella/intellij-community,ol-loginov/intellij-community,blademainer/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,asedunov/intellij-community,ivan-fedorov/intellij-community,lucafavatella/intellij-community,joewalnes/idea-community,supersven/intellij-community,lucafavatella/intellij-community,signed/intellij-community,hurricup/intellij-community,holmes/intellij-community,Distrotech/intellij-community,xfournet/intellij-community,joewalnes/idea-community,pwoodworth/intellij-community,FHannes/intellij-community,semonte/intellij-community,fnouama/intellij-community,SerCeMan/intellij-community,kool79/intellij-community,orekyuu/intellij-community,SerCeMan/intellij-community,kool79/intellij-community,clumsy/intellij-community,alphafoobar/intellij-community,samthor/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,jagguli/intellij-community,ryano144/intellij-community,muntasirsyed/intellij-community,ahb0327/intellij-community,da1z/intellij-community,adedayo/intellij-community,slisson/intellij-community,signed/intellij-community,xfournet/intellij-community,semonte/intellij-community,tmpgit/intellij-community,nicolargo/intellij-community,samthor/intellij-community,allotria/intellij-community,jagguli/intellij-community,jagguli/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,robovm/robovm-studio,amith01994/intellij-community,amith01994/intellij-community,ahb0327/intellij-community,gnuhub/intellij-community,clumsy/intellij-community,diorcety/intellij-community,clumsy/intellij-community,hurricup/intellij-community,akosyakov/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,petteyg/intellij-community,fitermay/intellij-community,apixandru/intellij-community,dslomov/intellij-community,samthor/intellij-community,kool79/intellij-community,tmpgit/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,ftomassetti/intellij-community,TangHao1987/intellij-community,da1z/intellij-community,ThiagoGarciaAlves/intellij-community,tmpgit/intellij-community,adedayo/intellij-community,samthor/intellij-community,consulo/consulo,pwoodworth/intellij-community,wreckJ/intellij-community,clumsy/intellij-community,ol-loginov/intellij-community,hurricup/intellij-community,fnouama/intellij-community,MER-GROUP/intellij-community,jagguli/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,vladmm/intellij-community,akosyakov/intellij-community,slisson/intellij-community,fitermay/intellij-community,SerCeMan/intellij-community,signed/intellij-community,salguarnieri/intellij-community,ftomassetti/intellij-community,alphafoobar/intellij-community,xfournet/intellij-community,muntasirsyed/intellij-community,TangHao1987/intellij-community,robovm/robovm-studio,ol-loginov/intellij-community,MER-GROUP/intellij-community,MER-GROUP/intellij-community,diorcety/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,jagguli/intellij-community,asedunov/intellij-community,Lekanich/intellij-community,jagguli/intellij-community,ernestp/consulo,fnouama/intellij-community,fengbaicanhe/intellij-community,dslomov/intellij-community,salguarnieri/intellij-community,alphafoobar/intellij-community,ryano144/intellij-community,ryano144/intellij-community,alphafoobar/intellij-community,TangHao1987/intellij-community,asedunov/intellij-community,xfournet/intellij-community,ibinti/intellij-community,nicolargo/intellij-community,vvv1559/intellij-community,jagguli/intellij-community,kdwink/intellij-community,ThiagoGarciaAlves/intellij-community,robovm/robovm-studio,supersven/intellij-community,wreckJ/intellij-community,izonder/intellij-community,supersven/intellij-community,ernestp/consulo,clumsy/intellij-community,fengbaicanhe/intellij-community,gnuhub/intellij-community,ftomassetti/intellij-community,ivan-fedorov/intellij-community,apixandru/intellij-community,asedunov/intellij-community,wreckJ/intellij-community,ryano144/intellij-community,youdonghai/intellij-community,orekyuu/intellij-community,da1z/intellij-community,amith01994/intellij-community,michaelgallacher/intellij-community,slisson/intellij-community,caot/intellij-community,Lekanich/intellij-community,youdonghai/intellij-community,allotria/intellij-community,ivan-fedorov/intellij-community,allotria/intellij-community,robovm/robovm-studio,fitermay/intellij-community,lucafavatella/intellij-community,ryano144/intellij-community,akosyakov/intellij-community,izonder/intellij-community,apixandru/intellij-community,gnuhub/intellij-community,muntasirsyed/intellij-community,consulo/consulo,vladmm/intellij-community,ibinti/intellij-community,TangHao1987/intellij-community,suncycheng/intellij-community,robovm/robovm-studio,hurricup/intellij-community
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.actionSystem.ex; import com.intellij.openapi.actionSystem.ActionGroup; import com.intellij.openapi.actionSystem.AnAction; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.Presentation; import com.intellij.openapi.project.DumbAware; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.IndexNotReadyException; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.text.StringUtil; import org.jetbrains.annotations.NonNls; import java.util.ArrayList; import java.util.List; public class ActionUtil { @NonNls private static final String WAS_ENABLED_BEFORE_DUMB = "WAS_ENABLED_BEFORE_DUMB"; @NonNls public static final String WOULD_BE_ENABLED_IF_NOT_DUMB_MODE = "WOULD_BE_ENABLED_IF_NOT_DUMB_MODE"; @NonNls private static final String WOULD_BE_VISIBLE_IF_NOT_DUMB_MODE = "WOULD_BE_VISIBLE_IF_NOT_DUMB_MODE"; private ActionUtil() { } public static void showDumbModeWarning(AnActionEvent... events) { Project project = null; List<String> actionNames = new ArrayList<String>(); for (final AnActionEvent event : events) { final String s = event.getPresentation().getText(); if (StringUtil.isNotEmpty(s)) { actionNames.add(s); } final Project _project = (Project)event.getDataContext().getData(DataConstantsEx.PROJECT); if (_project != null && project == null) { project = _project; } } if (project == null) { return; } String message; final String beAvailableUntil = " available while IntelliJ IDEA is updating indices"; if (actionNames.isEmpty()) { message = "This action is not" + beAvailableUntil; } else if (actionNames.size() == 1) { message = "'" + actionNames.get(0) + "' action is not" + beAvailableUntil; } else { message = "None of the following actions are" + beAvailableUntil + ": " + StringUtil.join(actionNames, ", "); } DumbService.getInstance(project).showDumbModeNotification(message); } /** * @param action action * @param e action event * @param beforeActionPerformed whether to call * {@link com.intellij.openapi.actionSystem.AnAction#beforeActionPerformedUpdate(com.intellij.openapi.actionSystem.AnActionEvent)} * or * {@link com.intellij.openapi.actionSystem.AnAction#update(com.intellij.openapi.actionSystem.AnActionEvent)} * @return true if update tried to access indices in dumb mode */ public static boolean performDumbAwareUpdate(AnAction action, AnActionEvent e, boolean beforeActionPerformed) { final Presentation presentation = e.getPresentation(); final Boolean wasEnabledBefore = (Boolean)presentation.getClientProperty(WAS_ENABLED_BEFORE_DUMB); final Project project = (Project)e.getDataContext().getData(DataConstantsEx.PROJECT); final boolean dumbMode = project != null && DumbService.getInstance(project).isDumb(); if (wasEnabledBefore != null && !dumbMode) { presentation.putClientProperty(WAS_ENABLED_BEFORE_DUMB, null); presentation.setEnabled(wasEnabledBefore.booleanValue()); presentation.setVisible(true); } final boolean enabledBeforeUpdate = presentation.isEnabled(); final boolean notAllowed = dumbMode && !(action instanceof DumbAware) && !(action instanceof ActionGroup); try { if (beforeActionPerformed) { action.beforeActionPerformedUpdate(e); } else { action.update(e); } presentation.putClientProperty(WOULD_BE_ENABLED_IF_NOT_DUMB_MODE, notAllowed && presentation.isEnabled()); presentation.putClientProperty(WOULD_BE_VISIBLE_IF_NOT_DUMB_MODE, notAllowed && presentation.isVisible()); } catch (IndexNotReadyException e1) { if (notAllowed) { return true; } throw e1; } finally { if (notAllowed) { if (wasEnabledBefore == null) { presentation.putClientProperty(WAS_ENABLED_BEFORE_DUMB, enabledBeforeUpdate); } presentation.setEnabled(false); } } return false; } public static boolean lastUpdateAndCheckDumb(AnAction action, AnActionEvent e, boolean visibilityMatters) { performDumbAwareUpdate(action, e, true); final Project project = (Project)e.getDataContext().getData(DataConstantsEx.PROJECT); if (project != null && DumbService.getInstance(project).isDumb() && !(action instanceof DumbAware)) { if (Boolean.FALSE.equals(e.getPresentation().getClientProperty(WOULD_BE_ENABLED_IF_NOT_DUMB_MODE))) { return false; } if (visibilityMatters && Boolean.FALSE.equals(e.getPresentation().getClientProperty(WOULD_BE_VISIBLE_IF_NOT_DUMB_MODE))) { return false; } showDumbModeWarning(e); return false; } if (!e.getPresentation().isEnabled()) { return false; } if (visibilityMatters && !e.getPresentation().isVisible()) { return false; } return true; } }
platform/platform-api/src/com/intellij/openapi/actionSystem/ex/ActionUtil.java
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.openapi.actionSystem.ex; import com.intellij.openapi.actionSystem.ActionGroup; import com.intellij.openapi.actionSystem.AnAction; import com.intellij.openapi.actionSystem.AnActionEvent; import com.intellij.openapi.actionSystem.Presentation; import com.intellij.openapi.project.DumbAware; import com.intellij.openapi.project.DumbService; import com.intellij.openapi.project.IndexNotReadyException; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.text.StringUtil; import org.jetbrains.annotations.NonNls; import java.util.ArrayList; import java.util.List; public class ActionUtil { @NonNls private static final String WAS_ENABLED_BEFORE_DUMB = "WAS_ENABLED_BEFORE_DUMB"; @NonNls public static final String WOULD_BE_ENABLED_IF_NOT_DUMB_MODE = "WOULD_BE_ENABLED_IF_NOT_DUMB_MODE"; @NonNls private static final String WOULD_BE_VISIBLE_IF_NOT_DUMB_MODE = "WOULD_BE_VISIBLE_IF_NOT_DUMB_MODE"; private ActionUtil() { } public static void showDumbModeWarning(AnActionEvent... events) { Project project = null; List<String> actionNames = new ArrayList<String>(); for (final AnActionEvent event : events) { final String s = event.getPresentation().getText(); if (StringUtil.isNotEmpty(s)) { actionNames.add(s); } final Project _project = (Project)event.getDataContext().getData(DataConstantsEx.PROJECT); if (_project != null && project == null) { project = _project; } } if (project == null) { return; } String message; final String beAvailableUntil = " available while IntelliJ IDEA is updating indices"; if (actionNames.isEmpty()) { message = "This action is not" + beAvailableUntil; } else if (actionNames.size() == 1) { message = "'" + actionNames.get(0) + "' action is not" + beAvailableUntil; } else { message = "None of the following actions are" + beAvailableUntil + ": " + StringUtil.join(actionNames, ", "); } DumbService.getInstance(project).showDumbModeNotification(message); } /** * @param action action * @param e action event * @param beforeActionPerformed whether to call * {@link com.intellij.openapi.actionSystem.AnAction#beforeActionPerformedUpdate(com.intellij.openapi.actionSystem.AnActionEvent)} * or * {@link com.intellij.openapi.actionSystem.AnAction#update(com.intellij.openapi.actionSystem.AnActionEvent)} * @return true if update tried to access indices in dumb mode */ public static boolean performDumbAwareUpdate(AnAction action, AnActionEvent e, boolean beforeActionPerformed) { final Presentation presentation = e.getPresentation(); final Boolean wasEnabledBefore = (Boolean)presentation.getClientProperty(WAS_ENABLED_BEFORE_DUMB); final Project project = (Project)e.getDataContext().getData(DataConstantsEx.PROJECT); final boolean dumbMode = project != null && DumbService.getInstance(project).isDumb(); if (wasEnabledBefore != null && !dumbMode) { presentation.putClientProperty(WAS_ENABLED_BEFORE_DUMB, null); presentation.setEnabled(wasEnabledBefore.booleanValue()); presentation.setVisible(true); } final boolean enabledBeforeUpdate = presentation.isEnabled(); final boolean notAllowed = dumbMode && !(action instanceof DumbAware) && !(action instanceof ActionGroup); try { if (beforeActionPerformed) { action.beforeActionPerformedUpdate(e); } else { action.update(e); } presentation.putClientProperty(WOULD_BE_ENABLED_IF_NOT_DUMB_MODE, notAllowed && presentation.isEnabled()); presentation.putClientProperty(WOULD_BE_VISIBLE_IF_NOT_DUMB_MODE, notAllowed && presentation.isVisible()); } catch (IndexNotReadyException e1) { if (notAllowed) { return true; } throw e1; } finally { if (notAllowed) { if (wasEnabledBefore == null) { presentation.putClientProperty(WAS_ENABLED_BEFORE_DUMB, enabledBeforeUpdate); } presentation.setEnabled(false); presentation.setVisible(false); } } return false; } public static boolean lastUpdateAndCheckDumb(AnAction action, AnActionEvent e, boolean visibilityMatters) { performDumbAwareUpdate(action, e, true); final Project project = (Project)e.getDataContext().getData(DataConstantsEx.PROJECT); if (project != null && DumbService.getInstance(project).isDumb() && !(action instanceof DumbAware)) { if (Boolean.FALSE.equals(e.getPresentation().getClientProperty(WOULD_BE_ENABLED_IF_NOT_DUMB_MODE))) { return false; } if (visibilityMatters && Boolean.FALSE.equals(e.getPresentation().getClientProperty(WOULD_BE_VISIBLE_IF_NOT_DUMB_MODE))) { return false; } showDumbModeWarning(e); return false; } if (!e.getPresentation().isEnabled()) { return false; } if (visibilityMatters && !e.getPresentation().isVisible()) { return false; } return true; } }
Actions: do not hide non-dumb-aware actions, just disable them
platform/platform-api/src/com/intellij/openapi/actionSystem/ex/ActionUtil.java
Actions: do not hide non-dumb-aware actions, just disable them
Java
apache-2.0
db2215dc0dc7344627efb263765986cbc0c5ad9b
0
mbhk/barcode4j,mbhk/barcode4j
/* * Copyright 2006 Jeremias Maerki. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* $Id$ */ package org.krysalis.barcode4j.impl.datamatrix; import org.krysalis.barcode4j.TwoDimBarcodeLogicHandler; /** * Top-level class for the logic part of the DataMatrix implementation. * * @version $Id$ */ public class DataMatrixLogicImpl { private static final boolean DEBUG = false; /** * Generates the barcode logic. * @param logic the logic handler to receive generated events * @param msg the message to encode */ public void generateBarcodeLogic(TwoDimBarcodeLogicHandler logic, String msg) { //ECC 200 //1. step: Data encodation String encoded = DataMatrixHighLevelEncoder.encodeHighLevel(msg); DataMatrixSymbolInfo symbolInfo = DataMatrixSymbolInfo.lookup(encoded.length()); if (DEBUG) System.out.println(symbolInfo); //2. step: ECC generation String codewords = DataMatrixErrorCorrection.encodeECC200( encoded, symbolInfo); //3. step: Module placement in Matrix DefaultDataMatrixPlacement placement = new DefaultDataMatrixPlacement( codewords, symbolInfo.getSymbolDataWidth(), symbolInfo.getSymbolDataHeight()); placement.place(); //4. step: low-level encoding logic.startBarcode(msg, msg); encodeLowLevel(logic, placement, symbolInfo); logic.endBarcode(); } private void encodeLowLevel(TwoDimBarcodeLogicHandler logic, DataMatrixPlacement placement, DataMatrixSymbolInfo symbolInfo) { int symbolWidth = symbolInfo.getSymbolDataWidth(); int symbolHeight = symbolInfo.getSymbolDataHeight(); for (int y = 0; y < symbolHeight; y++) { if ((y % symbolInfo.matrixHeight) == 0) { logic.startRow(); for (int x = 0; x < symbolInfo.getSymbolWidth(); x++) { logic.addBar((x % 2) == 0, 1); } logic.endRow(); } logic.startRow(); for (int x = 0; x < symbolWidth; x++) { if ((x % symbolInfo.matrixWidth) == 0) { logic.addBar(true, 1); //left finder edge } logic.addBar(placement.getBit(x, y), 1); if ((x % symbolInfo.matrixWidth) == symbolInfo.matrixWidth - 1) { logic.addBar((y % 2) == 0, 1); //right finder edge } } logic.endRow(); if ((y % symbolInfo.matrixHeight) == symbolInfo.matrixHeight - 1) { logic.startRow(); for (int x = 0; x < symbolInfo.getSymbolWidth(); x++) { logic.addBar(true, 1); } logic.endRow(); } } } }
barcode4j/src/java/org/krysalis/barcode4j/impl/datamatrix/DataMatrixLogicImpl.java
/* * Copyright 2006 Jeremias Maerki. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* $Id$ */ package org.krysalis.barcode4j.impl.datamatrix; import org.krysalis.barcode4j.TwoDimBarcodeLogicHandler; /** * Top-level class for the logic part of the DataMatrix implementation. * * @version $Id$ */ public class DataMatrixLogicImpl { private static final boolean DEBUG = true; /** * Generates the barcode logic. * @param logic the logic handler to receive generated events * @param msg the message to encode */ public void generateBarcodeLogic(TwoDimBarcodeLogicHandler logic, String msg) { //ECC 200 //1. step: Data encodation String encoded = DataMatrixHighLevelEncoder.encodeHighLevel(msg); DataMatrixSymbolInfo symbolInfo = DataMatrixSymbolInfo.lookup(encoded.length()); if (DEBUG) System.out.println(symbolInfo); //2. step: ECC generation String codewords = DataMatrixErrorCorrection.encodeECC200( encoded, symbolInfo); //3. step: Module placement in Matrix DefaultDataMatrixPlacement placement = new DefaultDataMatrixPlacement( codewords, symbolInfo.getSymbolDataWidth(), symbolInfo.getSymbolDataHeight()); placement.place(); //4. step: low-level encoding logic.startBarcode(msg, msg); encodeLowLevel(logic, placement, symbolInfo); logic.endBarcode(); } private void encodeLowLevel(TwoDimBarcodeLogicHandler logic, DataMatrixPlacement placement, DataMatrixSymbolInfo symbolInfo) { int symbolWidth = symbolInfo.getSymbolDataWidth(); int symbolHeight = symbolInfo.getSymbolDataHeight(); for (int y = 0; y < symbolHeight; y++) { if ((y % symbolInfo.matrixHeight) == 0) { logic.startRow(); for (int x = 0; x < symbolInfo.getSymbolWidth(); x++) { logic.addBar((x % 2) == 0, 1); } logic.endRow(); } logic.startRow(); for (int x = 0; x < symbolWidth; x++) { if ((x % symbolInfo.matrixWidth) == 0) { logic.addBar(true, 1); //left finder edge } logic.addBar(placement.getBit(x, y), 1); if ((x % symbolInfo.matrixWidth) == symbolInfo.matrixWidth - 1) { logic.addBar((y % 2) == 0, 1); //right finder edge } } logic.endRow(); if ((y % symbolInfo.matrixHeight) == symbolInfo.matrixHeight - 1) { logic.startRow(); for (int x = 0; x < symbolInfo.getSymbolWidth(); x++) { logic.addBar(true, 1); } logic.endRow(); } } } }
Disabled logging output.
barcode4j/src/java/org/krysalis/barcode4j/impl/datamatrix/DataMatrixLogicImpl.java
Disabled logging output.
Java
apache-2.0
3e7706168b090e9c70756a42c0482f7ba4e9c7c3
0
wildfly/wildfly-http-client
/* * JBoss, Home of Professional Open Source. * Copyright 2022 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wildfly.httpclient.common; import io.undertow.client.ClientCallback; import io.undertow.client.ClientConnection; import io.undertow.client.ClientExchange; import io.undertow.client.ClientRequest; import io.undertow.client.ClientResponse; import io.undertow.client.ContinueNotification; import io.undertow.client.PushCallback; import io.undertow.connector.ByteBufferPool; import io.undertow.server.HttpHandler; import io.undertow.server.HttpServerExchange; import io.undertow.server.handlers.PathHandler; import io.undertow.util.AbstractAttachable; import io.undertow.util.AttachmentKey; import io.undertow.util.AttachmentList; import io.undertow.util.HttpString; import org.wildfly.security.manager.WildFlySecurityManager; import org.xnio.OptionMap; import org.xnio.XnioWorker; import org.xnio.channels.StreamSinkChannel; import org.xnio.channels.StreamSourceChannel; import javax.net.ssl.SSLContext; import java.io.IOException; import java.net.URI; import java.util.List; import static org.jboss.marshalling.ClassNameTransformer.JAVAEE_TO_JAKARTAEE; import static org.wildfly.httpclient.common.HttpMarshallerFactory.DEFAULT_FACTORY; import static org.wildfly.httpclient.common.Protocol.VERSION_ONE_PATH; import static org.wildfly.httpclient.common.Protocol.VERSION_TWO_PATH; /** * EE namespace interoperability implementation for allowing Jakarta EE namespace servers and clients communication with * Javax EE namespace endpoints. * * EE namespace interoperability must be enabled on all Jakarta servers and clients to make communication * among them possible. * * @author Flavia Rainone * @author Richard Opalka */ final class EENamespaceInteroperability { /** * Indicates if EE namespace interoperable mode is enabled. */ static final boolean EE_NAMESPACE_INTEROPERABLE_MODE = Boolean.parseBoolean( WildFlySecurityManager.getPropertyPrivileged("org.wildfly.ee.namespace.interop", "false")); // header indicating the protocol version mode that is being used by the request/response sender private static final HttpString PROTOCOL_VERSION = new HttpString("x-wf-version"); // value for PROTOCOL_VERSION header: used to handshake a higher version, only when both ends use EE jakarta namespace private static final String LATEST_VERSION = String.valueOf(Protocol.LATEST); // key used to attach http marshaller factory to a client request / server exchange private static final AttachmentKey<HttpMarshallerFactory> HTTP_MARSHALLER_FACTORY_KEY = AttachmentKey.create(HttpMarshallerFactory.class); // key used to attach an http unmarshaller factory to a server exchange private static final AttachmentKey<HttpMarshallerFactory> HTTP_UNMARSHALLER_FACTORY_KEY = AttachmentKey.create(HttpMarshallerFactory.class); // marshaller factory to be used when Javax<->Jakarta transformation is needed private static final HttpMarshallerFactory INTEROPERABLE_MARSHALLER_FACTORY = new HttpMarshallerFactory(JAVAEE_TO_JAKARTAEE); static { if (EE_NAMESPACE_INTEROPERABLE_MODE) { HttpClientMessages.MESSAGES.javaeeToJakartaeeBackwardCompatibilityLayerInstalled(); } } private EENamespaceInteroperability() {} /** * Wraps the HTTP server handler into an EE namespace interoperable handler. Such handler implements the * EE namespace interoperability at the server side before delegating to the wrapped {@code httpHandler} * * @param httpHandler the handler to be wrapped * @return handler the ee namespace interoperability handler */ static HttpHandler createInteroperabilityHandler(HttpHandler httpHandler) { return createProtocolVersionHttpHandler(new EENamespaceInteroperabilityHandler(httpHandler), new JakartaNamespaceHandler(httpHandler)); } static HttpHandler createProtocolVersionHttpHandler(HttpHandler interoperabilityHandler, HttpHandler latestProtocolHandler) { final PathHandler versionPathHandler = new PathHandler(); versionPathHandler.addPrefixPath(VERSION_ONE_PATH, interoperabilityHandler); versionPathHandler.addPrefixPath(VERSION_TWO_PATH, latestProtocolHandler); return versionPathHandler; } /** * Returns the HTTPMarshallerFactoryProvider instance responsible for taking care of marshalling * and unmarshalling according to the values negotiated by the ee namespace interoperability headers. * * @return the HTTPMarshallerFactoryProvider. All marshalling and unmarshalling done at both server * and client side have to be done through a factory provided by this object. */ static HttpMarshallerFactoryProvider getHttpMarshallerFactoryProvider() { return new HttpMarshallerFactoryProvider() { @Override public HttpMarshallerFactory getMarshallerFactory(AbstractAttachable attachable) { return attachable.getAttachment(HTTP_MARSHALLER_FACTORY_KEY); } @Override public HttpMarshallerFactory getUnmarshallerFactory(AbstractAttachable attachable) { return attachable.getAttachment(HTTP_UNMARSHALLER_FACTORY_KEY); } }; } /** * Returns the HTTP connection pool factory when EE namespace interoperability mode is on. This factory * creates EE namespace interoperable connections to the server. * * @return the {@link HttpConnectionPoolFactory}. */ static HttpConnectionPoolFactory getHttpConnectionPoolFactory() { return (HttpConnectionPool::new); } /* Client side EE namespace interoperability */ private static class HttpConnectionPool extends org.wildfly.httpclient.common.HttpConnectionPool { private volatile int protocolVersion = -1; protected HttpConnectionPool(int maxConnections, int maxStreamsPerConnection, XnioWorker worker, ByteBufferPool byteBufferPool, OptionMap options, HostPool hostPool, long connectionIdleTimeout) { super(maxConnections, maxStreamsPerConnection, worker, byteBufferPool, options, hostPool, connectionIdleTimeout); } @Override int getProtocolVersion() { return protocolVersion == -1? 1 : protocolVersion; } @Override protected org.wildfly.httpclient.common.HttpConnectionPool.ClientConnectionHolder createClientConnectionHolder(ClientConnection connection, URI uri, SSLContext sslContext) { return new ClientConnectionHolder(connection, uri, sslContext); } protected class ClientConnectionHolder extends org.wildfly.httpclient.common.HttpConnectionPool.ClientConnectionHolder { private ClientConnectionHolder(ClientConnection connection, URI uri, SSLContext sslContext) { super (connection, uri, sslContext); } @Override public void sendRequest(ClientRequest request, ClientCallback<ClientExchange> callback) { switch (protocolVersion) { case -1: // new connection pool: send the protocol version header once with LATEST_VERSION value to see what will be the response request.getRequestHeaders().put(PROTOCOL_VERSION, LATEST_VERSION); request.putAttachment(HTTP_MARSHALLER_FACTORY_KEY, INTEROPERABLE_MARSHALLER_FACTORY); break; case Protocol.JAVAEE_PROTOCOL_VERSION: // connection is Javax EE, so we need to transform class names Javax<->Jakarta request.putAttachment(HTTP_MARSHALLER_FACTORY_KEY, INTEROPERABLE_MARSHALLER_FACTORY); break; case org.wildfly.httpclient.common.Protocol.JAKARTAEE_PROTOCOL_VERSION: default: // connection already set as Jakarta namespace, default factory can be used for marshalling // (no transformation needed) request.getRequestHeaders().put(PROTOCOL_VERSION, LATEST_VERSION); request.putAttachment(HTTP_MARSHALLER_FACTORY_KEY, DEFAULT_FACTORY); } super.sendRequest(request, new ClientCallback<ClientExchange>() { @Override public void completed(ClientExchange result) { // wrap the exchange, to handle interoperability at the result (see below) callback.completed(new EEInteroperableClientExchange(result)); } @Override public void failed(IOException e) { callback.failed(e); } }); } private final class EEInteroperableClientExchange implements ClientExchange { private final ClientExchange wrappedExchange; public EEInteroperableClientExchange(ClientExchange clientExchange) { this.wrappedExchange = clientExchange; } @Override public void setResponseListener(final ClientCallback<ClientExchange> responseListener) { wrappedExchange.setResponseListener(new ClientCallback<ClientExchange>() { @Override public void completed(ClientExchange result) { // this method adds the factory to the request instead of response, this is more efficient // we prevent adding when jakartaEE is already true and creating a new entry in the response attachment map final ClientResponse response = result.getResponse(); if (protocolVersion == -1) { // we need to check for protocol version header to define the protocol version of the pool if (LATEST_VERSION.equals(response.getResponseHeaders().getFirst(PROTOCOL_VERSION))) { // this indicates this is the first response server sends, set the protocol to 2 protocolVersion = Protocol.LATEST; // overwrite previous attachment, no transformation is needed for this connection any more result.getRequest().putAttachment(HTTP_MARSHALLER_FACTORY_KEY, DEFAULT_FACTORY); } else { protocolVersion = Protocol.JAVAEE_PROTOCOL_VERSION; //regarding marsh. factory key, do nothing, the connection is not Jakarta and the marshalling factory provider is already interoperable } } // else: do nothing, request already contains the default marshalling factory responseListener.completed(result); } @Override public void failed(IOException e) { responseListener.failed(e); } }); } @Override public void setContinueHandler(ContinueNotification continueHandler) { wrappedExchange.setContinueHandler(continueHandler); } @Override public void setPushHandler(PushCallback pushCallback) { wrappedExchange.setPushHandler(pushCallback); } @Override public StreamSinkChannel getRequestChannel() { return wrappedExchange.getRequestChannel(); } @Override public StreamSourceChannel getResponseChannel() { return wrappedExchange.getResponseChannel(); } @Override public ClientRequest getRequest() { return wrappedExchange.getRequest(); } @Override public ClientResponse getResponse() { return wrappedExchange.getResponse(); } @Override public ClientResponse getContinueResponse() { return wrappedExchange.getContinueResponse(); } @Override public ClientConnection getConnection() { return wrappedExchange.getConnection(); } @Override public <T> T getAttachment(AttachmentKey<T> key) { return wrappedExchange.getAttachment(key); } @Override public <T> List<T> getAttachmentList(AttachmentKey<? extends List<T>> key) { return wrappedExchange.getAttachmentList(key); } @Override public <T> T putAttachment(AttachmentKey<T> key, T value) { return wrappedExchange.putAttachment(key, value); } @Override public <T> T removeAttachment(AttachmentKey<T> key) { return wrappedExchange.removeAttachment(key); } @Override public <T> void addToAttachmentList(AttachmentKey<AttachmentList<T>> key, T value) { wrappedExchange.addToAttachmentList(key, value); } } } } /* Server side EE namespace interoperability */ private static class EENamespaceInteroperabilityHandler implements HttpHandler { private final HttpHandler next; EENamespaceInteroperabilityHandler(HttpHandler next) { this.next = next; } @Override public void handleRequest(HttpServerExchange exchange) throws Exception { if (LATEST_VERSION.equals(exchange.getRequestHeaders().getFirst(PROTOCOL_VERSION))) { // respond that this end also supports version two exchange.getResponseHeaders().add(PROTOCOL_VERSION, LATEST_VERSION); // transformation is required for unmarshalling because client is on EE namespace interoperable mode exchange.putAttachment(HTTP_UNMARSHALLER_FACTORY_KEY, INTEROPERABLE_MARSHALLER_FACTORY); // no transformation required for marshalling, server is sending response in Jakarta exchange.putAttachment(HTTP_MARSHALLER_FACTORY_KEY, DEFAULT_FACTORY); } else { // transformation is required for unmarshalling request and marshalling response, // because server is interoperable mode and the lack of a header indicates this is // either a Javax EE client or a Jakarta EE client that is not interoperable // the latter case will lead to an error when unmarshalling at client side) exchange.putAttachment(HTTP_MARSHALLER_FACTORY_KEY, INTEROPERABLE_MARSHALLER_FACTORY); exchange.putAttachment(HTTP_UNMARSHALLER_FACTORY_KEY, INTEROPERABLE_MARSHALLER_FACTORY); } next.handleRequest(exchange); } } private static class JakartaNamespaceHandler implements HttpHandler { private final HttpHandler next; JakartaNamespaceHandler(HttpHandler next) { this.next = next; } @Override public void handleRequest(HttpServerExchange exchange) throws Exception { // no transformation required whatsoever, just make sure we have a factory set // or else we will see a NPE when trying to use those attachments exchange.putAttachment(HTTP_UNMARSHALLER_FACTORY_KEY, DEFAULT_FACTORY); exchange.putAttachment(HTTP_MARSHALLER_FACTORY_KEY, DEFAULT_FACTORY); next.handleRequest(exchange); } } }
common/src/main/java/org/wildfly/httpclient/common/EENamespaceInteroperability.java
/* * JBoss, Home of Professional Open Source. * Copyright 2022 Red Hat, Inc., and individual contributors * as indicated by the @author tags. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.wildfly.httpclient.common; import io.undertow.client.ClientCallback; import io.undertow.client.ClientConnection; import io.undertow.client.ClientExchange; import io.undertow.client.ClientRequest; import io.undertow.client.ClientResponse; import io.undertow.client.ContinueNotification; import io.undertow.client.PushCallback; import io.undertow.connector.ByteBufferPool; import io.undertow.server.HttpHandler; import io.undertow.server.HttpServerExchange; import io.undertow.server.handlers.PathHandler; import io.undertow.util.AbstractAttachable; import io.undertow.util.AttachmentKey; import io.undertow.util.AttachmentList; import io.undertow.util.HttpString; import org.wildfly.security.manager.WildFlySecurityManager; import org.xnio.OptionMap; import org.xnio.XnioWorker; import org.xnio.channels.StreamSinkChannel; import org.xnio.channels.StreamSourceChannel; import javax.net.ssl.SSLContext; import java.io.IOException; import java.net.URI; import java.util.List; import static org.jboss.marshalling.ClassNameTransformer.JAVAEE_TO_JAKARTAEE; import static org.wildfly.httpclient.common.HttpMarshallerFactory.DEFAULT_FACTORY; import static org.wildfly.httpclient.common.Protocol.VERSION_ONE_PATH; import static org.wildfly.httpclient.common.Protocol.VERSION_TWO_PATH; /** * EE namespace interoperability implementation for allowing Jakarta EE namespace servers and clients communication with * Javax EE namespace endpoints. * * EE namespace interoperability must be enabled on all Jakarta servers and clients to make communication * among them possible. * * @author Flavia Rainone * @author Richard Opalka */ final class EENamespaceInteroperability { // Batavia transformer sensible constant - it can start with either "javax." or "jakarta." if transformation was performed private static final String VARIABLE_CONSTANT = "jakarta.ejb.FAKE_STRING"; private static final boolean JAKARTAEE_ENVIRONMENT = VARIABLE_CONSTANT.startsWith("jakarta"); /** * Indicates if EE namespace interoperable mode is enabled. */ static final boolean EE_NAMESPACE_INTEROPERABLE_MODE = JAKARTAEE_ENVIRONMENT && Boolean.parseBoolean( WildFlySecurityManager.getPropertyPrivileged("org.wildfly.ee.namespace.interop", "false")); // header indicating the protocol version mode that is being used by the request/response sender private static final HttpString PROTOCOL_VERSION = new HttpString("x-wf-version"); // value for PROTOCOL_VERSION header: used to handshake a higher version, only when both ends use EE jakarta namespace private static final String LATEST_VERSION = String.valueOf(Protocol.LATEST); // key used to attach http marshaller factory to a client request / server exchange private static final AttachmentKey<HttpMarshallerFactory> HTTP_MARSHALLER_FACTORY_KEY = AttachmentKey.create(HttpMarshallerFactory.class); // key used to attach an http unmarshaller factory to a server exchange private static final AttachmentKey<HttpMarshallerFactory> HTTP_UNMARSHALLER_FACTORY_KEY = AttachmentKey.create(HttpMarshallerFactory.class); // marshaller factory to be used when Javax<->Jakarta transformation is needed private static final HttpMarshallerFactory INTEROPERABLE_MARSHALLER_FACTORY = new HttpMarshallerFactory(JAVAEE_TO_JAKARTAEE); static { if (EE_NAMESPACE_INTEROPERABLE_MODE) { HttpClientMessages.MESSAGES.javaeeToJakartaeeBackwardCompatibilityLayerInstalled(); } } private EENamespaceInteroperability() {} /** * Wraps the HTTP server handler into an EE namespace interoperable handler. Such handler implements the * EE namespace interoperability at the server side before delegating to the wrapped {@code httpHandler} * * @param httpHandler the handler to be wrapped * @return handler the ee namespace interoperability handler */ static HttpHandler createInteroperabilityHandler(HttpHandler httpHandler) { return createProtocolVersionHttpHandler(new EENamespaceInteroperabilityHandler(httpHandler), new JakartaNamespaceHandler(httpHandler)); } static HttpHandler createProtocolVersionHttpHandler(HttpHandler interoperabilityHandler, HttpHandler latestProtocolHandler) { final PathHandler versionPathHandler = new PathHandler(); versionPathHandler.addPrefixPath(VERSION_ONE_PATH, interoperabilityHandler); versionPathHandler.addPrefixPath(VERSION_TWO_PATH, latestProtocolHandler); return versionPathHandler; } /** * Returns the HTTPMarshallerFactoryProvider instance responsible for taking care of marshalling * and unmarshalling according to the values negotiated by the ee namespace interoperability headers. * * @return the HTTPMarshallerFactoryProvider. All marshalling and unmarshalling done at both server * and client side have to be done through a factory provided by this object. */ static HttpMarshallerFactoryProvider getHttpMarshallerFactoryProvider() { return new HttpMarshallerFactoryProvider() { @Override public HttpMarshallerFactory getMarshallerFactory(AbstractAttachable attachable) { return attachable.getAttachment(HTTP_MARSHALLER_FACTORY_KEY); } @Override public HttpMarshallerFactory getUnmarshallerFactory(AbstractAttachable attachable) { return attachable.getAttachment(HTTP_UNMARSHALLER_FACTORY_KEY); } }; } /** * Returns the HTTP connection pool factory when EE namespace interoperability mode is on. This factory * creates EE namespace interoperable connections to the server. * * @return the {@link HttpConnectionPoolFactory}. */ static HttpConnectionPoolFactory getHttpConnectionPoolFactory() { return (HttpConnectionPool::new); } /* Client side EE namespace interoperability */ private static class HttpConnectionPool extends org.wildfly.httpclient.common.HttpConnectionPool { private volatile int protocolVersion = -1; protected HttpConnectionPool(int maxConnections, int maxStreamsPerConnection, XnioWorker worker, ByteBufferPool byteBufferPool, OptionMap options, HostPool hostPool, long connectionIdleTimeout) { super(maxConnections, maxStreamsPerConnection, worker, byteBufferPool, options, hostPool, connectionIdleTimeout); } @Override int getProtocolVersion() { return protocolVersion == -1? 1 : protocolVersion; } @Override protected org.wildfly.httpclient.common.HttpConnectionPool.ClientConnectionHolder createClientConnectionHolder(ClientConnection connection, URI uri, SSLContext sslContext) { return new ClientConnectionHolder(connection, uri, sslContext); } protected class ClientConnectionHolder extends org.wildfly.httpclient.common.HttpConnectionPool.ClientConnectionHolder { private ClientConnectionHolder(ClientConnection connection, URI uri, SSLContext sslContext) { super (connection, uri, sslContext); } @Override public void sendRequest(ClientRequest request, ClientCallback<ClientExchange> callback) { switch (protocolVersion) { case -1: // new connection pool: send the protocol version header once with LATEST_VERSION value to see what will be the response request.getRequestHeaders().put(PROTOCOL_VERSION, LATEST_VERSION); request.putAttachment(HTTP_MARSHALLER_FACTORY_KEY, INTEROPERABLE_MARSHALLER_FACTORY); break; case Protocol.JAVAEE_PROTOCOL_VERSION: // connection is Javax EE, so we need to transform class names Javax<->Jakarta request.putAttachment(HTTP_MARSHALLER_FACTORY_KEY, INTEROPERABLE_MARSHALLER_FACTORY); break; case org.wildfly.httpclient.common.Protocol.JAKARTAEE_PROTOCOL_VERSION: default: // connection already set as Jakarta namespace, default factory can be used for marshalling // (no transformation needed) request.getRequestHeaders().put(PROTOCOL_VERSION, LATEST_VERSION); request.putAttachment(HTTP_MARSHALLER_FACTORY_KEY, DEFAULT_FACTORY); } super.sendRequest(request, new ClientCallback<ClientExchange>() { @Override public void completed(ClientExchange result) { // wrap the exchange, to handle interoperability at the result (see below) callback.completed(new EEInteroperableClientExchange(result)); } @Override public void failed(IOException e) { callback.failed(e); } }); } private final class EEInteroperableClientExchange implements ClientExchange { private final ClientExchange wrappedExchange; public EEInteroperableClientExchange(ClientExchange clientExchange) { this.wrappedExchange = clientExchange; } @Override public void setResponseListener(final ClientCallback<ClientExchange> responseListener) { wrappedExchange.setResponseListener(new ClientCallback<ClientExchange>() { @Override public void completed(ClientExchange result) { // this method adds the factory to the request instead of response, this is more efficient // we prevent adding when jakartaEE is already true and creating a new entry in the response attachment map final ClientResponse response = result.getResponse(); if (protocolVersion == -1) { // we need to check for protocol version header to define the protocol version of the pool if (LATEST_VERSION.equals(response.getResponseHeaders().getFirst(PROTOCOL_VERSION))) { // this indicates this is the first response server sends, set the protocol to 2 protocolVersion = Protocol.LATEST; // overwrite previous attachment, no transformation is needed for this connection any more result.getRequest().putAttachment(HTTP_MARSHALLER_FACTORY_KEY, DEFAULT_FACTORY); } else { protocolVersion = Protocol.JAVAEE_PROTOCOL_VERSION; //regarding marsh. factory key, do nothing, the connection is not Jakarta and the marshalling factory provider is already interoperable } } // else: do nothing, request already contains the default marshalling factory responseListener.completed(result); } @Override public void failed(IOException e) { responseListener.failed(e); } }); } @Override public void setContinueHandler(ContinueNotification continueHandler) { wrappedExchange.setContinueHandler(continueHandler); } @Override public void setPushHandler(PushCallback pushCallback) { wrappedExchange.setPushHandler(pushCallback); } @Override public StreamSinkChannel getRequestChannel() { return wrappedExchange.getRequestChannel(); } @Override public StreamSourceChannel getResponseChannel() { return wrappedExchange.getResponseChannel(); } @Override public ClientRequest getRequest() { return wrappedExchange.getRequest(); } @Override public ClientResponse getResponse() { return wrappedExchange.getResponse(); } @Override public ClientResponse getContinueResponse() { return wrappedExchange.getContinueResponse(); } @Override public ClientConnection getConnection() { return wrappedExchange.getConnection(); } @Override public <T> T getAttachment(AttachmentKey<T> key) { return wrappedExchange.getAttachment(key); } @Override public <T> List<T> getAttachmentList(AttachmentKey<? extends List<T>> key) { return wrappedExchange.getAttachmentList(key); } @Override public <T> T putAttachment(AttachmentKey<T> key, T value) { return wrappedExchange.putAttachment(key, value); } @Override public <T> T removeAttachment(AttachmentKey<T> key) { return wrappedExchange.removeAttachment(key); } @Override public <T> void addToAttachmentList(AttachmentKey<AttachmentList<T>> key, T value) { wrappedExchange.addToAttachmentList(key, value); } } } } /* Server side EE namespace interoperability */ private static class EENamespaceInteroperabilityHandler implements HttpHandler { private final HttpHandler next; EENamespaceInteroperabilityHandler(HttpHandler next) { this.next = next; } @Override public void handleRequest(HttpServerExchange exchange) throws Exception { if (LATEST_VERSION.equals(exchange.getRequestHeaders().getFirst(PROTOCOL_VERSION))) { // respond that this end also supports version two exchange.getResponseHeaders().add(PROTOCOL_VERSION, LATEST_VERSION); // transformation is required for unmarshalling because client is on EE namespace interoperable mode exchange.putAttachment(HTTP_UNMARSHALLER_FACTORY_KEY, INTEROPERABLE_MARSHALLER_FACTORY); // no transformation required for marshalling, server is sending response in Jakarta exchange.putAttachment(HTTP_MARSHALLER_FACTORY_KEY, DEFAULT_FACTORY); } else { // transformation is required for unmarshalling request and marshalling response, // because server is interoperable mode and the lack of a header indicates this is // either a Javax EE client or a Jakarta EE client that is not interoperable // the latter case will lead to an error when unmarshalling at client side) exchange.putAttachment(HTTP_MARSHALLER_FACTORY_KEY, INTEROPERABLE_MARSHALLER_FACTORY); exchange.putAttachment(HTTP_UNMARSHALLER_FACTORY_KEY, INTEROPERABLE_MARSHALLER_FACTORY); } next.handleRequest(exchange); } } private static class JakartaNamespaceHandler implements HttpHandler { private final HttpHandler next; JakartaNamespaceHandler(HttpHandler next) { this.next = next; } @Override public void handleRequest(HttpServerExchange exchange) throws Exception { // no transformation required whatsoever, just make sure we have a factory set // or else we will see a NPE when trying to use those attachments exchange.putAttachment(HTTP_UNMARSHALLER_FACTORY_KEY, DEFAULT_FACTORY); exchange.putAttachment(HTTP_MARSHALLER_FACTORY_KEY, DEFAULT_FACTORY); next.handleRequest(exchange); } } }
[WEJBHTTP-87] EENamespaceInteroperability's VARIABLE_CONSTANT and JAKARTAEE_ENVIRONMENT are useless now when Jakarta EE API version is the only one available
common/src/main/java/org/wildfly/httpclient/common/EENamespaceInteroperability.java
[WEJBHTTP-87] EENamespaceInteroperability's VARIABLE_CONSTANT and JAKARTAEE_ENVIRONMENT are useless now when Jakarta EE API version is the only one available
Java
apache-2.0
32d1d29b96678f324f837ae01da45d08634ae9d7
0
Qi4j/qi4j-sdk,ramtej/Qi4j.Repo.4.Sync,ramtej/Qi4j.Repo.4.Sync,Qi4j/qi4j-extensions,apache/zest-qi4j,joobn72/qi4j-sdk,apache/zest-qi4j,ramtej/Qi4j.Repo.4.Sync,ramtej/Qi4j.Repo.4.Sync,joobn72/qi4j-sdk,apache/zest-qi4j,joobn72/qi4j-sdk,ramtej/Qi4j.Repo.4.Sync,joobn72/qi4j-sdk,Qi4j/qi4j-sdk,ramtej/Qi4j.Repo.4.Sync,Qi4j/qi4j-sdk,apache/zest-qi4j,apache/zest-qi4j,joobn72/qi4j-sdk,joobn72/qi4j-sdk,Qi4j/qi4j-sdk,Qi4j/qi4j-extensions,Qi4j/qi4j-sdk
/* * Copyright (c) 2010, Paul Merlin. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.qi4j.entitystore.sql; import org.apache.derby.iapi.services.io.FileUtil; import org.junit.Ignore; import org.qi4j.api.common.Visibility; import org.qi4j.api.unitofwork.UnitOfWork; import org.qi4j.bootstrap.AssemblyException; import org.qi4j.bootstrap.ModuleAssembly; import org.qi4j.entitystore.memory.MemoryEntityStoreService; import org.qi4j.entitystore.sql.assembly.DerbySQLEntityStoreAssembler; import org.qi4j.entitystore.sql.internal.SQLs; import org.qi4j.library.sql.common.SQLConfiguration; import org.qi4j.library.sql.common.SQLUtil; import org.qi4j.library.sql.ds.DBCPDataSourceConfiguration; import org.qi4j.test.entity.AbstractEntityStoreTest; import java.sql.Connection; import java.sql.Statement; /** * @author Stanislav Muhametsin * @author Paul Merlin */ @Ignore // Do not work anymore since the use of java-sql-generator that do not support Derby public class DerbySQLEntityStoreTest extends AbstractEntityStoreTest { @Override @SuppressWarnings( "unchecked" ) public void assemble( ModuleAssembly module ) throws AssemblyException { super.assemble( module ); new DerbySQLEntityStoreAssembler().assemble( module ); ModuleAssembly config = module.layer().module( "config" ); config.services( MemoryEntityStoreService.class ); config.entities( DBCPDataSourceConfiguration.class, SQLConfiguration.class ).visibleIn( Visibility.layer ); } @Override public void tearDown() throws Exception { if( unitOfWorkFactory == null ) { return; } UnitOfWork uow = this.unitOfWorkFactory.newUnitOfWork(); try { SQLConfiguration config = uow.get( SQLConfiguration.class, DerbySQLEntityStoreAssembler.ENTITYSTORE_SERVICE_NAME ); Connection connection = SQLUtil.getConnection( serviceLocator ); String schemaName = config.schemaName().get(); if( schemaName == null ) { schemaName = SQLs.DEFAULT_SCHEMA_NAME; } Statement stmt = null; try { stmt = connection.createStatement(); stmt.execute( String.format( "DELETE FROM %s." + SQLs.TABLE_NAME, schemaName ) ); connection.commit(); } finally { SQLUtil.closeQuietly( stmt ); } // String str = config.connectionString().get(); // StringBuilder connectionString = new StringBuilder( str ); // if( !str.contains( ";" ) ) // { // connectionString.append( ";" ); // } // connectionString.append( "shutdown=true" ); FileUtil.removeDirectory( "target/qi4j-data" ); } finally { uow.discard(); super.tearDown(); } } }
entitystore-sql/src/test/java/org/qi4j/entitystore/sql/DerbySQLEntityStoreTest.java
/* * Copyright (c) 2010, Paul Merlin. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package org.qi4j.entitystore.sql; import java.sql.Connection; import java.sql.Statement; import org.apache.derby.iapi.services.io.FileUtil; import org.junit.Assume; import org.qi4j.api.common.Visibility; import org.qi4j.api.unitofwork.UnitOfWork; import org.qi4j.bootstrap.AssemblyException; import org.qi4j.bootstrap.ModuleAssembly; import org.qi4j.entitystore.memory.MemoryEntityStoreService; import org.qi4j.entitystore.sql.assembly.DerbySQLEntityStoreAssembler; import org.qi4j.entitystore.sql.internal.SQLs; import org.qi4j.library.sql.common.SQLConfiguration; import org.qi4j.library.sql.common.SQLUtil; import org.qi4j.library.sql.ds.DBCPDataSourceConfiguration; import org.qi4j.test.entity.AbstractEntityStoreTest; /** * @author Stanislav Muhametsin * @author Paul Merlin */ public class DerbySQLEntityStoreTest extends AbstractEntityStoreTest { @Override @SuppressWarnings( "unchecked" ) public void assemble( ModuleAssembly module ) throws AssemblyException { super.assemble( module ); try { new DerbySQLEntityStoreAssembler().assemble( module ); } catch( AssemblyException e ) { System.err.println( "TEST WILL BE SKIPPED!!!!!" ); Assume.assumeNoException( e ); } ModuleAssembly config = module.layer().module( "config" ); config.services( MemoryEntityStoreService.class ); config.entities( DBCPDataSourceConfiguration.class, SQLConfiguration.class ).visibleIn( Visibility.layer ); } @Override public void tearDown() throws Exception { if( unitOfWorkFactory == null ) { return; } UnitOfWork uow = this.unitOfWorkFactory.newUnitOfWork(); try { SQLConfiguration config = uow.get( SQLConfiguration.class, DerbySQLEntityStoreAssembler.ENTITYSTORE_SERVICE_NAME ); Connection connection = SQLUtil.getConnection( serviceLocator ); String schemaName = config.schemaName().get(); if( schemaName == null ) { schemaName = SQLs.DEFAULT_SCHEMA_NAME; } Statement stmt = null; try { stmt = connection.createStatement(); stmt.execute( String.format( "DELETE FROM %s." + SQLs.TABLE_NAME, schemaName ) ); connection.commit(); } finally { SQLUtil.closeQuietly( stmt ); } // String str = config.connectionString().get(); // StringBuilder connectionString = new StringBuilder( str ); // if( !str.contains( ";" ) ) // { // connectionString.append( ";" ); // } // connectionString.append( "shutdown=true" ); FileUtil.removeDirectory( "target/qi4j-data" ); } finally { uow.discard(); super.tearDown(); } } }
entitystore-sql: add @Ignore to DerbySQLEntityStoreTest Do not work anymore since the refactor to use java-sql-generator
entitystore-sql/src/test/java/org/qi4j/entitystore/sql/DerbySQLEntityStoreTest.java
entitystore-sql: add @Ignore to DerbySQLEntityStoreTest
Java
apache-2.0
630a729ca86fb8aa759dc5af4109aafe96cd1964
0
researchstudio-sat/webofneeds,researchstudio-sat/webofneeds,researchstudio-sat/webofneeds,researchstudio-sat/webofneeds,researchstudio-sat/webofneeds,researchstudio-sat/webofneeds,researchstudio-sat/webofneeds
package won.protocol.util; import com.hp.hpl.jena.datatypes.xsd.XSDDatatype; import com.hp.hpl.jena.query.*; import com.hp.hpl.jena.rdf.model.*; import com.hp.hpl.jena.rdf.model.impl.PropertyImpl; import com.hp.hpl.jena.rdf.model.impl.ResourceImpl; import com.hp.hpl.jena.sparql.path.Path; import com.hp.hpl.jena.sparql.path.PathParser; import com.hp.hpl.jena.vocabulary.RDF; import org.apache.camel.component.dataset.DataSet; import org.apache.jena.riot.Lang; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import won.protocol.exception.DataIntegrityException; import won.protocol.exception.IncorrectPropertyCountException; import won.protocol.message.WonMessage; import won.protocol.message.WonSignatureData; import won.protocol.model.ConnectionState; import won.protocol.model.Facet; import won.protocol.model.Match; import won.protocol.model.NeedState; import won.protocol.service.WonNodeInfo; import won.protocol.vocabulary.SFSIG; import won.protocol.vocabulary.WON; import won.protocol.vocabulary.WONMSG; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; import java.util.List; import static won.protocol.util.RdfUtils.findOrCreateBaseResource; /** * Utilities for populating/manipulating the RDF models used throughout the WON application. */ public class WonRdfUtils { public static final String NAMED_GRAPH_SUFFIX = "#data"; private static final Logger logger = LoggerFactory.getLogger(WonRdfUtils.class); public static class SignatureUtils { public static boolean isSignatureGraph(String graphUri, Model model) { // TODO check the presence of all the required triples Resource resource = model.getResource(graphUri); StmtIterator si = model.listStatements(resource, RDF.type, SFSIG.SIGNATURE); if (si.hasNext()) { return true; } return false; } public static boolean isSignature(Model model, String modelName) { // TODO check the presence of all the required triples return model.contains(model.getResource(modelName), RDF.type, SFSIG.SIGNATURE); } public static String getSignedGraphUri(String signatureGraphUri, Model signatureGraph) { String signedGraphUri = null; Resource resource = signatureGraph.getResource(signatureGraphUri); NodeIterator ni = signatureGraph.listObjectsOfProperty(resource, WONMSG.HAS_SIGNED_GRAPH_PROPERTY); if (ni.hasNext()) { signedGraphUri = ni.next().asResource().getURI(); } return signedGraphUri; } public static String getSignatureValue(String signatureGraphUri, Model signatureGraph) { String signatureValue = null; Resource resource = signatureGraph.getResource(signatureGraphUri); NodeIterator ni2 = signatureGraph.listObjectsOfProperty(resource, SFSIG.HAS_SIGNATURE_VALUE); if (ni2.hasNext()) { signatureValue = ni2.next().asLiteral().toString(); } return signatureValue; } public static WonSignatureData extractWonSignatureData(final String uri, final Model model) { return extractWonSignatureData(model.getResource(uri)); } public static WonSignatureData extractWonSignatureData(final Resource resource) { Statement stmt = resource.getRequiredProperty(WONMSG.HAS_SIGNED_GRAPH_PROPERTY); String signedGraphUri = stmt.getObject().asResource().getURI(); stmt = resource.getRequiredProperty(SFSIG.HAS_SIGNATURE_VALUE); String signatureValue = stmt.getObject().asLiteral().getString(); stmt = resource.getRequiredProperty(WONMSG.HAS_HASH_PROPERTY); String hash = stmt.getObject().asLiteral().getString(); stmt = resource.getRequiredProperty(WONMSG.HAS_PUBLIC_KEY_FINGERPRINT_PROPERTY); String fingerprint = stmt.getObject().asLiteral().getString(); stmt = resource.getRequiredProperty(SFSIG.HAS_VERIFICATION_CERT); String cert = stmt.getObject().asResource().getURI(); return new WonSignatureData(signedGraphUri, resource.getURI(), signatureValue, hash, fingerprint, cert); } /** * Adds the triples holding the signature data to the model of the specified resource, using the resource as the * subject. * @param subject * @param wonSignatureData */ public static void addSignature(Resource subject, WonSignatureData wonSignatureData){ assert wonSignatureData.getHash() != null; assert wonSignatureData.getSignatureValue() != null; assert wonSignatureData.getPublicKeyFingerprint() != null; assert wonSignatureData.getSignedGraphUri() != null; assert wonSignatureData.getVerificationCertificateUri() != null; Model containingGraph = subject.getModel(); subject.addProperty(RDF.type, SFSIG.SIGNATURE); subject.addProperty(WONMSG.HAS_HASH_PROPERTY, wonSignatureData.getHash()); subject.addProperty(SFSIG.HAS_SIGNATURE_VALUE, wonSignatureData.getSignatureValue()); subject.addProperty(WONMSG.HAS_SIGNED_GRAPH_PROPERTY, containingGraph.createResource(wonSignatureData.getSignedGraphUri())); subject.addProperty(WONMSG.HAS_PUBLIC_KEY_FINGERPRINT_PROPERTY, wonSignatureData.getPublicKeyFingerprint()); subject.addProperty(SFSIG.HAS_VERIFICATION_CERT, containingGraph.createResource(wonSignatureData .getVerificationCertificateUri())); } } public static class WonNodeUtils { /** * Creates a WonNodeInfo object based on the specified dataset. The first model * found in the dataset that seems to contain the data needed for a WonNodeInfo * object is used. * @param wonNodeUri * @param dataset * @return */ public static WonNodeInfo getWonNodeInfo(final URI wonNodeUri, Dataset dataset){ assert wonNodeUri != null: "wonNodeUri must not be null"; assert dataset != null: "dataset must not be null"; return RdfUtils.findFirst(dataset, new RdfUtils.ModelVisitor<WonNodeInfo>() { @Override public WonNodeInfo visit(final Model model) { //use the first blank node found for [wonNodeUri] won:hasUriPatternSpecification [blanknode] NodeIterator it = model.listObjectsOfProperty(model.getResource(wonNodeUri.toString()), WON.HAS_URI_PATTERN_SPECIFICATION); if (!it.hasNext()) return null; WonNodeInfo wonNodeInfo = new WonNodeInfo(); wonNodeInfo.setWonNodeURI(wonNodeUri.toString()); RDFNode node = it.next(); // set the URI prefixes it = model.listObjectsOfProperty(node.asResource(), WON.HAS_NEED_URI_PREFIX); if (! it.hasNext() ) return null; wonNodeInfo.setNeedURIPrefix(it.next().asLiteral().getString()); it = model.listObjectsOfProperty(node.asResource(), WON.HAS_CONNECTION_URI_PREFIX); if (! it.hasNext() ) return null; wonNodeInfo.setConnectionURIPrefix(it.next().asLiteral().getString()); it = model.listObjectsOfProperty(node.asResource(), WON.HAS_EVENT_URI_PREFIX); if (! it.hasNext() ) return null; wonNodeInfo.setEventURIPrefix(it.next().asLiteral().getString()); // set the need list URI it = model.listObjectsOfProperty(model.getResource(wonNodeUri.toString()), WON.HAS_NEED_LIST); if (it.hasNext() ) { wonNodeInfo.setNeedListURI(it.next().asNode().getURI()); } else { wonNodeInfo.setNeedListURI(wonNodeInfo.getNeedURIPrefix()); } // set the supported protocol implementations String queryString = "SELECT ?protocol ?param ?value WHERE { ?a <%s> ?c. " + "?c <%s> ?protocol. ?c ?param ?value. FILTER ( ?value != ?protocol ) }"; queryString = String.format(queryString, WON.SUPPORTS_WON_PROTOCOL_IMPL.toString(), RDF.getURI() + "type"); Query protocolQuery = QueryFactory.create(queryString); QueryExecution qexec = QueryExecutionFactory.create(protocolQuery, model); ResultSet rs = qexec.execSelect(); while (rs.hasNext()) { QuerySolution qs = rs.nextSolution(); String protocol = rdfNodeToString(qs.get("protocol")); String param = rdfNodeToString(qs.get("param")); String value = rdfNodeToString(qs.get("value")); wonNodeInfo.setSupportedProtocolImplParamValue(protocol, param, value); } return wonNodeInfo; } }); } private static String rdfNodeToString(RDFNode node) { if (node.isLiteral()) { return node.asLiteral().getString(); } else if (node.isResource()) { return node.asResource().getURI(); } return null; } } public static class MessageUtils { /** * Adds the specified text as a won:hasTextMessage to the model's base resource. * @param message * @return */ public static Model addMessage(Model model, String message) { Resource baseRes = RdfUtils.findOrCreateBaseResource(model); baseRes.addProperty(WON.HAS_TEXT_MESSAGE, message, XSDDatatype.XSDstring); return model; } /** * Creates an RDF model containing a text message. * @param message * @return */ public static Model textMessage(String message) { Model messageModel = createModelWithBaseResource(); Resource baseRes = messageModel.createResource(messageModel.getNsPrefixURI("")); baseRes.addProperty(WON.HAS_TEXT_MESSAGE,message, XSDDatatype.XSDstring); return messageModel; } /** * Creates an RDF model containing a generic message. * * @return */ public static Model genericMessage(URI predicate, URI object) { return genericMessage(new PropertyImpl(predicate.toString()), new ResourceImpl(object.toString())); } /** * Creates an RDF model containing a generic message. * * @return */ public static Model genericMessage(Property predicate, Resource object) { Model messageModel = createModelWithBaseResource(); Resource baseRes = RdfUtils.getBaseResource(messageModel); baseRes.addProperty(RDF.type, WONMSG.TYPE_CONNECTION_MESSAGE); baseRes.addProperty(predicate, object); return messageModel; } /** * Creates an RDF model containing a feedback message referring to the specified resource * that is either positive or negative. * @return */ public static Model binaryFeedbackMessage(URI forResource, boolean isFeedbackPositive) { Model messageModel = createModelWithBaseResource(); Resource baseRes = RdfUtils.getBaseResource(messageModel); Resource feedbackNode = messageModel.createResource(); baseRes.addProperty(WON.HAS_FEEDBACK, feedbackNode); feedbackNode.addProperty(WON.HAS_BINARY_RATING, isFeedbackPositive ? WON.GOOD : WON.BAD); feedbackNode.addProperty(WON.FOR_RESOURCE, messageModel.createResource(forResource.toString())); return messageModel; } /** * Returns the first won:hasTextMessage object, or null if none is found. * Won't work on WonMessage models, removal depends on refactoring of BA facet code * @param model * @return */ @Deprecated public static String getTextMessage(Model model){ Statement stmt = model.getProperty(RdfUtils.getBaseResource(model),WON.HAS_TEXT_MESSAGE); if (stmt != null) { return stmt.getObject().asLiteral().getLexicalForm(); } return null; } /** * Returns the first won:hasTextMessage object, or null if none is found. * @param wonMessage * @return */ public static String getTextMessage(final WonMessage wonMessage){ return RdfUtils.findFirst(wonMessage.getCompleteDataset(), new RdfUtils.ModelVisitor<String>() { @Override public String visit(Model model) { Statement stmt = model.getProperty(model.getResource(wonMessage.getMessageURI().toString()), WON.HAS_TEXT_MESSAGE); if (stmt != null) { return stmt.getObject().asLiteral().getLexicalForm(); } URI remoteMessageURI = wonMessage.getCorrespondingRemoteMessageURI(); if (remoteMessageURI != null){ stmt = model.getProperty(model.getResource(remoteMessageURI.toString()), WON.HAS_TEXT_MESSAGE); if (stmt != null) { return stmt.getObject().asLiteral().getLexicalForm(); } } return null; } }); } /** * Converts the specified hint message into a Match object. * @param wonMessage * @return a match object or null if the message is not a hint message. */ public static Match toMatch(final WonMessage wonMessage) { if (!WONMSG.TYPE_HINT.equals(wonMessage.getMessageType().getResource())){ return null; } Match match = new Match(); match.setFromNeed(wonMessage.getReceiverNeedURI()); Dataset messageContent = wonMessage.getMessageContent(); RDFNode score = RdfUtils.findOnePropertyFromResource(messageContent, wonMessage.getMessageURI(), WON.HAS_MATCH_SCORE); if (!score.isLiteral()) return null; match.setScore(score.asLiteral().getDouble()); RDFNode counterpart = RdfUtils.findOnePropertyFromResource(messageContent, wonMessage.getMessageURI(), WON.HAS_MATCH_COUNTERPART); if (!counterpart.isResource()) return null; match.setToNeed(URI.create(counterpart.asResource().getURI())); return match; } public static WonMessage copyByDatasetSerialization(final WonMessage toWrap) { WonMessage copied = new WonMessage(RdfUtils.readDatasetFromString( RdfUtils.writeDatasetToString(toWrap.getCompleteDataset(), Lang.TRIG) ,Lang.TRIG)); return copied; } } public static class FacetUtils { public static URI getFacet(WonMessage message){ URI uri = getObjectOfMessageProperty(message, WON.HAS_FACET); if (uri == null) { uri = getObjectOfRemoteMessageProperty(message, WON.HAS_REMOTE_FACET); } return uri; } public static URI getRemoteFacet(WonMessage message) { URI uri = getObjectOfMessageProperty(message, WON.HAS_REMOTE_FACET); if (uri == null) { uri = getObjectOfRemoteMessageProperty(message, WON.HAS_FACET); } return uri; } /** * Returns a property of the message (i.e. the object of the first triple ( [message-uri] [property] X ) * found in one of the content graphs of the specified message. */ private static URI getObjectOfMessageProperty(final WonMessage message, final Property property) { List<String> contentGraphUris = message.getContentGraphURIs(); Dataset contentGraphs = message.getMessageContent(); URI messageURI = message.getMessageURI(); for (String graphUri: contentGraphUris) { Model contentGraph = contentGraphs.getNamedModel(graphUri); StmtIterator smtIter = contentGraph.getResource(messageURI.toString()).listProperties(property); if (smtIter.hasNext()) { return URI.create(smtIter.nextStatement().getObject().asResource().getURI()); } } return null; } /** * Returns a property of the corresponding remote message (i.e. the object of the first triple ( * [corresponding-remote-message-uri] [property] X ) * found in one of the content graphs of the specified message. */ private static URI getObjectOfRemoteMessageProperty(final WonMessage message, final Property property) { List<String> contentGraphUris = message.getContentGraphURIs(); Dataset contentGraphs = message.getMessageContent(); URI messageURI = message.getCorrespondingRemoteMessageURI(); if (messageURI != null) { for (String graphUri : contentGraphUris) { Model contentGraph = contentGraphs.getNamedModel(graphUri); StmtIterator smtIter = contentGraph.getResource(messageURI.toString()).listProperties(property); if (smtIter.hasNext()) { return URI.create(smtIter.nextStatement().getObject().asResource().getURI()); } } } return null; } /** * Returns all facets found in the model, attached to the null relative URI '<>'. * Returns an empty collection if there is no such facet. * @param content * @return */ public static Collection<URI> getFacets(Model content) { Resource baseRes = RdfUtils.getBaseResource(content); StmtIterator stmtIterator = baseRes.listProperties(WON.HAS_FACET); LinkedList<URI> ret = new LinkedList<URI>(); while (stmtIterator.hasNext()){ RDFNode object = stmtIterator.nextStatement().getObject(); if (object.isURIResource()){ ret.add(URI.create(object.asResource().getURI())); } } return ret; } /** * Adds a triple to the model of the form <> won:hasFacet [facetURI]. * @param content * @param facetURI */ public static void addFacet(final Model content, final URI facetURI) { Resource baseRes = RdfUtils.getBaseResource(content); baseRes.addProperty(WON.HAS_FACET, content.createResource(facetURI.toString())); } /** * Adds a triple to the model of the form <> won:hasRemoteFacet [facetURI]. * @param content * @param facetURI */ public static void addRemoteFacet(final Model content, final URI facetURI) { Resource baseRes = RdfUtils.getBaseResource(content); baseRes.addProperty(WON.HAS_REMOTE_FACET, content.createResource(facetURI.toString())); } /** * Creates a model for connecting two facets.CONNECTED.getURI().equals(connectionState) * @return */ public static Model createFacetModelForHintOrConnect(URI facet, URI remoteFacet) { Model model = ModelFactory.createDefaultModel(); Resource baseResource = findOrCreateBaseResource(model); WonRdfUtils.FacetUtils.addFacet(model, facet); WonRdfUtils.FacetUtils.addRemoteFacet(model, remoteFacet); logger.debug("facet model contains these facets: from:{} to:{}", facet, remoteFacet); return model; } } public static class ConnectionUtils { public static boolean isConnected(Dataset connectionDataset, URI connectionUri) { URI connectionState = getConnectionState(connectionDataset, connectionUri); return ConnectionState.CONNECTED.getURI().equals(connectionState); } public static URI getConnectionState(Dataset connectionDataset, URI connectionUri) { Path statePath = PathParser.parse("won:hasConnectionState", DefaultPrefixUtils.getDefaultPrefixes()); return RdfUtils.getURIPropertyForPropertyPath(connectionDataset, connectionUri, statePath); } } public static class NeedUtils { public static URI queryWonNode(Dataset content) { URI wonNodeURI = null; final String queryString = "PREFIX won: <http://purl.org/webofneeds/model#> " + "SELECT * { { ?s won:hasWonNode ?wonNode } UNION { GRAPH ?g { ?s won:hasWonNode ?wonNode } } }"; Query query = QueryFactory.create(queryString); try (QueryExecution qexec = QueryExecutionFactory.create(query, content)) { ResultSet results = qexec.execSelect(); boolean foundOneResult = false; for (; results.hasNext(); ) { if (foundOneResult) throw new IncorrectPropertyCountException(1,2); foundOneResult = true; QuerySolution solution = results.nextSolution(); Resource r = solution.getResource("wonNode"); try { wonNodeURI = new URI(r.getURI()); } catch (URISyntaxException e) { logger.warn("caught URISyntaxException:", e); throw new DataIntegrityException("could not parse wonNodeUri: " + r.getURI(), e); } } } return wonNodeURI; } public static NeedState queryActiveStatus(Model model, URI needURI) { StmtIterator iterator = model.listStatements(model.createResource(needURI.toString()), WON.IS_IN_STATE, (RDFNode) null); if (!iterator.hasNext()) return null; NeedState result = null; while (iterator.hasNext()) { Statement s = iterator.nextStatement(); if (s.getObject().equals(WON.NEED_STATE_ACTIVE)) { if (result != null && result.equals(NeedState.INACTIVE)) throw new IncorrectPropertyCountException("More than one result found, but only one expected", 1,2); result = NeedState.ACTIVE; } else if (s.getObject().equals(WON.NEED_STATE_INACTIVE)) { if (result != null && result.equals(NeedState.ACTIVE)) throw new IncorrectPropertyCountException("More than one result found, but only one expected", 1,2); result = NeedState.INACTIVE; } } return result; } public static NeedState queryActiveStatus(Dataset content, final URI needURI) { return RdfUtils.findOne(content, new RdfUtils.ModelVisitor<NeedState>() { @Override public NeedState visit(final Model model) { return queryActiveStatus(model, needURI); } }, true); } /** * returns a list of Facet objects each set with the NeedURI and the TypeURI * * @param needURI URI which will be set to the facets * @param dataset <code>Dataset</code> object which will be searched for the facets * @return list of facets */ public static List<Facet> getFacets(final URI needURI, Dataset dataset) { return RdfUtils.visitFlattenedToList(dataset, new RdfUtils.ModelVisitor<List<Facet>>() { @Override public List<Facet> visit(final Model model) { return getFacets(needURI, model); } }); } /** * returns a list of Facet objects each set with the NeedURI and the TypeURI * * @param needURI URI which will be set to the facets * @param model <code>Model</code> object which will be searched for the facets * @return list of facets */ public static List<Facet> getFacets(URI needURI, Model model) { List<Facet> result = new ArrayList<Facet>(); StmtIterator iterator = model.listStatements(model.createResource(needURI.toString()), WON.HAS_FACET, (RDFNode) null); while (iterator.hasNext()) { Facet f = new Facet(); f.setNeedURI(needURI); f.setTypeURI(URI.create(iterator.nextStatement().getObject().asResource().getURI())); result.add(f); } return result; } /** * searches for a subject of type won:Need and returns the NeedURI * * @param dataset <code>Dataset</code> object which will be searched for the NeedURI * @return <code>URI</code> which is of type won:Need */ public static URI getNeedURI(Dataset dataset) { return RdfUtils.findOne(dataset, new RdfUtils.ModelVisitor<URI>() { @Override public URI visit(final Model model) { return getNeedURI(model); } }, true); } /** * searches for a subject of type won:Need and returns the NeedURI * * @param model <code>Model</code> object which will be searched for the NeedURI * @return <code>URI</code> which is of type won:Need */ public static URI getNeedURI(Model model) { List<URI> needURIs = new ArrayList<URI>(); ResIterator iterator = model.listSubjectsWithProperty(RDF.type, WON.NEED); while (iterator.hasNext()) { needURIs.add(URI.create(iterator.next().getURI())); } if (needURIs.size() == 0) return null; else if (needURIs.size() == 1) return needURIs.get(0); else if (needURIs.size() > 1) { URI u = needURIs.get(0); for (URI uri : needURIs) { if (!uri.equals(u)) throw new IncorrectPropertyCountException(1,2); } return u; } else return null; } /** * return the needURI of a connection * * @param dataset <code>Dataset</code> object which contains connection information * @param connectionURI * @return <code>URI</code> of the need */ public static URI getLocalNeedURIFromConnection(Dataset dataset, final URI connectionURI) { return URI.create(RdfUtils.findOnePropertyFromResource( dataset, connectionURI, WON.BELONGS_TO_NEED).asResource().getURI()); } public static URI getRemoteNeedURIFromConnection(Dataset dataset, final URI connectionURI) { return URI.create(RdfUtils.findOnePropertyFromResource( dataset, connectionURI, WON.HAS_REMOTE_NEED).asResource().getURI()); } public static URI getWonNodeURIFromConnection(Dataset dataset, final URI connectionURI) { return URI.create(RdfUtils.findOnePropertyFromResource( dataset, connectionURI, WON.HAS_WON_NODE).asResource().getURI()); } public static URI getRemoteConnectionURIFromConnection(Dataset dataset, final URI connectionURI) { return URI.create(RdfUtils.findOnePropertyFromResource( dataset, connectionURI, WON.HAS_REMOTE_CONNECTION).asResource().getURI()); } public static URI getWonNodeURIFromNeed(Dataset dataset, final URI needURI) { return URI.create(RdfUtils.findOnePropertyFromResource( dataset, needURI, WON.HAS_WON_NODE).asResource().getURI()); } /** * Extracts all triples from the dataset (which is expected to be a dataset describing * one need, expressed in multiple named graphs) and copies them to a new model. * @param dataset * @return */ public static Model getNeedModelFromNeedDataset(Dataset dataset){ assert dataset != null : "dataset must not be null"; final Model result = ModelFactory.createDefaultModel(); RdfUtils.visit(dataset,new RdfUtils.ModelVisitor<Object>() { @Override public Object visit(Model model) { result.add(model); return null; } }); return result; } public static URI queryConnectionContainer(Dataset dataset, final URI needURI) { return RdfUtils.findOne(dataset, new RdfUtils.ModelVisitor<URI>() { @Override public URI visit(final Model model) { return queryConnectionContainer(model, needURI); } }, true); } public static URI queryConnectionContainer(Model model, URI needURI) { StmtIterator iterator = model.listStatements(model.createResource(needURI.toString()), WON.HAS_CONNECTIONS, (RDFNode) null); if (!iterator.hasNext()) { return null; } URI result = null; while (iterator.hasNext()) { Statement s = iterator.nextStatement(); URI nextURI = URI.create(s.getResource().getURI()); if (result != null && !result.equals(nextURI)) throw new IncorrectPropertyCountException(1,2); result = nextURI; } return result; } public static void removeConnectionContainer(Dataset dataset, final URI needURI) { RdfUtils.visit(dataset, new RdfUtils.ModelVisitor<Object>() { @Override public Object visit(final Model model) { removeConnectionContainer(model, needURI); return null; } }); } public static void removeConnectionContainer(Model model, URI needURI) { StmtIterator iterator = model.listStatements(model.createResource(needURI.toString()), WON.HAS_CONNECTIONS, (RDFNode) null); URI result = null; while (iterator.hasNext()) { model.remove(iterator.nextStatement()); } } public static String getNeedTitle(Dataset needDataset, URI needUri) { Path titlePath = PathParser.parse("won:hasContent/dc:title", DefaultPrefixUtils.getDefaultPrefixes()); String titleString = RdfUtils.getStringPropertyForPropertyPath(needDataset, needUri, titlePath); return titleString; } /** * Checks if the need has set a certain flag set * * @param dataset need dataset * @param needURI URI of the need * @param flag Resource flag to be tested * @return true if flag is there, false otherwise */ public static boolean hasFlag(Dataset dataset, String needURI, Resource flag) { Boolean ret = RdfUtils.findFirst(dataset, new RdfUtils.ModelVisitor<Boolean>() { @Override public Boolean visit(Model model) { Resource needResource = model.getResource(needURI); boolean test = needResource.hasProperty(WON.HAS_FLAG, flag); if (test) { return true; } else { return null; } } }); if (ret == null) { ret = false; } return ret; } public static Float getLocationLatitude(Model need, URI needUri) { Path propertyPath = PathParser.parse("won:hasContent/won:hasContentDescription/won:hasLocation/<s:geo>/<s:latitude>", DefaultPrefixUtils.getDefaultPrefixes()); Float latitude = null; String lat = RdfUtils.getStringPropertyForPropertyPath(need, needUri, propertyPath); if (lat != null) { latitude = new Float(lat); } return latitude; } public static Float getLocationLongitude(Model need, URI needUri) { Path propertyPath = PathParser.parse("won:hasContent/won:hasContentDescription/won:hasLocation/<s:geo>/<s:longitude>", DefaultPrefixUtils.getDefaultPrefixes()); Float longitude = null; String lon = RdfUtils.getStringPropertyForPropertyPath(need, needUri, propertyPath); if (lon != null) { longitude = new Float(lon); } return longitude; } public static List<String> getTags(Dataset needDataset) { List<String> tags = new LinkedList<>(); Model model = NeedUtils.getNeedModelFromNeedDataset(needDataset); URI needURI = NeedUtils.getNeedURI(needDataset); Resource needContent = model.getResource(needURI.toString()).getProperty(WON.HAS_CONTENT).getResource(); StmtIterator it = needContent.listProperties(WON.HAS_TAG); while (it.hasNext()) { Statement stmt = it.next(); RDFNode obj = stmt.getObject(); if (obj.isLiteral()) { tags.add(obj.asLiteral().getString()); } } return tags; } public static String getNeedTitle(Dataset needDataset) { URI needUri = NeedUtils.getNeedURI(needDataset); return getNeedTitle(needDataset, needUri); } public static String getNeedDescription(Dataset needDataset) { Path descriptionPath = PathParser.parse("won:hasContent/won:hasTextDescription", DefaultPrefixUtils.getDefaultPrefixes()); URI needUri = NeedUtils.getNeedURI(needDataset); return RdfUtils.getStringPropertyForPropertyPath(needDataset, needUri, descriptionPath); } public static Resource getNeedResource(final Model needModel) { assert needModel != null : "needModel must not be null"; Resource needResource = null; //try fetching the base URI resource. If that is a Need, we'll assume we found the need resource String baseUri = needModel.getNsPrefixURI(""); if (baseUri != null) { //fetch the resource, check if it has the rdf:type won:Need needResource = needModel.getResource(baseUri); if (!needResource.hasProperty(RDF.type, WON.NEED)) { needResource = null; } } if (needResource != null) return needResource; //found no need resource yet. Try to find it by type. We expect to find exactly one, otherwise we report an error ResIterator it = needModel.listSubjectsWithProperty(RDF.type, WON.NEED); if (it.hasNext()) needResource = it.next(); if (it.hasNext()) throw new IllegalArgumentException("expecting only one resource of type won:Need in specified model"); if (needResource == null) throw new IllegalArgumentException("expected to find a resource of type won:Need in specified model"); return needResource; } } private static Model createModelWithBaseResource() { Model model = ModelFactory.createDefaultModel(); model.setNsPrefix("", "no:uri"); model.createResource(model.getNsPrefixURI("")); return model; } }
webofneeds/won-core/src/main/java/won/protocol/util/WonRdfUtils.java
package won.protocol.util; import com.hp.hpl.jena.datatypes.xsd.XSDDatatype; import com.hp.hpl.jena.query.*; import com.hp.hpl.jena.rdf.model.*; import com.hp.hpl.jena.rdf.model.impl.PropertyImpl; import com.hp.hpl.jena.rdf.model.impl.ResourceImpl; import com.hp.hpl.jena.sparql.path.Path; import com.hp.hpl.jena.sparql.path.PathParser; import com.hp.hpl.jena.vocabulary.RDF; import org.apache.camel.component.dataset.DataSet; import org.apache.jena.riot.Lang; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import won.protocol.exception.DataIntegrityException; import won.protocol.exception.IncorrectPropertyCountException; import won.protocol.message.WonMessage; import won.protocol.message.WonSignatureData; import won.protocol.model.ConnectionState; import won.protocol.model.Facet; import won.protocol.model.Match; import won.protocol.model.NeedState; import won.protocol.service.WonNodeInfo; import won.protocol.vocabulary.SFSIG; import won.protocol.vocabulary.WON; import won.protocol.vocabulary.WONMSG; import java.net.URI; import java.net.URISyntaxException; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedList; import java.util.List; import static won.protocol.util.RdfUtils.findOrCreateBaseResource; /** * Utilities for populating/manipulating the RDF models used throughout the WON application. */ public class WonRdfUtils { public static final String NAMED_GRAPH_SUFFIX = "#data"; private static final Logger logger = LoggerFactory.getLogger(WonRdfUtils.class); public static class SignatureUtils { public static boolean isSignatureGraph(String graphUri, Model model) { // TODO check the presence of all the required triples Resource resource = model.getResource(graphUri); StmtIterator si = model.listStatements(resource, RDF.type, SFSIG.SIGNATURE); if (si.hasNext()) { return true; } return false; } public static boolean isSignature(Model model, String modelName) { // TODO check the presence of all the required triples return model.contains(model.getResource(modelName), RDF.type, SFSIG.SIGNATURE); } public static String getSignedGraphUri(String signatureGraphUri, Model signatureGraph) { String signedGraphUri = null; Resource resource = signatureGraph.getResource(signatureGraphUri); NodeIterator ni = signatureGraph.listObjectsOfProperty(resource, WONMSG.HAS_SIGNED_GRAPH_PROPERTY); if (ni.hasNext()) { signedGraphUri = ni.next().asResource().getURI(); } return signedGraphUri; } public static String getSignatureValue(String signatureGraphUri, Model signatureGraph) { String signatureValue = null; Resource resource = signatureGraph.getResource(signatureGraphUri); NodeIterator ni2 = signatureGraph.listObjectsOfProperty(resource, SFSIG.HAS_SIGNATURE_VALUE); if (ni2.hasNext()) { signatureValue = ni2.next().asLiteral().toString(); } return signatureValue; } public static WonSignatureData extractWonSignatureData(final String uri, final Model model) { return extractWonSignatureData(model.getResource(uri)); } public static WonSignatureData extractWonSignatureData(final Resource resource) { Statement stmt = resource.getRequiredProperty(WONMSG.HAS_SIGNED_GRAPH_PROPERTY); String signedGraphUri = stmt.getObject().asResource().getURI(); stmt = resource.getRequiredProperty(SFSIG.HAS_SIGNATURE_VALUE); String signatureValue = stmt.getObject().asLiteral().getString(); stmt = resource.getRequiredProperty(WONMSG.HAS_HASH_PROPERTY); String hash = stmt.getObject().asLiteral().getString(); stmt = resource.getRequiredProperty(WONMSG.HAS_PUBLIC_KEY_FINGERPRINT_PROPERTY); String fingerprint = stmt.getObject().asLiteral().getString(); stmt = resource.getRequiredProperty(SFSIG.HAS_VERIFICATION_CERT); String cert = stmt.getObject().asResource().getURI(); return new WonSignatureData(signedGraphUri, resource.getURI(), signatureValue, hash, fingerprint, cert); } /** * Adds the triples holding the signature data to the model of the specified resource, using the resource as the * subject. * @param subject * @param wonSignatureData */ public static void addSignature(Resource subject, WonSignatureData wonSignatureData){ assert wonSignatureData.getHash() != null; assert wonSignatureData.getSignatureValue() != null; assert wonSignatureData.getPublicKeyFingerprint() != null; assert wonSignatureData.getSignedGraphUri() != null; assert wonSignatureData.getVerificationCertificateUri() != null; Model containingGraph = subject.getModel(); subject.addProperty(RDF.type, SFSIG.SIGNATURE); subject.addProperty(WONMSG.HAS_HASH_PROPERTY, wonSignatureData.getHash()); subject.addProperty(SFSIG.HAS_SIGNATURE_VALUE, wonSignatureData.getSignatureValue()); subject.addProperty(WONMSG.HAS_SIGNED_GRAPH_PROPERTY, containingGraph.createResource(wonSignatureData.getSignedGraphUri())); subject.addProperty(WONMSG.HAS_PUBLIC_KEY_FINGERPRINT_PROPERTY, wonSignatureData.getPublicKeyFingerprint()); subject.addProperty(SFSIG.HAS_VERIFICATION_CERT, containingGraph.createResource(wonSignatureData .getVerificationCertificateUri())); } } public static class WonNodeUtils { /** * Creates a WonNodeInfo object based on the specified dataset. The first model * found in the dataset that seems to contain the data needed for a WonNodeInfo * object is used. * @param wonNodeUri * @param dataset * @return */ public static WonNodeInfo getWonNodeInfo(final URI wonNodeUri, Dataset dataset){ assert wonNodeUri != null: "wonNodeUri must not be null"; assert dataset != null: "dataset must not be null"; return RdfUtils.findFirst(dataset, new RdfUtils.ModelVisitor<WonNodeInfo>() { @Override public WonNodeInfo visit(final Model model) { //use the first blank node found for [wonNodeUri] won:hasUriPatternSpecification [blanknode] NodeIterator it = model.listObjectsOfProperty(model.getResource(wonNodeUri.toString()), WON.HAS_URI_PATTERN_SPECIFICATION); if (!it.hasNext()) return null; WonNodeInfo wonNodeInfo = new WonNodeInfo(); wonNodeInfo.setWonNodeURI(wonNodeUri.toString()); RDFNode node = it.next(); // set the URI prefixes it = model.listObjectsOfProperty(node.asResource(), WON.HAS_NEED_URI_PREFIX); if (! it.hasNext() ) return null; wonNodeInfo.setNeedURIPrefix(it.next().asLiteral().getString()); it = model.listObjectsOfProperty(node.asResource(), WON.HAS_CONNECTION_URI_PREFIX); if (! it.hasNext() ) return null; wonNodeInfo.setConnectionURIPrefix(it.next().asLiteral().getString()); it = model.listObjectsOfProperty(node.asResource(), WON.HAS_EVENT_URI_PREFIX); if (! it.hasNext() ) return null; wonNodeInfo.setEventURIPrefix(it.next().asLiteral().getString()); // set the need list URI it = model.listObjectsOfProperty(model.getResource(wonNodeUri.toString()), WON.HAS_NEED_LIST); if (it.hasNext() ) { wonNodeInfo.setNeedListURI(it.next().asNode().getURI()); } else { wonNodeInfo.setNeedListURI(wonNodeInfo.getNeedURIPrefix()); } // set the supported protocol implementations String queryString = "SELECT ?protocol ?param ?value WHERE { ?a <%s> ?c. " + "?c <%s> ?protocol. ?c ?param ?value. FILTER ( ?value != ?protocol ) }"; queryString = String.format(queryString, WON.SUPPORTS_WON_PROTOCOL_IMPL.toString(), RDF.getURI() + "type"); Query protocolQuery = QueryFactory.create(queryString); QueryExecution qexec = QueryExecutionFactory.create(protocolQuery, model); ResultSet rs = qexec.execSelect(); while (rs.hasNext()) { QuerySolution qs = rs.nextSolution(); String protocol = rdfNodeToString(qs.get("protocol")); String param = rdfNodeToString(qs.get("param")); String value = rdfNodeToString(qs.get("value")); wonNodeInfo.setSupportedProtocolImplParamValue(protocol, param, value); } return wonNodeInfo; } }); } private static String rdfNodeToString(RDFNode node) { if (node.isLiteral()) { return node.asLiteral().getString(); } else if (node.isResource()) { return node.asResource().getURI(); } return null; } } public static class MessageUtils { /** * Adds the specified text as a won:hasTextMessage to the model's base resource. * @param message * @return */ public static Model addMessage(Model model, String message) { Resource baseRes = RdfUtils.findOrCreateBaseResource(model); baseRes.addProperty(WON.HAS_TEXT_MESSAGE, message, XSDDatatype.XSDstring); return model; } /** * Creates an RDF model containing a text message. * @param message * @return */ public static Model textMessage(String message) { Model messageModel = createModelWithBaseResource(); Resource baseRes = messageModel.createResource(messageModel.getNsPrefixURI("")); baseRes.addProperty(WON.HAS_TEXT_MESSAGE,message, XSDDatatype.XSDstring); return messageModel; } /** * Creates an RDF model containing a generic message. * * @return */ public static Model genericMessage(URI predicate, URI object) { return genericMessage(new PropertyImpl(predicate.toString()), new ResourceImpl(object.toString())); } /** * Creates an RDF model containing a generic message. * * @return */ public static Model genericMessage(Property predicate, Resource object) { Model messageModel = createModelWithBaseResource(); Resource baseRes = RdfUtils.getBaseResource(messageModel); baseRes.addProperty(RDF.type, WONMSG.TYPE_CONNECTION_MESSAGE); baseRes.addProperty(predicate, object); return messageModel; } /** * Creates an RDF model containing a feedback message referring to the specified resource * that is either positive or negative. * @return */ public static Model binaryFeedbackMessage(URI forResource, boolean isFeedbackPositive) { Model messageModel = createModelWithBaseResource(); Resource baseRes = RdfUtils.getBaseResource(messageModel); Resource feedbackNode = messageModel.createResource(); baseRes.addProperty(WON.HAS_FEEDBACK, feedbackNode); feedbackNode.addProperty(WON.HAS_BINARY_RATING, isFeedbackPositive ? WON.GOOD : WON.BAD); feedbackNode.addProperty(WON.FOR_RESOURCE, messageModel.createResource(forResource.toString())); return messageModel; } /** * Returns the first won:hasTextMessage object, or null if none is found. * Won't work on WonMessage models, removal depends on refactoring of BA facet code * @param model * @return */ @Deprecated public static String getTextMessage(Model model){ Statement stmt = model.getProperty(RdfUtils.getBaseResource(model),WON.HAS_TEXT_MESSAGE); if (stmt != null) { return stmt.getObject().asLiteral().getLexicalForm(); } return null; } /** * Returns the first won:hasTextMessage object, or null if none is found. * @param wonMessage * @return */ public static String getTextMessage(final WonMessage wonMessage){ return RdfUtils.findFirst(wonMessage.getCompleteDataset(), new RdfUtils.ModelVisitor<String>() { @Override public String visit(Model model) { Statement stmt = model.getProperty(model.getResource(wonMessage.getMessageURI().toString()), WON.HAS_TEXT_MESSAGE); if (stmt != null) { return stmt.getObject().asLiteral().getLexicalForm(); } URI remoteMessageURI = wonMessage.getCorrespondingRemoteMessageURI(); if (remoteMessageURI != null){ stmt = model.getProperty(model.getResource(remoteMessageURI.toString()), WON.HAS_TEXT_MESSAGE); if (stmt != null) { return stmt.getObject().asLiteral().getLexicalForm(); } } return null; } }); } /** * Converts the specified hint message into a Match object. * @param wonMessage * @return a match object or null if the message is not a hint message. */ public static Match toMatch(final WonMessage wonMessage) { if (!WONMSG.TYPE_HINT.equals(wonMessage.getMessageType().getResource())){ return null; } Match match = new Match(); match.setFromNeed(wonMessage.getReceiverNeedURI()); Dataset messageContent = wonMessage.getMessageContent(); RDFNode score = RdfUtils.findOnePropertyFromResource(messageContent, wonMessage.getMessageURI(), WON.HAS_MATCH_SCORE); if (!score.isLiteral()) return null; match.setScore(score.asLiteral().getDouble()); RDFNode counterpart = RdfUtils.findOnePropertyFromResource(messageContent, wonMessage.getMessageURI(), WON.HAS_MATCH_COUNTERPART); if (!counterpart.isResource()) return null; match.setToNeed(URI.create(counterpart.asResource().getURI())); return match; } public static WonMessage copyByDatasetSerialization(final WonMessage toWrap) { WonMessage copied = new WonMessage(RdfUtils.readDatasetFromString( RdfUtils.writeDatasetToString(toWrap.getCompleteDataset(), Lang.TRIG) ,Lang.TRIG)); return copied; } } public static class FacetUtils { public static URI getFacet(WonMessage message){ URI uri = getObjectOfMessageProperty(message, WON.HAS_FACET); if (uri == null) { uri = getObjectOfRemoteMessageProperty(message, WON.HAS_REMOTE_FACET); } return uri; } public static URI getRemoteFacet(WonMessage message) { URI uri = getObjectOfMessageProperty(message, WON.HAS_REMOTE_FACET); if (uri == null) { uri = getObjectOfRemoteMessageProperty(message, WON.HAS_FACET); } return uri; } /** * Returns a property of the message (i.e. the object of the first triple ( [message-uri] [property] X ) * found in one of the content graphs of the specified message. */ private static URI getObjectOfMessageProperty(final WonMessage message, final Property property) { List<String> contentGraphUris = message.getContentGraphURIs(); Dataset contentGraphs = message.getMessageContent(); URI messageURI = message.getMessageURI(); for (String graphUri: contentGraphUris) { Model contentGraph = contentGraphs.getNamedModel(graphUri); StmtIterator smtIter = contentGraph.getResource(messageURI.toString()).listProperties(property); if (smtIter.hasNext()) { return URI.create(smtIter.nextStatement().getObject().asResource().getURI()); } } return null; } /** * Returns a property of the corresponding remote message (i.e. the object of the first triple ( * [corresponding-remote-message-uri] [property] X ) * found in one of the content graphs of the specified message. */ private static URI getObjectOfRemoteMessageProperty(final WonMessage message, final Property property) { List<String> contentGraphUris = message.getContentGraphURIs(); Dataset contentGraphs = message.getMessageContent(); URI messageURI = message.getCorrespondingRemoteMessageURI(); if (messageURI != null) { for (String graphUri : contentGraphUris) { Model contentGraph = contentGraphs.getNamedModel(graphUri); StmtIterator smtIter = contentGraph.getResource(messageURI.toString()).listProperties(property); if (smtIter.hasNext()) { return URI.create(smtIter.nextStatement().getObject().asResource().getURI()); } } } return null; } /** * Returns all facets found in the model, attached to the null relative URI '<>'. * Returns an empty collection if there is no such facet. * @param content * @return */ public static Collection<URI> getFacets(Model content) { Resource baseRes = RdfUtils.getBaseResource(content); StmtIterator stmtIterator = baseRes.listProperties(WON.HAS_FACET); LinkedList<URI> ret = new LinkedList<URI>(); while (stmtIterator.hasNext()){ RDFNode object = stmtIterator.nextStatement().getObject(); if (object.isURIResource()){ ret.add(URI.create(object.asResource().getURI())); } } return ret; } /** * Adds a triple to the model of the form <> won:hasFacet [facetURI]. * @param content * @param facetURI */ public static void addFacet(final Model content, final URI facetURI) { Resource baseRes = RdfUtils.getBaseResource(content); baseRes.addProperty(WON.HAS_FACET, content.createResource(facetURI.toString())); } /** * Adds a triple to the model of the form <> won:hasRemoteFacet [facetURI]. * @param content * @param facetURI */ public static void addRemoteFacet(final Model content, final URI facetURI) { Resource baseRes = RdfUtils.getBaseResource(content); baseRes.addProperty(WON.HAS_REMOTE_FACET, content.createResource(facetURI.toString())); } /** * Creates a model for connecting two facets.CONNECTED.getURI().equals(connectionState) * @return */ public static Model createFacetModelForHintOrConnect(URI facet, URI remoteFacet) { Model model = ModelFactory.createDefaultModel(); Resource baseResource = findOrCreateBaseResource(model); WonRdfUtils.FacetUtils.addFacet(model, facet); WonRdfUtils.FacetUtils.addRemoteFacet(model, remoteFacet); logger.debug("facet model contains these facets: from:{} to:{}", facet, remoteFacet); return model; } } public static class ConnectionUtils { public static boolean isConnected(Dataset connectionDataset, URI connectionUri) { URI connectionState = getConnectionState(connectionDataset, connectionUri); return ConnectionState.CONNECTED.getURI().equals(connectionState); } public static URI getConnectionState(Dataset connectionDataset, URI connectionUri) { Path statePath = PathParser.parse("won:hasConnectionState", DefaultPrefixUtils.getDefaultPrefixes()); return RdfUtils.getURIPropertyForPropertyPath(connectionDataset, connectionUri, statePath); } } public static class NeedUtils { public static URI queryWonNode(Dataset content) { URI wonNodeURI = null; final String queryString = "PREFIX won: <http://purl.org/webofneeds/model#> " + "SELECT * { { ?s won:hasWonNode ?wonNode } UNION { GRAPH ?g { ?s won:hasWonNode ?wonNode } } }"; Query query = QueryFactory.create(queryString); try (QueryExecution qexec = QueryExecutionFactory.create(query, content)) { ResultSet results = qexec.execSelect(); boolean foundOneResult = false; for (; results.hasNext(); ) { if (foundOneResult) throw new IncorrectPropertyCountException(1,2); foundOneResult = true; QuerySolution solution = results.nextSolution(); Resource r = solution.getResource("wonNode"); try { wonNodeURI = new URI(r.getURI()); } catch (URISyntaxException e) { logger.warn("caught URISyntaxException:", e); throw new DataIntegrityException("could not parse wonNodeUri: " + r.getURI(), e); } } } return wonNodeURI; } public static NeedState queryActiveStatus(Model model, URI needURI) { StmtIterator iterator = model.listStatements(model.createResource(needURI.toString()), WON.IS_IN_STATE, (RDFNode) null); if (!iterator.hasNext()) return null; NeedState result = null; while (iterator.hasNext()) { Statement s = iterator.nextStatement(); if (s.getObject().equals(WON.NEED_STATE_ACTIVE)) { if (result != null && result.equals(NeedState.INACTIVE)) throw new IncorrectPropertyCountException("More than one result found, but only one expected", 1,2); result = NeedState.ACTIVE; } else if (s.getObject().equals(WON.NEED_STATE_INACTIVE)) { if (result != null && result.equals(NeedState.ACTIVE)) throw new IncorrectPropertyCountException("More than one result found, but only one expected", 1,2); result = NeedState.INACTIVE; } } return result; } public static NeedState queryActiveStatus(Dataset content, final URI needURI) { return RdfUtils.findOne(content, new RdfUtils.ModelVisitor<NeedState>() { @Override public NeedState visit(final Model model) { return queryActiveStatus(model, needURI); } }, true); } /** * returns a list of Facet objects each set with the NeedURI and the TypeURI * * @param needURI URI which will be set to the facets * @param dataset <code>Dataset</code> object which will be searched for the facets * @return list of facets */ public static List<Facet> getFacets(final URI needURI, Dataset dataset) { return RdfUtils.visitFlattenedToList(dataset, new RdfUtils.ModelVisitor<List<Facet>>() { @Override public List<Facet> visit(final Model model) { return getFacets(needURI, model); } }); } /** * returns a list of Facet objects each set with the NeedURI and the TypeURI * * @param needURI URI which will be set to the facets * @param model <code>Model</code> object which will be searched for the facets * @return list of facets */ public static List<Facet> getFacets(URI needURI, Model model) { List<Facet> result = new ArrayList<Facet>(); StmtIterator iterator = model.listStatements(model.createResource(needURI.toString()), WON.HAS_FACET, (RDFNode) null); while (iterator.hasNext()) { Facet f = new Facet(); f.setNeedURI(needURI); f.setTypeURI(URI.create(iterator.nextStatement().getObject().asResource().getURI())); result.add(f); } return result; } /** * searches for a subject of type won:Need and returns the NeedURI * * @param dataset <code>Dataset</code> object which will be searched for the NeedURI * @return <code>URI</code> which is of type won:Need */ public static URI getNeedURI(Dataset dataset) { return RdfUtils.findOne(dataset, new RdfUtils.ModelVisitor<URI>() { @Override public URI visit(final Model model) { return getNeedURI(model); } }, true); } /** * searches for a subject of type won:Need and returns the NeedURI * * @param model <code>Model</code> object which will be searched for the NeedURI * @return <code>URI</code> which is of type won:Need */ public static URI getNeedURI(Model model) { List<URI> needURIs = new ArrayList<URI>(); ResIterator iterator = model.listSubjectsWithProperty(RDF.type, WON.NEED); while (iterator.hasNext()) { needURIs.add(URI.create(iterator.next().getURI())); } if (needURIs.size() == 0) return null; else if (needURIs.size() == 1) return needURIs.get(0); else if (needURIs.size() > 1) { URI u = needURIs.get(0); for (URI uri : needURIs) { if (!uri.equals(u)) throw new IncorrectPropertyCountException(1,2); } return u; } else return null; } /** * return the needURI of a connection * * @param dataset <code>Dataset</code> object which contains connection information * @param connectionURI * @return <code>URI</code> of the need */ public static URI getLocalNeedURIFromConnection(Dataset dataset, final URI connectionURI) { return URI.create(RdfUtils.findOnePropertyFromResource( dataset, connectionURI, WON.BELONGS_TO_NEED).asResource().getURI()); } public static URI getRemoteNeedURIFromConnection(Dataset dataset, final URI connectionURI) { return URI.create(RdfUtils.findOnePropertyFromResource( dataset, connectionURI, WON.HAS_REMOTE_NEED).asResource().getURI()); } public static URI getWonNodeURIFromConnection(Dataset dataset, final URI connectionURI) { return URI.create(RdfUtils.findOnePropertyFromResource( dataset, connectionURI, WON.HAS_WON_NODE).asResource().getURI()); } public static URI getRemoteConnectionURIFromConnection(Dataset dataset, final URI connectionURI) { return URI.create(RdfUtils.findOnePropertyFromResource( dataset, connectionURI, WON.HAS_REMOTE_CONNECTION).asResource().getURI()); } public static URI getWonNodeURIFromNeed(Dataset dataset, final URI needURI) { return URI.create(RdfUtils.findOnePropertyFromResource( dataset, needURI, WON.HAS_WON_NODE).asResource().getURI()); } /** * Extracts all triples from the dataset (which is expected to be a dataset describing * one need, expressed in multiple named graphs) and copies them to a new model. * @param dataset * @return */ public static Model getNeedModelFromNeedDataset(Dataset dataset){ assert dataset != null : "dataset must not be null"; final Model result = ModelFactory.createDefaultModel(); RdfUtils.visit(dataset,new RdfUtils.ModelVisitor<Object>() { @Override public Object visit(Model model) { result.add(model); return null; } }); return result; } public static URI queryConnectionContainer(Dataset dataset, final URI needURI) { return RdfUtils.findOne(dataset, new RdfUtils.ModelVisitor<URI>() { @Override public URI visit(final Model model) { return queryConnectionContainer(model, needURI); } }, true); } public static URI queryConnectionContainer(Model model, URI needURI) { StmtIterator iterator = model.listStatements(model.createResource(needURI.toString()), WON.HAS_CONNECTIONS, (RDFNode) null); if (!iterator.hasNext()) { return null; } URI result = null; while (iterator.hasNext()) { Statement s = iterator.nextStatement(); URI nextURI = URI.create(s.getResource().getURI()); if (result != null && !result.equals(nextURI)) throw new IncorrectPropertyCountException(1,2); result = nextURI; } return result; } public static void removeConnectionContainer(Dataset dataset, final URI needURI) { RdfUtils.visit(dataset, new RdfUtils.ModelVisitor<Object>() { @Override public Object visit(final Model model) { removeConnectionContainer(model, needURI); return null; } }); } public static void removeConnectionContainer(Model model, URI needURI) { StmtIterator iterator = model.listStatements(model.createResource(needURI.toString()), WON.HAS_CONNECTIONS, (RDFNode) null); URI result = null; while (iterator.hasNext()) { model.remove(iterator.nextStatement()); } } public static String getNeedTitle(Dataset needDataset, URI needUri) { Path titlePath = PathParser.parse("won:hasContent/dc:title", DefaultPrefixUtils.getDefaultPrefixes()); String titleString = RdfUtils.getStringPropertyForPropertyPath(needDataset, needUri, titlePath); return titleString; } /** * Checks if the need has set a certain flag set * * @param dataset need dataset * @param needURI URI of the need * @param flag Resource flag to be tested * @return true if flag is there, false otherwise */ public static boolean hasFlag(Dataset dataset, String needURI, Resource flag) { Boolean ret = RdfUtils.findFirst(dataset, new RdfUtils.ModelVisitor<Boolean>() { @Override public Boolean visit(Model model) { Resource needResource = model.getResource(needURI); boolean test = needResource.hasProperty(WON.HAS_FLAG, flag); if (test) { return true; } else { return null; } } }); if (ret == null) { ret = false; } return ret; } public static Float getLocationLatitude(Model need, URI needUri) { Path propertyPath = PathParser.parse("won:hasContent/won:hasContentDescription/won:hasLocation/<s:geo>/<s:latitude>", DefaultPrefixUtils.getDefaultPrefixes()); Float latitude = null; String lat = RdfUtils.getStringPropertyForPropertyPath(need, needUri, propertyPath); if (lat != null) { latitude = new Float(lat); } return latitude; } public static Float getLocationLongitude(Model need, URI needUri) { Path propertyPath = PathParser.parse("won:hasContent/won:hasContentDescription/won:hasLocation/<s:geo>/<s:longitude>", DefaultPrefixUtils.getDefaultPrefixes()); Float longitude = null; String lon = RdfUtils.getStringPropertyForPropertyPath(need, needUri, propertyPath); if (lon != null) { longitude = new Float(lon); } return longitude; } public static List<String> getTags(Dataset needDataset) { List<String> tags = new LinkedList<>(); Model model = NeedUtils.getNeedModelFromNeedDataset(needDataset); URI needURI = NeedUtils.getNeedURI(needDataset); Resource needContent = model.getResource(needURI.toString()).getProperty(WON.HAS_CONTENT).getResource(); StmtIterator it = needContent.listProperties(WON.HAS_TAG); while (it.hasNext()) { Statement stmt = it.next(); RDFNode obj = stmt.getObject(); if (obj.isLiteral()) { tags.add(obj.asLiteral().getString()); } } return tags; } public static String getNeedTitle(Dataset needDataset) { Path titlePath = PathParser.parse("won:hasContent/dc:title", DefaultPrefixUtils.getDefaultPrefixes()); URI needUri = NeedUtils.getNeedURI(needDataset); return RdfUtils.getStringPropertyForPropertyPath(needDataset, needUri, titlePath); } public static String getNeedDescription(Dataset needDataset) { Path descriptionPath = PathParser.parse("won:hasContent/won:hasTextDescription", DefaultPrefixUtils.getDefaultPrefixes()); URI needUri = NeedUtils.getNeedURI(needDataset); return RdfUtils.getStringPropertyForPropertyPath(needDataset, needUri, descriptionPath); } public static Resource getNeedResource(final Model needModel) { assert needModel != null : "needModel must not be null"; Resource needResource = null; //try fetching the base URI resource. If that is a Need, we'll assume we found the need resource String baseUri = needModel.getNsPrefixURI(""); if (baseUri != null) { //fetch the resource, check if it has the rdf:type won:Need needResource = needModel.getResource(baseUri); if (!needResource.hasProperty(RDF.type, WON.NEED)) { needResource = null; } } if (needResource != null) return needResource; //found no need resource yet. Try to find it by type. We expect to find exactly one, otherwise we report an error ResIterator it = needModel.listSubjectsWithProperty(RDF.type, WON.NEED); if (it.hasNext()) needResource = it.next(); if (it.hasNext()) throw new IllegalArgumentException("expecting only one resource of type won:Need in specified model"); if (needResource == null) throw new IllegalArgumentException("expected to find a resource of type won:Need in specified model"); return needResource; } } private static Model createModelWithBaseResource() { Model model = ModelFactory.createDefaultModel(); model.setNsPrefix("", "no:uri"); model.createResource(model.getNsPrefixURI("")); return model; } }
refactored getNeedTitle method
webofneeds/won-core/src/main/java/won/protocol/util/WonRdfUtils.java
refactored getNeedTitle method
Java
apache-2.0
b02f4f709baf8c3d4e800ab6bb3216f0908f679b
0
hasinhamrah/myket-licensing-sample
/* * Copyright (C) 2010 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.vending.licensing; import android.content.Context; import android.content.SharedPreferences; import android.util.Log; import org.apache.http.NameValuePair; import org.apache.http.client.utils.URLEncodedUtils; import java.net.URI; import java.net.URISyntaxException; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Non-caching policy. All requests will be sent to the licensing service, * and no local caching is performed. * <p/> * Using a non-caching policy ensures that there is no local preference data * for malicious users to tamper with. As a side effect, applications * will not be permitted to run while offline. Developers should carefully * weigh the risks of using this Policy over one which implements caching, * such as ServerManagedPolicy. * <p/> * Access to the application is only allowed if a LICENSED response is. * received. All other responses (including RETRY) will deny access. */ public class MyketServerManagedPolicy implements Policy { private static final String TAG = "MyketPolicy"; private static final String PREFS_FILE = "com.android.vending.licensing.MyketServerManagedPolicy"; private static final String PREF_LAST_RESPONSE = "lastResponse"; private static final String PREF_VALIDITY_TIMESTAMP = "validityTimestamp"; private static final String PREF_RETRY_UNTIL = "retryUntil"; private static final String PREF_MAX_RETRIES = "maxRetries"; private static final String PREF_RETRY_COUNT = "retryCount"; private static final String PREF_LAST_RESPONSE_TIME = "lastResponseTime"; private static final String PREF_LAST_BOOT_TIME = "lastBootTime"; private static final String DEFAULT_VALIDITY_TIMESTAMP = "0"; private static final String DEFAULT_RETRY_UNTIL = "0"; private static final String DEFAULT_MAX_RETRIES = "0"; private static final String DEFAULT_RETRY_COUNT = "0"; private static final String DEFAULT_LAST_RESPONSE_TIME = "0"; private static final String DEFAULT_LAST_BOOT_TIME = "0"; private static final long MILLIS_PER_MINUTE = 60 * 1000; private long mValidityTimestamp; private long mRetryUntil; private long mMaxRetries; private long mRetryCount; private long mLastResponseTime = 0; private int mLastResponse; private PreferenceObfuscator mPreferences; private long mLastBootTime = 0; /** * @param context The context for the current application * @param obfuscator An obfuscator to be used with preferences. */ public MyketServerManagedPolicy(Context context, Obfuscator obfuscator) { // Import old values SharedPreferences sp = context.getSharedPreferences(PREFS_FILE, Context.MODE_PRIVATE); mPreferences = new PreferenceObfuscator(sp, obfuscator); mLastResponse = Integer.parseInt( mPreferences.getString(PREF_LAST_RESPONSE, Integer.toString(Policy.RETRY))); mValidityTimestamp = Long.parseLong(mPreferences.getString(PREF_VALIDITY_TIMESTAMP, DEFAULT_VALIDITY_TIMESTAMP)); mRetryUntil = Long.parseLong(mPreferences.getString(PREF_RETRY_UNTIL, DEFAULT_RETRY_UNTIL)); mMaxRetries = Long.parseLong(mPreferences.getString(PREF_MAX_RETRIES, DEFAULT_MAX_RETRIES)); mRetryCount = Long.parseLong(mPreferences.getString(PREF_RETRY_COUNT, DEFAULT_RETRY_COUNT)); mLastResponseTime = Long.parseLong(mPreferences.getString(PREF_LAST_RESPONSE_TIME, DEFAULT_LAST_RESPONSE_TIME)); mLastBootTime = Long.parseLong(mPreferences.getString(PREF_LAST_BOOT_TIME, DEFAULT_LAST_BOOT_TIME)); } /** * Process a new response from the license server. * <p/> * This data will be used for computing future policy decisions. The * following parameters are processed: * <ul> * <li>VT: the timestamp that the client should consider the response * valid until * <li>GT: the timestamp that the client should ignore retry errors until * <li>GR: the number of retry errors that the client should ignore * </ul> * * @param response the result from validating the server response * @param rawData the raw server response data */ public void processServerResponse(int response, ResponseData rawData) { // Update retry counter if (response != Policy.RETRY) { setRetryCount(0); } else { setRetryCount(mRetryCount + 1); } if (response == Policy.LICENSED) { // Update server policy data Map<String, String> extras = decodeExtras(rawData.extra); setValidityTimestamp(extras.get("VT")); setRetryUntil(extras.get("GT")); setMaxRetries(extras.get("GR")); response = validateTimeOrigin(response, rawData.timestamp); } else if (response == Policy.NOT_LICENSED) { // Clear out stale policy data setValidityTimestamp(DEFAULT_VALIDITY_TIMESTAMP); setRetryUntil(DEFAULT_RETRY_UNTIL); setMaxRetries(DEFAULT_MAX_RETRIES); } setLastResponse(response); mPreferences.commit(); } /** * Returns the current time in milliseconds since device was booted * * @return current time in milliseconds */ private long currentBootTime() { return System.nanoTime() / 1000000; } /** * Validate if the time origin of device clock is the same time as origin of server clock * * @param l the response * @param serverTimestamp time of server clock * @return validated license */ private int validateTimeOrigin(int l, long serverTimestamp) { long ts = System.currentTimeMillis(); if (ts < serverTimestamp || serverTimestamp + MILLIS_PER_MINUTE < ts) { return Policy.RETRY; } return l; } /** * Set the last license response received from the server and add to * preferences. You must manually call PreferenceObfuscator.commit() to * commit these changes to disk. * * @param l the response */ private void setLastResponse(int l) { mLastResponseTime = System.currentTimeMillis(); mLastBootTime = currentBootTime(); mLastResponse = l; mPreferences.putString(PREF_LAST_RESPONSE, Integer.toString(l)); mPreferences.putString(PREF_LAST_RESPONSE_TIME, Long.toString(mLastResponseTime)); mPreferences.putString(PREF_LAST_BOOT_TIME, Long.toString(mLastBootTime)); } /** * Set the current retry count and add to preferences. You must manually * call PreferenceObfuscator.commit() to commit these changes to disk. * * @param c the new retry count */ private void setRetryCount(long c) { mRetryCount = c; mPreferences.putString(PREF_RETRY_COUNT, Long.toString(c)); } public long getRetryCount() { return mRetryCount; } /** * Set the last validity timestamp (VT) received from the server and add to * preferences. You must manually call PreferenceObfuscator.commit() to * commit these changes to disk. * * @param validityTimestamp the VT string received */ private void setValidityTimestamp(String validityTimestamp) { Long lValidityTimestamp; try { lValidityTimestamp = Long.parseLong(validityTimestamp); } catch (NumberFormatException e) { // No response or not parsable, expire in one minute. Log.w(TAG, "License validity timestamp (VT) missing, caching for a minute"); lValidityTimestamp = System.currentTimeMillis() + MILLIS_PER_MINUTE; validityTimestamp = Long.toString(lValidityTimestamp); } mValidityTimestamp = lValidityTimestamp; mPreferences.putString(PREF_VALIDITY_TIMESTAMP, validityTimestamp); } public long getValidityTimestamp() { return mValidityTimestamp; } /** * Set the retry until timestamp (GT) received from the server and add to * preferences. You must manually call PreferenceObfuscator.commit() to * commit these changes to disk. * * @param retryUntil the GT string received */ private void setRetryUntil(String retryUntil) { Long lRetryUntil; try { lRetryUntil = Long.parseLong(retryUntil); } catch (NumberFormatException e) { // No response or not parsable, expire immediately Log.w(TAG, "License retry timestamp (GT) missing, grace period disabled"); retryUntil = "0"; lRetryUntil = 0l; } mRetryUntil = lRetryUntil; mPreferences.putString(PREF_RETRY_UNTIL, retryUntil); } public long getRetryUntil() { return mRetryUntil; } /** * Set the max retries value (GR) as received from the server and add to * preferences. You must manually call PreferenceObfuscator.commit() to * commit these changes to disk. * * @param maxRetries the GR string received */ private void setMaxRetries(String maxRetries) { Long lMaxRetries; try { lMaxRetries = Long.parseLong(maxRetries); } catch (NumberFormatException e) { // No response or not parsable, expire immediately Log.w(TAG, "Licence retry count (GR) missing, grace period disabled"); maxRetries = "0"; lMaxRetries = 0l; } mMaxRetries = lMaxRetries; mPreferences.putString(PREF_MAX_RETRIES, maxRetries); } public long getMaxRetries() { return mMaxRetries; } /** * {@inheritDoc} * <p/> * This implementation allows access if either:<br> * <ol> * <li>a LICENSED response was received within the validity period * <li>a RETRY response was received in the last minute, and we are under * the RETRY count or in the RETRY period. * </ol> */ public boolean allowAccess() { long ts = System.currentTimeMillis(); if (mLastResponse == Policy.LICENSED && mLastResponseTime < ts) { // Check if device clock was changed. if (currentBootTime() - mLastBootTime > ts - mLastResponseTime + MILLIS_PER_MINUTE) { setLastResponse(Policy.RETRY); return false; } // Check if the LICENSED response occurred within the validity timeout. if (ts <= mValidityTimestamp) { // Cached LICENSED response is still valid. return true; } } else if (mLastResponse == Policy.RETRY && ts < mLastResponseTime + MILLIS_PER_MINUTE) { // Only allow access if we are within the retry period or we haven't used up our // max retries. return (ts <= mRetryUntil || mRetryCount <= mMaxRetries); } return false; } private Map<String, String> decodeExtras(String extras) { Map<String, String> results = new HashMap<String, String>(); try { URI rawExtras = new URI("?" + extras); List<NameValuePair> extraList = URLEncodedUtils.parse(rawExtras, "UTF-8"); for (NameValuePair item : extraList) { results.put(item.getName(), item.getValue()); } } catch (URISyntaxException e) { Log.w(TAG, "Invalid syntax error while decoding extras data from server."); } return results; } }
app/src/main/java/com/google/android/vending/licensing/MyketServerManagedPolicy.java
/* * Copyright (C) 2010 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.vending.licensing; import android.content.Context; import android.content.SharedPreferences; import android.util.Log; import org.apache.http.NameValuePair; import org.apache.http.client.utils.URLEncodedUtils; import java.net.URI; import java.net.URISyntaxException; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Non-caching policy. All requests will be sent to the licensing service, * and no local caching is performed. * <p/> * Using a non-caching policy ensures that there is no local preference data * for malicious users to tamper with. As a side effect, applications * will not be permitted to run while offline. Developers should carefully * weigh the risks of using this Policy over one which implements caching, * such as ServerManagedPolicy. * <p/> * Access to the application is only allowed if a LICENSED response is. * received. All other responses (including RETRY) will deny access. */ public class MyketServerManagedPolicy implements Policy { private static final String TAG = "MyketPolicy"; private static final String PREFS_FILE = "com.android.vending.licensing.MyketServerManagedPolicy"; private static final String PREF_LAST_RESPONSE = "lastResponse"; private static final String PREF_VALIDITY_TIMESTAMP = "validityTimestamp"; private static final String PREF_RETRY_UNTIL = "retryUntil"; private static final String PREF_MAX_RETRIES = "maxRetries"; private static final String PREF_RETRY_COUNT = "retryCount"; private static final String PREF_LAST_RESPONSE_TIME = "lastResponseTime"; private static final String PREF_LAST_BOOT_TIME = "lastBootTime"; private static final String DEFAULT_VALIDITY_TIMESTAMP = "0"; private static final String DEFAULT_RETRY_UNTIL = "0"; private static final String DEFAULT_MAX_RETRIES = "0"; private static final String DEFAULT_RETRY_COUNT = "0"; private static final String DEFAULT_LAST_RESPONSE_TIME = "0"; private static final String DEFAULT_LAST_BOOT_TIME = "0"; private static final long MILLIS_PER_MINUTE = 60 * 1000; private long mValidityTimestamp; private long mRetryUntil; private long mMaxRetries; private long mRetryCount; private long mLastResponseTime = 0; private int mLastResponse; private PreferenceObfuscator mPreferences; private long mLastBootTime = 0; /** * @param context The context for the current application * @param obfuscator An obfuscator to be used with preferences. */ public MyketServerManagedPolicy(Context context, Obfuscator obfuscator) { // Import old values SharedPreferences sp = context.getSharedPreferences(PREFS_FILE, Context.MODE_PRIVATE); mPreferences = new PreferenceObfuscator(sp, obfuscator); mLastResponse = Integer.parseInt( mPreferences.getString(PREF_LAST_RESPONSE, Integer.toString(Policy.RETRY))); mValidityTimestamp = Long.parseLong(mPreferences.getString(PREF_VALIDITY_TIMESTAMP, DEFAULT_VALIDITY_TIMESTAMP)); mRetryUntil = Long.parseLong(mPreferences.getString(PREF_RETRY_UNTIL, DEFAULT_RETRY_UNTIL)); mMaxRetries = Long.parseLong(mPreferences.getString(PREF_MAX_RETRIES, DEFAULT_MAX_RETRIES)); mRetryCount = Long.parseLong(mPreferences.getString(PREF_RETRY_COUNT, DEFAULT_RETRY_COUNT)); mLastResponseTime = Long.parseLong(mPreferences.getString(PREF_LAST_RESPONSE_TIME, DEFAULT_LAST_RESPONSE_TIME)); mLastBootTime = Long.parseLong(mPreferences.getString(PREF_LAST_BOOT_TIME, DEFAULT_LAST_BOOT_TIME)); } /** * Process a new response from the license server. * <p/> * This data will be used for computing future policy decisions. The * following parameters are processed: * <ul> * <li>VT: the timestamp that the client should consider the response * valid until * <li>GT: the timestamp that the client should ignore retry errors until * <li>GR: the number of retry errors that the client should ignore * </ul> * * @param response the result from validating the server response * @param rawData the raw server response data */ public void processServerResponse(int response, ResponseData rawData) { // Update retry counter if (response != Policy.RETRY) { setRetryCount(0); } else { setRetryCount(mRetryCount + 1); } if (response == Policy.LICENSED) { // Update server policy data Map<String, String> extras = decodeExtras(rawData.extra); setValidityTimestamp(extras.get("VT")); setRetryUntil(extras.get("GT")); setMaxRetries(extras.get("GR")); response = validateTimeOrigin(response, rawData.timestamp); } else if (response == Policy.NOT_LICENSED) { // Clear out stale policy data setValidityTimestamp(DEFAULT_VALIDITY_TIMESTAMP); setRetryUntil(DEFAULT_RETRY_UNTIL); setMaxRetries(DEFAULT_MAX_RETRIES); } setLastResponse(response); mPreferences.commit(); } /** * Returns the current time in milliseconds since device was booted * * @return current time in milliseconds */ private long currentBootTime() { return System.nanoTime() / 1000; } /** * Validate if the time origin of device clock is the same time as origin of server clock * * @param l the response * @param serverTimestamp time of server clock * @return validated license */ private int validateTimeOrigin(int l, long serverTimestamp) { long ts = System.currentTimeMillis(); if (ts < serverTimestamp || serverTimestamp + MILLIS_PER_MINUTE < ts) { return Policy.RETRY; } return l; } /** * Set the last license response received from the server and add to * preferences. You must manually call PreferenceObfuscator.commit() to * commit these changes to disk. * * @param l the response */ private void setLastResponse(int l) { mLastResponseTime = System.currentTimeMillis(); mLastBootTime = currentBootTime(); mLastResponse = l; mPreferences.putString(PREF_LAST_RESPONSE, Integer.toString(l)); mPreferences.putString(PREF_LAST_RESPONSE_TIME, Long.toString(mLastResponseTime)); mPreferences.putString(PREF_LAST_BOOT_TIME, Long.toString(mLastBootTime)); } /** * Set the current retry count and add to preferences. You must manually * call PreferenceObfuscator.commit() to commit these changes to disk. * * @param c the new retry count */ private void setRetryCount(long c) { mRetryCount = c; mPreferences.putString(PREF_RETRY_COUNT, Long.toString(c)); } public long getRetryCount() { return mRetryCount; } /** * Set the last validity timestamp (VT) received from the server and add to * preferences. You must manually call PreferenceObfuscator.commit() to * commit these changes to disk. * * @param validityTimestamp the VT string received */ private void setValidityTimestamp(String validityTimestamp) { Long lValidityTimestamp; try { lValidityTimestamp = Long.parseLong(validityTimestamp); } catch (NumberFormatException e) { // No response or not parsable, expire in one minute. Log.w(TAG, "License validity timestamp (VT) missing, caching for a minute"); lValidityTimestamp = System.currentTimeMillis() + MILLIS_PER_MINUTE; validityTimestamp = Long.toString(lValidityTimestamp); } mValidityTimestamp = lValidityTimestamp; mPreferences.putString(PREF_VALIDITY_TIMESTAMP, validityTimestamp); } public long getValidityTimestamp() { return mValidityTimestamp; } /** * Set the retry until timestamp (GT) received from the server and add to * preferences. You must manually call PreferenceObfuscator.commit() to * commit these changes to disk. * * @param retryUntil the GT string received */ private void setRetryUntil(String retryUntil) { Long lRetryUntil; try { lRetryUntil = Long.parseLong(retryUntil); } catch (NumberFormatException e) { // No response or not parsable, expire immediately Log.w(TAG, "License retry timestamp (GT) missing, grace period disabled"); retryUntil = "0"; lRetryUntil = 0l; } mRetryUntil = lRetryUntil; mPreferences.putString(PREF_RETRY_UNTIL, retryUntil); } public long getRetryUntil() { return mRetryUntil; } /** * Set the max retries value (GR) as received from the server and add to * preferences. You must manually call PreferenceObfuscator.commit() to * commit these changes to disk. * * @param maxRetries the GR string received */ private void setMaxRetries(String maxRetries) { Long lMaxRetries; try { lMaxRetries = Long.parseLong(maxRetries); } catch (NumberFormatException e) { // No response or not parsable, expire immediately Log.w(TAG, "Licence retry count (GR) missing, grace period disabled"); maxRetries = "0"; lMaxRetries = 0l; } mMaxRetries = lMaxRetries; mPreferences.putString(PREF_MAX_RETRIES, maxRetries); } public long getMaxRetries() { return mMaxRetries; } /** * {@inheritDoc} * <p/> * This implementation allows access if either:<br> * <ol> * <li>a LICENSED response was received within the validity period * <li>a RETRY response was received in the last minute, and we are under * the RETRY count or in the RETRY period. * </ol> */ public boolean allowAccess() { long ts = System.currentTimeMillis(); if (mLastResponse == Policy.LICENSED && mLastResponseTime < ts) { // Check if device clock was changed. if (currentBootTime() - mLastBootTime > ts - mLastResponseTime + MILLIS_PER_MINUTE) { setLastResponse(Policy.RETRY); return false; } // Check if the LICENSED response occurred within the validity timeout. if (ts <= mValidityTimestamp) { // Cached LICENSED response is still valid. return true; } } else if (mLastResponse == Policy.RETRY && ts < mLastResponseTime + MILLIS_PER_MINUTE) { // Only allow access if we are within the retry period or we haven't used up our // max retries. return (ts <= mRetryUntil || mRetryCount <= mMaxRetries); } return false; } private Map<String, String> decodeExtras(String extras) { Map<String, String> results = new HashMap<String, String>(); try { URI rawExtras = new URI("?" + extras); List<NameValuePair> extraList = URLEncodedUtils.parse(rawExtras, "UTF-8"); for (NameValuePair item : extraList) { results.put(item.getName(), item.getValue()); } } catch (URISyntaxException e) { Log.w(TAG, "Invalid syntax error while decoding extras data from server."); } return results; } }
Fix bug on nanoTime. nano is 10^-9 and milli is 10^-3
app/src/main/java/com/google/android/vending/licensing/MyketServerManagedPolicy.java
Fix bug on nanoTime. nano is 10^-9 and milli is 10^-3
Java
apache-2.0
2f817f3f88222685afd57e4088245b05cfbf486d
0
trejkaz/derby,apache/derby,apache/derby,trejkaz/derby,apache/derby,trejkaz/derby,apache/derby
/* Derby - Class org.apache.derbyTesting.functionTests.tests.jdbcapi.DataSourceTest Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derbyTesting.functionTests.tests.jdbcapi; import java.io.File; import java.io.Serializable; import java.security.AccessController; import java.sql.CallableStatement; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ParameterMetaData; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Savepoint; import java.util.Hashtable; import java.util.Iterator; import org.apache.derbyTesting.functionTests.tests.jdbcapi.AssertEventCatcher; import javax.sql.ConnectionEvent; import javax.sql.ConnectionEventListener; import javax.sql.ConnectionPoolDataSource; import javax.sql.DataSource; import javax.sql.PooledConnection; import javax.sql.XAConnection; import javax.sql.XADataSource; import javax.transaction.xa.XAException; import javax.transaction.xa.XAResource; import javax.transaction.xa.Xid; import junit.framework.Test; import junit.framework.TestSuite; import org.apache.derby.jdbc.ClientConnectionPoolDataSource; import org.apache.derby.jdbc.ClientDataSource; import org.apache.derby.jdbc.ClientXADataSource; import org.apache.derby.jdbc.EmbeddedConnectionPoolDataSource; import org.apache.derby.jdbc.EmbeddedDataSource; import org.apache.derby.jdbc.EmbeddedSimpleDataSource; import org.apache.derby.jdbc.EmbeddedXADataSource; import org.apache.derbyTesting.functionTests.util.SecurityCheck; import org.apache.derbyTesting.junit.BaseJDBCTestCase; import org.apache.derbyTesting.junit.CleanDatabaseTestSetup; import org.apache.derbyTesting.junit.DatabasePropertyTestSetup; import org.apache.derbyTesting.junit.J2EEDataSource; import org.apache.derbyTesting.junit.JDBC; import org.apache.derbyTesting.junit.JDBCDataSource; import org.apache.derbyTesting.junit.TestConfiguration; /** * Test the various embedded DataSource implementations of Derby. * * Performs SecurityCheck analysis on the JDBC objects returned. * This is because this test returns to the client a number of * different implementations of Connection, Statement etc. * * @see org.apache.derbyTesting.functionTests.util.SecurityCheck * */ public class DataSourceTest extends BaseJDBCTestCase { private static final String dbName = TestConfiguration.getCurrent().getDefaultDatabaseName(); /** * A hashtable of opened connections. This is used when checking to * make sure connection strings are unique; we need to make sure all * the connections are closed when we are done, so they are stored * in this hashtable */ protected static Hashtable conns = new Hashtable(); /** The expected format of a connection string. In English: * "<classname>@<hashcode> (XID=<xid>), (SESSION = <sessionid>), * (DATABASE=<dbname>), (DRDAID = <drdaid>)" */ private static final String CONNSTRING_FORMAT = "\\S+@\\-?[0-9]+.* \\(XID = .*\\), \\(SESSIONID = [0-9]+\\), " + "\\(DATABASE = [A-Za-z]+\\), \\(DRDAID = .*\\) "; /** * Hang onto the SecurityCheck class while running the * tests so that it is not garbage collected during the * test and lose the information it has collected, * in case it should get printed out. */ private final Object nogc = SecurityCheck.class; public DataSourceTest(String name) { super(name); } /** * Return a suite of tests that are run with a lower lock timeout. * * @param postfix suite name postfix * @return A suite of tests being run with a lower lock timeout. */ private static Test getTimeoutSuite(String postfix) { TestSuite suite = new TestSuite("Lower lock timeout" + postfix); suite.addTest(new DataSourceTest("timeoutTestDerby1144PooledDS")); suite.addTest(new DataSourceTest("timeoutTestDerby1144XADS")); // Reduce the timeout threshold to make the tests run faster. return DatabasePropertyTestSetup.setLockTimeouts(suite, 3, 5); } /** * Return a suite of tests that are run with both client and embedded * * @param postfix suite name postfix * @return A suite of tests to be run with client and/or embedded */ private static Test baseSuite(String postfix) { TestSuite suite = new TestSuite("ClientAndEmbedded" + postfix); suite.addTest(new DataSourceTest("testGlobalLocalInterleaf")); suite.addTest(new DataSourceTest("testSetIsolationWithStatement")); suite.addTest(new DataSourceTest("testJira95xads")); suite.addTest(new DataSourceTest("testBadConnectionAttributeSyntax")); suite.addTest(new DataSourceTest("testDescriptionProperty")); suite.addTest(new DataSourceTest("testConnectionErrorEvent")); suite.addTest(new DataSourceTest("testReadOnlyToWritableTran")); suite.addTest(new DataSourceTest("testAutoCommitOnXAResourceStart")); suite.addTest(new DataSourceTest("testAllDataSources")); suite.addTest(new DataSourceTest("testClosedCPDSConnection")); suite.addTest(new DataSourceTest("testClosedXADSConnection")); suite.addTest(new DataSourceTest("testSetSchemaInXAConnection")); return suite; } /** * Return a suite of tests that are run with client only * * @return A suite of tests being run with client only */ private static Test getClientSuite() { TestSuite suite = new TestSuite("Client/Server"); suite.addTest(new DataSourceTest("testClientDSConnectionAttributes")); suite.addTest(new DataSourceTest( "testClientTraceFileDSConnectionAttribute")); suite.addTest(new DataSourceTest( "testClientMessageTextConnectionAttribute")); return suite; } /** * Return a suite of tests that are run with embedded only * * @param postfix suite name postfix * @return A suite of tests being run with embedded only */ private static Test getEmbeddedSuite(String postfix) { TestSuite suite = new TestSuite("Embedded" + postfix); suite.addTest(new DataSourceTest("testDSRequestAuthentication")); // Due to a bug following cannot be run for client - DERBY-3379 // To run this fixture with client, add to getClientSuite(), // when DERBY-3379 is fixed, remove from here (and client) and // move to baseSuite. suite.addTest(new DataSourceTest("testPooledReuseOnClose")); // when DERBY-2498 gets fixed, move these two to baseSuite suite.addTest(new DataSourceTest("testJira95ds")); suite.addTest(new DataSourceTest("testJira95pds")); // Following cannot run with client because of DERBY-2533; it hangs // when fixed, this can be moved to baseSuite. suite.addTest(new DataSourceTest("testReuseAcrossGlobalLocal")); suite.addTest(new DataSourceTest("testXAHoldability")); return suite; } public static Test suite() { if (JDBC.vmSupportsJSR169()) { // test uses unsupported classes like DriverManager, XADataSource, // ConnectionPoolDataSource, ConnectionEvenListenere, as well as // unsupported methods, like Connection.setTypeMap()... TestSuite suite = new TestSuite("DatasourceTest cannot run with JSR169"); return suite; } else { TestSuite suite = new TestSuite("DataSourceTest suite"); // Add tests that will run with both embedded suite.addTest(baseSuite(":embedded")); // and network server/client suite.addTest(TestConfiguration.clientServerDecorator( baseSuite(":client"))); // Add the tests that only run with client suite.addTest(TestConfiguration.clientServerDecorator( getClientSuite())); // Add the tests that only run with embedded suite.addTest(getEmbeddedSuite("embedded")); // Add the tests relying on getting timeouts. suite.addTest(getTimeoutSuite(":embedded")); suite.addTest(TestConfiguration.clientServerDecorator( getTimeoutSuite(":client"))); // wrap all in CleanDatabaseTestSetup that creates all database // objects any fixture might need. // Note that not all fixtures need (all of) these. return new CleanDatabaseTestSetup(suite) { /** * Create and populate database objects * * @see org.apache.derbyTesting.junit.CleanDatabaseTestSetup#decorateSQL(java.sql.Statement) */ protected void decorateSQL(Statement s) throws SQLException { s.executeUpdate("create table autocommitxastart(i int)"); s.executeUpdate("insert into autocommitxastart values 1,2,3,4,5"); s.executeUpdate("create schema SCHEMA_Patricio"); s.executeUpdate("create table " + "SCHEMA_Patricio.Patricio (id VARCHAR(255), value INTEGER)"); s.executeUpdate("create table intTable(i int)"); s.executeUpdate("create table hold_30 " + "(id int not null primary key, b char(30))"); s.executeUpdate( "create procedure checkConn2(in dsname varchar(20)) " + "parameter style java language java modifies SQL DATA " + "external name " + "'org.apache.derbyTesting.functionTests.tests.jdbcapi.DataSourceTest." + getNestedMethodName() + "'"); } }; } } public void tearDown() throws Exception { // attempt to get rid of any left-over trace files AccessController.doPrivileged(new java.security.PrivilegedAction() { public Object run() { for (int i=0 ; i < 6 ; i++) { String traceFileName = "trace" + (i+1) + ".out"; File traceFile = new File(traceFileName); if (traceFile.exists()) { // if it exists, attempt to get rid of it traceFile.delete(); } } return null; } }); super.tearDown(); } /* comment out. leaving in, just in case it's ever relevant. * when uncommented, this will run when network server tests are * started, and then reflect the results of the embedded checks. // perform security analysis of the public api for the embedded engine public void testDataSourceAPI() throws SQLException, ClassNotFoundException { SecurityCheck.report(); } */ /** * Test case for DERBY-3172 * When the Derby engine is shutdown or Network Server is brought down, any * api on JDBC Connection object should generate a Connection error event. */ public void testConnectionErrorEvent() throws SQLException, Exception { AssertEventCatcher aes12 = new AssertEventCatcher(12); ConnectionPoolDataSource ds = J2EEDataSource.getConnectionPoolDataSource(); PooledConnection pc = ds.getPooledConnection(); //Add a connection event listener to ConnectionPoolDataSource pc.addConnectionEventListener(aes12); Connection conn = pc.getConnection(); dropTable(conn, "TAB1"); //No event should have been generated at this point assertFalse(aes12.didConnectionClosedEventHappen()); assertFalse(aes12.didConnectionErrorEventHappen()); aes12.resetState(); //Shutdown the Derby engine or Network Server depending on what //mode we are running in. if (usingEmbedded()) { getTestConfiguration().shutdownDatabase(); } else { getTestConfiguration().stopNetworkServer(); } //Now try to use various apis on the JDBC Connection object created //before shutdown and they all should generate connection error event. try { conn.prepareStatement("CREATE TABLE TAB1(COL1 INT NOT NULL)"); } catch (SQLException e) { //The first call on JDBC Connection object after Network Server //shutdown will generate a communication error and that's why we //are checking for SQL State 08006 rather than No current connection //SQL State 08003. In embedded mode, we will get SQL State 08003 //meaning No current connection if (usingEmbedded()) assertSQLState("08003", e); else assertSQLState("08006", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.prepareStatement("CREATE TABLE TAB1(COL1 INT NOT NULL)", 1); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { int[] columnIndexes = {1}; conn.prepareStatement("CREATE TABLE TAB1(COL1 INT NOT NULL)", columnIndexes); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { String[] columnNames = {"col1"}; conn.prepareStatement("CREATE TABLE TAB1(COL1 INT NOT NULL)", columnNames); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.prepareStatement("CREATE TABLE TAB1(COL1 INT NOT NULL)", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.prepareStatement("CREATE TABLE TAB1(COL1 INT NOT NULL)", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.CLOSE_CURSORS_AT_COMMIT); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.createStatement(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.CLOSE_CURSORS_AT_COMMIT); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.prepareCall("CREATE TABLE TAB1(COL1 INT NOT NULL)", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.prepareCall("CREATE TABLE TAB1(COL1 INT NOT NULL)"); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.prepareCall("CREATE TABLE TAB1(COL1 INT NOT NULL)", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.CLOSE_CURSORS_AT_COMMIT); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.nativeSQL("CREATE TABLE TAB1(COL1 INT NOT NULL)"); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.getAutoCommit(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.setAutoCommit(false); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.getHoldability(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.setHoldability(1); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.commit(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.rollback(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.setSavepoint(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.setSavepoint("savept1"); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.rollback((Savepoint)null); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.releaseSavepoint((Savepoint)null); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.getTransactionIsolation(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.getWarnings(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.clearWarnings(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.getMetaData(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.isReadOnly(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.setReadOnly(true); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.setCatalog(null); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.getCatalog(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.getTypeMap(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.setTypeMap(null); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); if (usingEmbedded()) { Class.forName("org.apache.derby.jdbc.EmbeddedDriver").newInstance(); }else { getTestConfiguration().startNetworkServer(); } // Get a new connection to the database conn = getConnection(); conn.close(); } /** * Test that a PooledConnection can be reused and closed * (separately) during the close event raised by the * closing of its logical connection. * DERBY-2142. * @throws SQLException * */ public void testPooledReuseOnClose() throws SQLException { // PooledConnection from a ConnectionPoolDataSource ConnectionPoolDataSource cpds = J2EEDataSource.getConnectionPoolDataSource(); subtestPooledReuseOnClose(cpds.getPooledConnection()); subtestPooledCloseOnClose(cpds.getPooledConnection()); // DERBY-3401 - removing a callback during a close causes problems. //subtestPooledRemoveListenerOnClose(cpds.getPooledConnection()); // PooledConnection from an XDataSource XADataSource xads = J2EEDataSource.getXADataSource(); subtestPooledReuseOnClose(xads.getXAConnection()); subtestPooledCloseOnClose(xads.getXAConnection()); // DERBY-3401 - removing a callback during a close causes problems. //subtestPooledRemoveListenerOnClose(xads.getXAConnection()); } /** * Tests that a pooled connection can successfully be reused * (a new connection obtained from it) during the processing * of its close event by its listener. * Sections 11.2 & 12.5 of JDBC 4 specification indicate that the * connection can be returned to the pool when the * ConnectionEventListener.connectionClosed() is called. */ private void subtestPooledReuseOnClose(final PooledConnection pc) throws SQLException { final Connection[] newConn = new Connection[1]; pc.addConnectionEventListener(new ConnectionEventListener() { /** * Mimic a pool handler that returns the PooledConnection * to the pool and then reallocates it to a new logical connection. */ public void connectionClosed(ConnectionEvent event) { PooledConnection pce = (PooledConnection) event.getSource(); assertSame(pc, pce); try { // open a new logical connection and pass // back to the fixture. newConn[0] = pce.getConnection(); } catch (SQLException e) { // Need to catch the exception here because // we cannot throw an exception through // the api method. fail(e.getMessage()); } } public void connectionErrorOccurred(ConnectionEvent event) { } }); // Open a connection then close it to trigger the // fetching of a new connection in the callback. Connection c1 = pc.getConnection(); c1.close(); // Fetch the connection created in the close callback Connection c2 = newConn[0]; assertNotNull(c2); // Ensure the connection is useable, this hit a NPE before DERBY-2142 // was fixed (for embedded). c2.createStatement().close(); pc.close(); } /** * Tests that a pooled connection can successfully be closed * during the processing of its close event by its listener. */ private void subtestPooledCloseOnClose(final PooledConnection pc) throws SQLException { pc.addConnectionEventListener(new ConnectionEventListener() { /** * Mimic a pool handler that closes the PooledConnection * (say it no longer needs it, pool size being reduced) */ public void connectionClosed(ConnectionEvent event) { PooledConnection pce = (PooledConnection) event.getSource(); assertSame(pc, pce); try { pce.close(); } catch (SQLException e) { // Need to catch the exception here because // we cannot throw an exception through // the api method. fail(e.getMessage()); } } public void connectionErrorOccurred(ConnectionEvent event) { } }); // Open and close a connection to invoke the logic above // through the callback pc.getConnection().close(); // The callback closed the actual pooled connection // so subsequent requests to get a logical connection // should fail. try { pc.getConnection(); fail("PooledConnection should be closed"); } catch (SQLException sqle) { assertSQLState("08003", sqle); } } /** * Tests that a listener of a pooled connection can successfully * remove itself during the processing of its close event by its listener. */ private void subtestPooledRemoveListenerOnClose(final PooledConnection pc) throws SQLException { final int[] count1 = new int[1]; pc.addConnectionEventListener(new ConnectionEventListener() { /** * Mimic a pool handler that removes the listener during * a logical close. */ public void connectionClosed(ConnectionEvent event) { PooledConnection pce = (PooledConnection) event.getSource(); assertSame(pc, pce); count1[0]++; pce.removeConnectionEventListener(this); } public void connectionErrorOccurred(ConnectionEvent event) { } }); // and have another listener to ensure removing one leaves // the other working and intact. final int[] count2 = new int[1]; pc.addConnectionEventListener(new ConnectionEventListener() { /** * Mimic a pool handler that closes the PooledConnection * (say it no longer needs it, pool size being reduced) */ public void connectionClosed(ConnectionEvent event) { PooledConnection pce = (PooledConnection) event.getSource(); assertSame(pc, pce); count2[0]++; } public void connectionErrorOccurred(ConnectionEvent event) { } }); // no callback yet assertEquals(0, count1[0]); assertEquals(0, count2[0]); // Open and close a connection to invoke the logic above // through the callback pc.getConnection().close(); // one callback for each assertEquals(1, count1[0]); assertEquals(1, count2[0]); // the callback (count1) that was removed is not called on the // second close but the second callback (count2) is called. pc.getConnection().close(); assertEquals(1, count1[0]); assertEquals(2, count2[0]); pc.close(); } public void testAllDataSources() throws SQLException, Exception { Connection dmc = getConnection(); CallableStatement cs = dmc.prepareCall("call checkConn2(?)"); cs.setString(1,"Nested"); try { cs.execute(); } catch (SQLException sqle) { assertSQLState("40XC0", sqle); } cs.setString(1,"Nested2"); cs.execute(); String EmptyMapValue=null; // Note: currently, not supported String NullMapValue=null; String MapMapValue=null; if (usingEmbedded()) { EmptyMapValue="OK"; NullMapValue="XJ081"; MapMapValue="0A000"; } else if (usingDerbyNetClient()) { EmptyMapValue="0A000"; NullMapValue="0A000"; MapMapValue="0A000"; } Object[] expectedValues = { new Integer(ResultSet.HOLD_CURSORS_OVER_COMMIT), "XJ010", new Integer(2), new Boolean(true), new Boolean(false), EmptyMapValue, NullMapValue, MapMapValue}; assertConnectionOK(expectedValues, "DriverManager ", dmc); if (usingEmbedded()) assertTenConnectionsUnique(); DataSource dscs = JDBCDataSource.getDataSource(); if (usingEmbedded()) assertToString(dscs); DataSource ds = dscs; assertConnectionOK(expectedValues, "DataSource", ds.getConnection()); DataSource dssimple = null; // simple datasource is only supported with embedded if (usingEmbedded()) { EmbeddedSimpleDataSource realdssimple = new EmbeddedSimpleDataSource(); realdssimple.setDatabaseName(dbName); ds = realdssimple; dssimple = (DataSource)realdssimple; assertConnectionOK( expectedValues, "SimpleDataSource", ds.getConnection()); } ConnectionPoolDataSource dsp = J2EEDataSource.getConnectionPoolDataSource(); if (usingEmbedded()) assertToString(dsp); PooledConnection pc = dsp.getPooledConnection(); // checks currently only implemented for embedded if (usingEmbedded()) { SecurityCheck.assertSourceSecurity( pc, "javax.sql.PooledConnection"); } AssertEventCatcher aes1 = new AssertEventCatcher(1); pc.addConnectionEventListener(aes1); // DERBY-2531 // with Network Server / DerbyNetClient, the assertConnectionOK check // returns a different connection object... assertConnectionOK( expectedValues, "ConnectionPoolDataSource", pc.getConnection()); //Check if got connection closed event but not connection error event assertTrue(aes1.didConnectionClosedEventHappen()); assertFalse(aes1.didConnectionErrorEventHappen()); aes1.resetState(); assertConnectionOK( expectedValues, "ConnectionPoolDataSource", pc.getConnection()); //Check if got connection closed event but not connection error event assertTrue(aes1.didConnectionClosedEventHappen()); assertFalse(aes1.didConnectionErrorEventHappen()); aes1.resetState(); XADataSource dsx = J2EEDataSource.getXADataSource(); if (usingEmbedded()) assertToString(dsx); // shutdown db and check all's still ok thereafter TestConfiguration.getCurrent().shutdownDatabase(); dmc = getConnection(); cs = dmc.prepareCall("call checkConn2(?)"); // checks currently only implemented for embedded if (usingEmbedded()) { SecurityCheck.assertSourceSecurity( cs, "java.sql.CallableStatement"); } cs.setString(1,"Nested"); try { cs.execute(); } catch (SQLException sqle) { assertSQLState("40XC0", sqle); } cs.setString(1, "Nested2"); cs.execute(); XAConnection xac = dsx.getXAConnection(); // checks currently only implemented for embedded if (usingEmbedded()) { SecurityCheck.assertSourceSecurity(xac, "javax.sql.XAConnection"); } AssertEventCatcher aes3 = new AssertEventCatcher(3); xac.addConnectionEventListener(aes3); assertConnectionOK( expectedValues, "XADataSource", xac.getConnection()); //Check if got connection closed event but not connection error event assertTrue(aes3.didConnectionClosedEventHappen()); assertFalse(aes3.didConnectionErrorEventHappen()); aes3.resetState(); pc = dsp.getPooledConnection(); AssertEventCatcher aes2 = new AssertEventCatcher(2); pc.addConnectionEventListener(aes2); assertConnectionOK( expectedValues, "ConnectionPoolDataSource", pc.getConnection()); //Check if got connection closed event but not connection error event assertTrue(aes2.didConnectionClosedEventHappen()); assertFalse(aes2.didConnectionErrorEventHappen()); aes2.resetState(); // test "local" XAConnections xac = dsx.getXAConnection(); AssertEventCatcher aes4 = new AssertEventCatcher(4); xac.addConnectionEventListener(aes4); assertConnectionOK( expectedValues, "XADataSource", xac.getConnection()); //Check if got connection closed event but not connection error event assertTrue(aes4.didConnectionClosedEventHappen()); assertFalse(aes4.didConnectionErrorEventHappen()); aes4.resetState(); assertConnectionOK( expectedValues, "XADataSource", xac.getConnection()); //Check if got connection closed event but not connection error event assertTrue(aes4.didConnectionClosedEventHappen()); assertFalse(aes4.didConnectionErrorEventHappen()); aes4.resetState(); xac.close(); // test "global" XAConnections xac = dsx.getXAConnection(); AssertEventCatcher aes5 = new AssertEventCatcher(5); xac.addConnectionEventListener(aes5); XAResource xar = xac.getXAResource(); // checks currently only implemented for embedded if (usingEmbedded()) { SecurityCheck.assertSourceSecurity( xar, "javax.transaction.xa.XAResource"); } Xid xid = new cdsXid(1, (byte) 35, (byte) 47); xar.start(xid, XAResource.TMNOFLAGS); Connection xacc = xac.getConnection(); xacc.close(); expectedValues[0] = new Integer(ResultSet.CLOSE_CURSORS_AT_COMMIT); if (usingEmbedded()) expectedValues[1] = "XJ058"; expectedValues[3] = new Boolean(false); assertConnectionOK( expectedValues, "Global XADataSource", xac.getConnection()); //Check if got connection closed event but not connection error event assertTrue(aes5.didConnectionClosedEventHappen()); assertFalse(aes5.didConnectionErrorEventHappen()); aes5.resetState(); assertConnectionOK( expectedValues, "Global XADataSource", xac.getConnection()); //Check if got connection closed event but not connection error event assertTrue(aes5.didConnectionClosedEventHappen()); assertFalse(aes5.didConnectionErrorEventHappen()); aes5.resetState(); xar.end(xid, XAResource.TMSUCCESS); expectedValues[0] = new Integer(ResultSet.HOLD_CURSORS_OVER_COMMIT); expectedValues[3] = new Boolean(true); assertConnectionOK(expectedValues, "Switch to local XADataSource", xac.getConnection()); //Check if got connection closed event but not connection error event assertTrue(aes5.didConnectionClosedEventHappen()); assertFalse(aes5.didConnectionErrorEventHappen()); aes5.resetState(); assertConnectionOK(expectedValues, "Switch to local XADataSource", xac.getConnection()); //Check if got connection closed event but not connection error event assertTrue(aes5.didConnectionClosedEventHappen()); assertFalse(aes5.didConnectionErrorEventHappen()); aes5.resetState(); Connection backtoGlobal = xac.getConnection(); xar.start(xid, XAResource.TMJOIN); expectedValues[0] = new Integer(ResultSet.CLOSE_CURSORS_AT_COMMIT); expectedValues[3] = new Boolean(false); assertConnectionOK(expectedValues, "Switch to global XADataSource", backtoGlobal); //Check if got connection closed event but not connection error event assertTrue(aes5.didConnectionClosedEventHappen()); assertFalse(aes5.didConnectionErrorEventHappen()); aes5.resetState(); assertConnectionOK(expectedValues, "Switch to global XADataSource", xac.getConnection()); //Check if got connection closed event but not connection error event assertTrue(aes5.didConnectionClosedEventHappen()); assertFalse(aes5.didConnectionErrorEventHappen()); aes5.resetState(); xar.end(xid, XAResource.TMSUCCESS); xar.commit(xid, true); xac.close(); } public void testClosedCPDSConnection() throws SQLException, Exception { // verify that outstanding updates from a closed connection, obtained // from a ConnectionPoolDataSource, are not committed, but rolled back. ConnectionPoolDataSource dsp = J2EEDataSource.getConnectionPoolDataSource(); PooledConnection pc = dsp.getPooledConnection(); Connection c1 = pc.getConnection(); Statement s = c1.createStatement(); // start by deleting all rows from intTable s.executeUpdate("delete from intTable"); c1.setAutoCommit(false); // this update should get rolled back later s.executeUpdate("insert into intTable values(1)"); // this should automatically close the original connection c1 = pc.getConnection(); ResultSet rs = c1.createStatement().executeQuery("select count(*) from intTable"); rs.next(); assertEquals(0, rs.getInt(1)); c1.close(); // check connection objects are closed once connection is closed try { rs.next(); fail("ResultSet is open for a closed connection obtained from PooledConnection"); } catch (SQLException sqle) { // 08003 - No current connection; XCL16 - ResultSet not open if (usingEmbedded()) assertSQLState("08003", sqle); else if (usingDerbyNetClient()) assertSQLState("XCL16", sqle); } try { s.executeUpdate("update intTable set i = 1"); fail("Statement is open for a closed connection " + "obtained from PooledConnection"); } catch (SQLException sqle) { assertSQLState("08003", sqle); } pc.close(); pc = null; PoolReset("ConnectionPoolDataSource", dsp.getPooledConnection()); s.close(); rs.close(); c1.close(); } public void testClosedXADSConnection() throws SQLException, Exception { // verify that outstanding updates from a closed connection, obtained // from an XADataSource, are not committed, but rolled back. XADataSource dsx = J2EEDataSource.getXADataSource(); XAConnection xac = dsx.getXAConnection(); Connection c1 = xac.getConnection(); Statement s = c1.createStatement(); c1.setAutoCommit(false); // this update should be rolled back s.executeUpdate("insert into intTable values(2)"); c1 = xac.getConnection(); ResultSet rs = c1.createStatement().executeQuery( "select count(*) from intTable"); rs.next(); assertEquals(0, rs.getInt(1)); rs.close(); c1.close(); xac.close(); xac = null; PoolReset("XADataSource", dsx.getXAConnection()); } public void testGlobalLocalInterleaf() throws SQLException, XAException { // now some explicit tests for how connection state behaves // when switching between global transactions and local // and setting connection state. // some of this may be tested elsewhere too. XADataSource dsx = J2EEDataSource.getXADataSource(); XAConnection xac = dsx.getXAConnection(); AssertEventCatcher aes6 = new AssertEventCatcher(6); xac.addConnectionEventListener(aes6); XAResource xar = xac.getXAResource(); Xid xid = new cdsXid(1, (byte) 93, (byte) 103); // series 1 - Single connection object Connection cs1 = xac.getConnection(); // initial local assertConnectionState( ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_READ_COMMITTED, true, false, cs1); xar.start(xid, XAResource.TMNOFLAGS); // initial X1 assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_COMMITTED, false, false, cs1); cs1.setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED); cs1.setReadOnly(true); setHoldability(cs1, false); // close cursors // modified X1 boolean ReadOnly = false; // see DERBY-911, ReadOnly state different for Embedded/DerbyNetClient if (usingEmbedded()) ReadOnly = true; assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, false, ReadOnly, cs1); xar.end(xid, XAResource.TMSUCCESS); // the underlying local transaction/connection must pick up the // state of the Connection handle cs1 // modified local: assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, true, ReadOnly, cs1); cs1.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); cs1.setReadOnly(false); setHoldability(cs1, false); // close cursors // reset local assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_COMMITTED, true, false, cs1); // now re-join the transaction, should pick up the read-only // and isolation level from the transaction, // holdability remains that of this handle. xar.start(xid, XAResource.TMJOIN); // re-join X1 assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, false, ReadOnly, cs1); xar.end(xid, XAResource.TMSUCCESS); // back to local - should be the same as the reset local assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_COMMITTED, true, false, cs1); // test suspend/resume // now re-join the transaction (X1) for the second time, should pick // up the read-only and isolation level from the transaction, // holdability remains that of this handle. xar.start(xid, XAResource.TMJOIN); assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, false, ReadOnly, cs1); xar.end(xid, XAResource.TMSUSPEND); // local after suspend assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_COMMITTED, true, false, cs1); xar.start(xid, XAResource.TMRESUME); // resume X1 assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, false, ReadOnly, cs1); xar.end(xid, XAResource.TMSUCCESS); // back to local (second time) assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_COMMITTED, true, false, cs1); cs1.setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED); cs1.setReadOnly(true); setHoldability(cs1, true); // hold //Confirm - no connection closed event & connection error event assertFalse(aes6.didConnectionClosedEventHappen()); assertFalse(aes6.didConnectionErrorEventHappen()); aes6.resetState(); cs1.close(); //Check if got connection closed event but not connection error event assertTrue(aes6.didConnectionClosedEventHappen()); assertFalse(aes6.didConnectionErrorEventHappen()); aes6.resetState(); cs1 = xac.getConnection(); // new handle - local assertConnectionState( ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_READ_COMMITTED, true, false, cs1); cs1.close(); //Check if got connection closed event but not connection error event assertTrue(aes6.didConnectionClosedEventHappen()); assertFalse(aes6.didConnectionErrorEventHappen()); aes6.resetState(); xar.start(xid, XAResource.TMJOIN); cs1 = xac.getConnection(); // re-join with new handle X1 assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, false, ReadOnly, cs1); cs1.close(); xar.end(xid, XAResource.TMSUCCESS); //Check if got connection closed event but not connection error event assertTrue(aes6.didConnectionClosedEventHappen()); assertFalse(aes6.didConnectionErrorEventHappen()); aes6.resetState(); // now get a connection (attached to a local) // attach to the global and commit it. // state should be that of the local after the commit. cs1 = xac.getConnection(); cs1.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ); // pre-X1 commit - local assertConnectionState( ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_REPEATABLE_READ, true, false, cs1); xar.start(xid, XAResource.TMJOIN); // pre-X1 commit - X1 assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, false, ReadOnly, cs1); xar.end(xid, XAResource.TMSUCCESS); // post-X1 end - local assertConnectionState( ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_REPEATABLE_READ, true, false, cs1); xar.commit(xid, true); // post-X1 commit - local assertConnectionState( ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_REPEATABLE_READ, true, false, cs1); //Confirm - no connection closed event & connection error event assertFalse(aes6.didConnectionClosedEventHappen()); assertFalse(aes6.didConnectionErrorEventHappen()); aes6.resetState(); cs1.close(); //Check if got connection closed event but not connection error event assertTrue(aes6.didConnectionClosedEventHappen()); assertFalse(aes6.didConnectionErrorEventHappen()); aes6.resetState(); } // really part of testGlobalLocalInterLeaf: /** * @throws SQLException * @throws XAException */ public void testSetIsolationWithStatement() throws SQLException, XAException { // DERBY-421 Setting isolation level with SQL was not getting // handled correctly // Some more isolation testing using SQL and JDBC api XADataSource dsx = J2EEDataSource.getXADataSource(); XAConnection xac = dsx.getXAConnection(); AssertEventCatcher aes6 = new AssertEventCatcher(6); xac.addConnectionEventListener(aes6); XAResource xar = xac.getXAResource(); Connection conn = xac.getConnection(); Statement s = conn.createStatement(); // initial local assertConnectionState( ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_READ_COMMITTED, true, false, conn); // Issue setTransactionIsolation in local transaction conn.setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED); // setTransactionIsolation in local assertConnectionState( ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, true, false, conn); Xid xid; //Issue SQL to change isolation in local transaction s.executeUpdate("set current isolation = RR"); assertConnectionState(ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_SERIALIZABLE, true, false, conn); xid = new cdsXid(1, (byte) 35, (byte) 47); xar.start(xid, XAResource.TMNOFLAGS); // 1st global (new) assertConnectionState(ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_SERIALIZABLE, false, false, conn); xar.end(xid, XAResource.TMSUCCESS); // local assertConnectionState(ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_SERIALIZABLE, true, false, conn); //Issue SQL to change isolation in local transaction s.executeUpdate("set current isolation = RS"); assertConnectionState(ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_REPEATABLE_READ, true, false, conn); // DERBY-1325 - Isolation level of local connection does not get reset after ending // a global transaction that was joined/resumed if the isolation level was changed // using SQL xar.start(xid, XAResource.TMJOIN); // 1st global(existing) assertConnectionState(ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_SERIALIZABLE, false, false, conn); xar.end(xid, XAResource.TMSUCCESS); // local assertConnectionState(ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_REPEATABLE_READ, true, false, conn); // DERBY-1325 end test Xid xid2 = new cdsXid(1, (byte) 93, (byte) 103); xar.start(xid2, XAResource.TMNOFLAGS); // 2nd global (new) assertConnectionState(ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_REPEATABLE_READ, false, false, conn); xar.end(xid2, XAResource.TMSUCCESS); xar.start(xid, XAResource.TMJOIN); // 1st global (existing) assertConnectionState(ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_SERIALIZABLE, false, false, conn); xar.end(xid, XAResource.TMSUCCESS); //local assertConnectionState(ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_REPEATABLE_READ, true, false, conn); xar.start(xid, XAResource.TMJOIN); // 1st global (existing) assertConnectionState(ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_SERIALIZABLE, false, false, conn); // Issue SQL to change isolation in 1st global transaction s.executeUpdate("set current isolation = UR"); assertConnectionState(ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, false, false, conn); xar.end(xid, XAResource.TMSUCCESS); // local assertConnectionState(ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, true, false, conn); xar.start(xid2, XAResource.TMJOIN); // 2nd global (existing) assertConnectionState(ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_REPEATABLE_READ, false, false, conn); xar.end(xid2, XAResource.TMSUCCESS); xar.rollback(xid2); // (After 2nd global rollback ) local assertConnectionState(ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, true, false, conn); xar.rollback(xid); // (After 1st global rollback) local assertConnectionState(ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, true, false, conn); //Confirm - no connection closed event & connection error event assertFalse(aes6.didConnectionClosedEventHappen()); assertFalse(aes6.didConnectionErrorEventHappen()); aes6.resetState(); } // This test includes some short-hand descriptions of the test cases // left in for reference to the original non-junit test public void testReuseAcrossGlobalLocal() throws SQLException, XAException { // DERBY-2533 - // network server cannot run this test - it hits a protocol error // on tearDown. Embedded requires a database shutdown if (usingDerbyNetClient()) return; int[] onetwothree = {1,2,3}; int[] three = {3}; int[] pspc = {1, 4}; // expected parameter count for prepared statements int[] cspc = {2, 12, 12}; // for callable statements // statics for testReuseAcrossGlobalLocal int[] StatementExpectedValues = { ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY, ResultSet.FETCH_REVERSE, 444, 713, 19, ResultSet.HOLD_CURSORS_OVER_COMMIT}; //ResultSet.CLOSE_CURSORS_AT_COMMIT}; int[] PreparedStatementExpectedValues = { ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY, ResultSet.FETCH_REVERSE, 888, 317, 91, ResultSet.HOLD_CURSORS_OVER_COMMIT}; int[] CallableStatementExpectedValues = { ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY, ResultSet.FETCH_REVERSE, 999, 137, 85, ResultSet.HOLD_CURSORS_OVER_COMMIT}; XADataSource dsx = J2EEDataSource.getXADataSource(); XAConnection xac = dsx.getXAConnection(); AssertEventCatcher aes6 = new AssertEventCatcher(6); xac.addConnectionEventListener(aes6); XAResource xar = xac.getXAResource(); Xid xid = new cdsXid(1, (byte) 103, (byte) 119); // now check re-use of *Statement objects across local/global // connections. Connection cs1 = xac.getConnection(); // ensure read locks stay around until end-of transaction cs1.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ); cs1.setAutoCommit(false); assertLocks(null, cs1); Statement sru1 = cs1.createStatement(); sru1.setCursorName("SN1"); sru1.executeUpdate("insert into intTable values 1,2,3"); Statement sruBatch = cs1.createStatement(); sruBatch.setCursorName("sruBatch"); Statement sruState = createFloatStatementForStateChecking( StatementExpectedValues, cs1); PreparedStatement psruState = createFloatStatementForStateChecking( new int[] {1, 4}, PreparedStatementExpectedValues, cs1, "select i from intTable where i = ?"); CallableStatement csruState = createFloatCallForStateChecking( new int[] {2, 12, 12}, CallableStatementExpectedValues, cs1, "CALL SYSCS_UTIL.SYSCS_SET_DATABASE_PROPERTY(?,?)"); PreparedStatement psParams = cs1.prepareStatement("select * from intTable where i > ?"); psParams.setCursorName("params"); psParams.setInt(1, 2); // Params-local-1 resultSetQuery("params", three, psParams.executeQuery()); sruBatch.addBatch("insert into intTable values 4"); // sru1-local-1 queryOnStatement("SN1", onetwothree, cs1, sru1); cs1.commit(); // need to commit to switch to an global connection; // simple case - underlying connection is re-used for global. xar.start(xid, XAResource.TMNOFLAGS); // Expecting downgrade because global transaction sru1-global-2 is // using a statement with holdability true // sru1-global-2 queryOnStatement("SN1", onetwothree, cs1, sru1); sruBatch.addBatch("insert into intTable values 5"); Statement sru2 = cs1.createStatement(); sru2.setCursorName("OAK2"); //sru2-global-3 queryOnStatement("OAK2", onetwothree, cs1, sru2); // Expecting downgrade because global transaction sru1-global-4 is // using a statement with holdability true // sru1-global-4 queryOnStatement("SN1", onetwothree, cs1, sru1); // Global statement StatementExpectedValues[6] = ResultSet.CLOSE_CURSORS_AT_COMMIT; PreparedStatementExpectedValues[6] = ResultSet.CLOSE_CURSORS_AT_COMMIT; CallableStatementExpectedValues[6] = ResultSet.CLOSE_CURSORS_AT_COMMIT; assertStatementState(null, StatementExpectedValues ,sruState); // Global PreparedStatement assertStatementState(pspc, PreparedStatementExpectedValues, psruState); // Global CallableStatement assertStatementState(cspc, CallableStatementExpectedValues, csruState); // Params-global-1 resultSetQuery("params", three, psParams.executeQuery()); xar.end(xid, XAResource.TMSUCCESS); // now a new underlying connection is created // sru1-local-5 queryOnStatement("SN1", onetwothree, cs1, sru1); // sru2-local-6 queryOnStatement("OAK2", onetwothree, cs1, sru2); sruBatch.addBatch("insert into intTable values 6,7"); Statement sru3 = cs1.createStatement(); sru3.setCursorName("SF3"); // sru3-local-7 queryOnStatement("SF3", onetwothree, cs1, sru3); // Two transactions should hold locks (global and the current XA); // LOCAL StatementExpectedValues[6] = ResultSet.HOLD_CURSORS_OVER_COMMIT; PreparedStatementExpectedValues[6] = ResultSet.HOLD_CURSORS_OVER_COMMIT; CallableStatementExpectedValues[6] = ResultSet.HOLD_CURSORS_OVER_COMMIT; assertStatementState(null, StatementExpectedValues, sruState); assertStatementState(pspc, PreparedStatementExpectedValues, psruState); assertStatementState(cspc, CallableStatementExpectedValues, csruState); // Params-local-2 resultSetQuery("params", three, psParams.executeQuery()); assertLocks(new int[] {14,14}, cs1); cs1.commit(); //Confirm - no connection closed event & connection error event assertFalse(aes6.didConnectionClosedEventHappen()); assertFalse(aes6.didConnectionErrorEventHappen()); aes6.resetState(); // attach the XA transaction to another connection and see what happens XAConnection xac2 = dsx.getXAConnection(); AssertEventCatcher aes5 = new AssertEventCatcher(5); xac2.addConnectionEventListener(aes5); XAResource xar2 = xac2.getXAResource(); xar2.start(xid, XAResource.TMJOIN); Connection cs2 = xac2.getConnection(); // these statements were generated by cs1 and thus are still // in a local connection. // sru1-local-8 queryOnStatement("SN1", onetwothree, cs1, sru1); // sru2-local-9 queryOnStatement("OAK2", onetwothree, cs1, sru2); // sru3-local-10 queryOnStatement("SF3", onetwothree, cs1, sru3); sruBatch.addBatch("insert into intTable values 8"); // LOCAL 2 assertStatementState(null, StatementExpectedValues, sruState); assertStatementState(pspc, PreparedStatementExpectedValues, psruState); assertStatementState(cspc, CallableStatementExpectedValues, csruState); assertLocks(new int[] {14, 12}, cs1); int[] updateCounts = sruBatch.executeBatch(); int[] expectedUpdateCounts = {1, 1, 2, 1}; // sruBatch update counts: for (int i = 0; i < updateCounts.length; i++) { assertEquals(expectedUpdateCounts[i], updateCounts[i]); } // sruBatch queryOnStatement( "sruBatch", new int[] {1,2,3,4,5,6,7,8}, cs1, sruBatch); xar2.end(xid, XAResource.TMSUCCESS); //Confirm - no connection closed event & connection error event assertFalse(aes5.didConnectionClosedEventHappen()); assertFalse(aes5.didConnectionErrorEventHappen()); aes5.resetState(); xac2.close(); // allow close on already closed XAConnection xac2.close(); xac2.addConnectionEventListener(null); xac2.removeConnectionEventListener(null); // test methods against a closed XAConnection and its resource try { xac2.getXAResource(); // DERBY-2532 // Network Server does not think this is worth an exception. if (usingEmbedded()) fail("expected SQLException on " + "closed XAConnection.getXAResource"); } catch (SQLException sqle) { assertSQLState("08003", sqle); } try { xac2.getConnection(); fail ("expected SQLException on XAConnection.getConnection"); } catch (SQLException sqle) { assertSQLState("08003", sqle); } try { xar2.start(xid, XAResource.TMJOIN); fail ("expected XAException on XAResource.TMJOIN"); } catch (XAException xae) { assertXAException("XAResource.start", xae); } try { xar2.end(xid, XAResource.TMJOIN); fail ("expected XAException on XAResource.TMJOIN"); } catch (XAException xae) { assertXAException("XAResource.end", xae); } try { xar2.commit(xid, true); fail ("expected XAException on XAResource.commit"); } catch (XAException xae) { assertXAException("XAResource.commit", xae); } try { xar2.prepare(xid); fail ("expected XAException on XAResource.prepare"); } catch (XAException xae) { assertXAException("XAResource.prepare", xae); } try { xar2.recover(0); fail ("expected XAException on XAResource.recover"); } catch (XAException xae) { assertXAException("XAResource.recover", xae); } try { xar2.prepare(xid); fail ("expected XAException on XAResource.prepare"); } catch (XAException xae) { assertXAException("XAResource.prepare", xae); } try { xar2.isSameRM(xar2); fail ("expected XAException on XAResource.isSameRM"); } catch (XAException xae) { assertXAException("XAResource.isSameRM", xae); } // close everything cs1.rollback(); sruState.close(); psruState.close(); csruState.close(); psParams.close(); sruBatch.close(); sru1.close(); sru2.close(); sru3.close(); cs1.close(); cs2.close(); xac.removeConnectionEventListener(null); xac.close(); xac2.close(); // but, still not enough. // what with all the switching between global and local transactions // we still have a lock open on intTable, which will interfere with // our tearDown efforts. Bounce the database. TestConfiguration.getCurrent().shutdownDatabase(); } public void testSetSchemaInXAConnection() throws SQLException { // tests that set schema works correctly in an XA connection. XADataSource dsx = J2EEDataSource.getXADataSource(); XAConnection xac3 = dsx.getXAConnection(); Connection conn3 = xac3.getConnection(); Statement st3 = conn3.createStatement(); st3.execute("SET SCHEMA SCHEMA_Patricio"); st3.close(); PreparedStatement ps3 = conn3.prepareStatement("INSERT INTO Patricio VALUES (?, ?)"); ps3.setString(1, "Patricio"); ps3.setInt(2, 3); ps3.executeUpdate(); assertEquals(1, ps3.getUpdateCount()); ps3.close(); conn3.close(); xac3.close(); } // test that an xastart in auto commit mode commits the existing work. // test fix of a bug ('beetle 5178') wherein XAresource.start() when // auto-commit is true did not implictly commit any transaction // Also tests DERBY-1025, same description, but for client. public void testAutoCommitOnXAResourceStart() throws SQLException, XAException { XADataSource dsx = J2EEDataSource.getXADataSource(); XAConnection xac4 = dsx.getXAConnection(); Xid xid4a= null; // We get an XAID_DUP error from networkserver when attempting // the XAResource.start below if we use the same xid. // Possibly because we're in the same jvm. // When the test is run with clientserverSuite, rather than default, // this wasn't needed, so just create a different id for client if (usingEmbedded()) xid4a = new cdsXid(4, (byte) 23, (byte) 76); else if (usingDerbyNetClient()) xid4a = new cdsXid(5, (byte) 23, (byte) 76); Connection conn4 = xac4.getConnection(); assertTrue(conn4.getAutoCommit()); Statement s4 = conn4.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.CLOSE_CURSORS_AT_COMMIT); ResultSet rs4 = s4.executeQuery("select i from autocommitxastart"); rs4.next(); assertEquals(1, rs4.getInt(1)); rs4.next(); assertEquals(2, rs4.getInt(1)); // XAResource().start should commit the transaction xac4.getXAResource().start(xid4a, XAResource.TMNOFLAGS); xac4.getXAResource().end(xid4a, XAResource.TMSUCCESS); try { rs4.next(); fail ("expected an exception indicating resultset is closed."); } catch (SQLException sqle) { // Embedded gets 08003. No current connection (DERBY-2620) if (usingDerbyNetClient()) assertSQLState("XCL16",sqle); } conn4.setAutoCommit(false); assertFalse(conn4.getAutoCommit()); rs4 = s4.executeQuery("select i from autocommitxastart"); rs4.next(); assertEquals(1, rs4.getInt(1)); rs4.next(); assertEquals(2, rs4.getInt(1)); // Get a new xid to begin another transaction. if (usingEmbedded()) xid4a = new cdsXid(4, (byte) 93, (byte) 103); else if (usingDerbyNetClient()) xid4a = new cdsXid(5, (byte) 93, (byte) 103); try { xac4.getXAResource().start(xid4a, XAResource.TMNOFLAGS); } catch (XAException xae) { if (usingEmbedded()) assertNull(xae.getMessage()); else if (usingDerbyNetClient()) { // This should give XAER_OUTSIDE exception because // the resource manager is busy in the local transaction assertTrue(xae.getMessage().indexOf("XAER_OUTSIDE") >=0 ); } assertEquals(-9, xae.errorCode); } rs4.next(); assertEquals(3, rs4.getInt(1)); rs4.close(); conn4.rollback(); conn4.close(); xac4.close(); } public void testReadOnlyToWritableTran() throws SQLException, Exception { // This fixture will run twice, once with embedded, once with client, // and insert 2 rows in addition to the 5 rows inserted during setup. // The fixture tests a commit, so before running, try to remove row // 6 and 7 in case this is the second run of the fixture. Statement s = createStatement(); s.executeUpdate("delete from autocommitxastart where i = 6"); s.executeUpdate("delete from autocommitxastart where i = 7"); // TESTING READ_ONLY TRANSACTION FOLLOWED BY WRITABLE TRANSACTION // Test following sequence of steps // 1)start a read-only global transaction // 2)finish that read-only transaction // 3)start another global transaction XADataSource dsx = J2EEDataSource.getXADataSource(); XAConnection xac5 = dsx.getXAConnection(); Xid xid5a = new cdsXid(5, (byte) 119, (byte) 129); Connection conn5 = xac5.getConnection(); Statement sru5a = conn5.createStatement(); XAResource xar = xac5.getXAResource(); xar.start(xid5a, XAResource.TMNOFLAGS); conn5.setReadOnly(true); // Read-Only XA transaction; // holdability: (hold, or close cursors over commit) , // transaction isolation: read-committed, // auto-commit false, read-only true (with embedded) if (usingEmbedded()) { assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_COMMITTED, false, true, conn5); } // Note: the original test had no comments about this difference // between Embedded and DerbyNetClient, this has apparently // been accepted behavior. else if (usingDerbyNetClient()) { assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_COMMITTED, false, false, conn5); } ResultSet rs5 = sru5a.executeQuery( "select count(*) from autocommitxastart"); rs5.next(); assertEquals(5, rs5.getInt(1)); rs5.close(); xar.end(xid5a, XAResource.TMSUCCESS); xar.commit(xid5a, true); conn5.close(); //now start a new transaction conn5 = xac5.getConnection(); sru5a = conn5.createStatement(); xar.start(xid5a, XAResource.TMNOFLAGS); // Writeable XA transaction // holdability: (hold, or close cursors over commit) , // transaction isolation: read-committed, // auto-commit false, read-only false assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_COMMITTED, false, false, conn5); sru5a.executeUpdate("insert into autocommitxastart values 6,7"); rs5 = sru5a.executeQuery("select count(*) from autocommitxastart"); rs5.next(); assertEquals(7, rs5.getInt(1)); xar.end(xid5a, XAResource.TMSUCCESS); xar.commit(xid5a, true); conn5.close(); xac5.close(); sru5a.close(); } // test jira-derby 95 - a NullPointerException was returned when passing // an incorrect database name, should now give error: // XCY00 - invalid valid for property ... // with DataSource public void testJira95ds() throws SQLException { try { DataSource ds = JDBCDataSource.getDataSource(); // non-existent database JDBCDataSource.setBeanProperty(ds, "databaseName", "jdbc:derby:wombat"); ds.getConnection(); fail ("expected an SQLException!"); } catch (SQLException sqle) { // DERBY-2498: with client, getting a NullPointerException. // Note also: the NPE does not occur with XADataSource - see // testJira95xads(). if (usingEmbedded()) assertSQLState("XCY00", sqle); } catch (Exception e) { // DERBY-2498, when fixed, remove 'if' if (usingEmbedded()) fail ("unexpected exception: " + e.toString()); } } // test jira-derby 95 - a NullPointerException was returned when passing // an incorrect database name, should now give error XCY00 // with ConnectionPoolDataSource public void testJira95pds() throws SQLException { try { ConnectionPoolDataSource pds = J2EEDataSource.getConnectionPoolDataSource(); JDBCDataSource.setBeanProperty(pds, "databaseName", "jdbc:derby:boo"); pds.getPooledConnection(); fail ("expected an SQLException!"); } catch (SQLException sqle) { // DERBY-2498 - when fixed, remove if if (usingEmbedded()) assertSQLState("XCY00", sqle); } catch (Exception e) { // DERBY-2498 - when fixed, remove if if (usingEmbedded()) fail ("unexpected exception: " + e.toString()); } } // test jira-derby 95 - a NullPointerException was returned when passing // an incorrect database name, should now give error XCY00 // with XADataSource public void testJira95xads() throws SQLException { try { XADataSource dxs = J2EEDataSource.getXADataSource(); JDBCDataSource.setBeanProperty(dxs, "databaseName", "jdbc:derby:boo"); dxs.getXAConnection().getConnection(); fail ("expected an SQLException!"); } catch (SQLException sqle) { assertSQLState("XCY00", sqle); } catch (Exception e) { fail ("unexpected exception: " + e.toString()); } } public void testBadConnectionAttributeSyntax() throws SQLException { // DataSource - bad connattr syntax DataSource ds = JDBCDataSource.getDataSource(); JDBCDataSource.setBeanProperty(ds, "ConnectionAttributes", "bad"); try { ds.getConnection(); fail ("should have seen an error"); } catch (SQLException e) { if (usingEmbedded()) assertSQLState("XJ028", e); else if (usingDerbyNetClient()) assertSQLState("XJ212", e); } // ConnectionPoolDataSource - bad connatr syntax ConnectionPoolDataSource cpds = J2EEDataSource.getConnectionPoolDataSource(); JDBCDataSource.setBeanProperty(cpds, "ConnectionAttributes", "bad"); try { cpds.getPooledConnection(); fail ("should have seen an error"); } catch (SQLException e) { assertSQLState("XJ028", e); } // XADataSource - bad connattr syntax"); XADataSource xads = J2EEDataSource.getXADataSource(); JDBCDataSource.setBeanProperty(xads, "ConnectionAttributes", "bad"); try { xads.getXAConnection(); fail ("should have seen an error"); } catch (SQLException e) { assertSQLState("XJ028", e); } } // End testBadConnectionAttributeSyntax /** * Check that database name set using setConnectionAttributes is not used * by ClientDataSource. This method tests DERBY-1130. * * @throws SQLException */ public void testClientDSConnectionAttributes() throws SQLException { if (usingEmbedded()) return; ClientDataSource ds = new ClientDataSource(); // DataSource - EMPTY; expect error 08001 in all cases // 08001: Required Derby DataSource property databaseName not set. dsConnectionRequests(new String[] {"08001","08001","08001","08001", "08001","08001","08001","08001","08001"}, ds); // DataSource - connectionAttributes=databaseName=<valid name> ds.setConnectionAttributes("databaseName=" + dbName); dsConnectionRequests(new String[] {"08001","08001","08001","08001", "08001","08001","08001","08001","08001"}, ds); ds.setConnectionAttributes(null); // Test that (invalid) database name specified in connection // attributes is not used // DataSource - databaseName=<valid db> and // connectionAttributes=databaseName=kangaroo ds.setConnectionAttributes("databaseName=kangaroo"); ds.setDatabaseName(dbName); dsConnectionRequests(new String[] {"OK","08001","OK","OK", "08001","08001","OK","OK","OK"}, ds); ds.setConnectionAttributes(null); ds.setDatabaseName(null); // now with ConnectionPoolDataSource ClientConnectionPoolDataSource cpds = new ClientConnectionPoolDataSource(); // ConnectionPoolDataSource - EMPTY dsConnectionRequests(new String[] {"08001","08001","08001","08001", "08001","08001","08001","08001","08001"}, (ConnectionPoolDataSource)cpds); // ConnectionPoolDataSource // - connectionAttributes=databaseName=<valid dbname> cpds.setConnectionAttributes("databaseName=" + dbName); dsConnectionRequests(new String[] {"08001","08001","08001","08001", "08001","08001","08001","08001","08001"}, (ConnectionPoolDataSource)cpds); cpds.setConnectionAttributes(null); // Test that database name specified in connection attributes is // not used // ConnectionPoolDataSource - databaseName=wombat and // connectionAttributes=databaseName=kangaroo cpds.setConnectionAttributes("databaseName=kangaroo"); cpds.setDatabaseName(dbName); dsConnectionRequests(new String[] {"OK","08001","OK","OK","08001","08001","OK","OK","OK"}, (ConnectionPoolDataSource)cpds); cpds.setConnectionAttributes(null); cpds.setDatabaseName(null); // now with XADataSource ClientXADataSource xads = new ClientXADataSource(); // XADataSource - EMPTY dsConnectionRequests(new String[] {"08001","08001","08001","08001", "08001","08001","08001","08001","08001"}, (XADataSource) xads); // XADataSource - connectionAttributes=databaseName=<valid dbname> xads.setConnectionAttributes("databaseName=wombat"); dsConnectionRequests(new String[] {"08001","08001","08001","08001", "08001","08001","08001","08001","08001"}, (XADataSource) xads); xads.setConnectionAttributes(null); // Test that database name specified in connection attributes is not used // XADataSource - databaseName=wombat and // connectionAttributes=databaseName=kangaroo xads.setConnectionAttributes("databaseName=kangaroo"); xads.setDatabaseName("wombat"); dsConnectionRequests(new String[] {"OK","08001","OK","OK","08001","08001","OK","OK","OK"}, (XADataSource) xads); xads.setConnectionAttributes(null); xads.setDatabaseName(null); } // End testClientDSConnectionAttributes // Following test is similar to testClientDSConnectionAttributes, but // for embedded datasources. // This subtest does not run for network server, it uses // setAttributesAsPassword, which isn't supported for client datasources. public void testDSRequestAuthentication() throws SQLException { if (usingDerbyNetClient()) return; EmbeddedDataSource ds = new EmbeddedDataSource(); // DataSource - EMPTY dsConnectionRequests(new String[] { "XJ004","XJ004","XJ004","XJ004", "XJ004","XJ004","XJ004","XJ004","XJ004"}, ds); // DataSource - connectionAttributes=databaseName=wombat"); ds.setConnectionAttributes("databaseName=" + dbName); dsConnectionRequests(new String[] { "XJ004","XJ004","XJ004","XJ004", "XJ004","XJ004","XJ004","XJ004","XJ004"}, ds); ds.setConnectionAttributes(null); // DataSource - attributesAsPassword=true"); ds.setAttributesAsPassword(true); dsConnectionRequests(new String[] { "XJ004","XJ004","XJ004","XJ028", "XJ028","XJ004","XJ004","XJ004","XJ004"}, ds); ds.setAttributesAsPassword(false); // DataSource - attributesAsPassword=true, // connectionAttributes=databaseName=kangaroo"); ds.setAttributesAsPassword(true); ds.setConnectionAttributes("databaseName=kangaroo"); dsConnectionRequests(new String[] { "XJ004","XJ004","XJ004","XJ028", "XJ028","XJ004","XJ004","XJ004","XJ004"}, ds); ds.setAttributesAsPassword(false); ds.setConnectionAttributes(null); // Enable Authentication; setDatabaseProperty("derby.user.fred", "wilma"); setDatabaseProperty("derby.user.APP", "APP"); setDatabaseProperty("derby.authentication.provider", "BUILTIN"); setDatabaseProperty("derby.connection.requireAuthentication", "true"); ds.setShutdownDatabase("shutdown"); try { ds.getConnection(); } catch (SQLException sqle) { assertSQLState("XJ015", sqle); } ds.setDatabaseName(null); ds.setShutdownDatabase(null); // "AUTHENTICATION NOW ENABLED"); // DataSource - attributesAsPassword=true ds.setAttributesAsPassword(true); dsConnectionRequests(new String[] { "XJ004","XJ004","XJ004","XJ028", "XJ028","XJ004","XJ004","XJ004","XJ004"}, ds); ds.setAttributesAsPassword(false); // ensure the DS property password is not treated as a set of // attributes. // DataSource - attributesAsPassword=true, user=fred, // password=databaseName=wombat;password=wilma ds.setAttributesAsPassword(true); ds.setUser("fred"); ds.setPassword("databaseName=" + dbName + ";password=wilma"); dsConnectionRequests(new String[] { "XJ004","XJ004","XJ004","XJ028", "XJ028","XJ004","XJ004","XJ004","XJ004"}, ds); ds.setAttributesAsPassword(false); ds.setUser(null); ds.setPassword(null); ds = null; // now with ConnectionPoolDataSource EmbeddedConnectionPoolDataSource cpds = new EmbeddedConnectionPoolDataSource(); // ConnectionPoolDataSource - EMPTY dsConnectionRequests(new String[] { "XJ004","XJ004","XJ004","XJ004", "XJ004","XJ004","XJ004","XJ004","XJ004"}, (ConnectionPoolDataSource)cpds); // ConnectionPoolDataSource - // connectionAttributes=databaseName=wombat cpds.setConnectionAttributes("databaseName=" + dbName); dsConnectionRequests(new String[] { "XJ004","XJ004","XJ004","XJ004", "XJ004","XJ004","XJ004","XJ004","XJ004"}, (ConnectionPoolDataSource)cpds); cpds.setConnectionAttributes(null); // ConnectionPoolDataSource - attributesAsPassword=true cpds.setAttributesAsPassword(true); dsConnectionRequests(new String[] { "XJ004","XJ004","XJ004","XJ028", "XJ028","XJ004","XJ004","XJ004","XJ004"}, (ConnectionPoolDataSource)cpds); cpds.setAttributesAsPassword(false); // ensure the DS property password is not treated as a set of // attributes. // ConnectionPoolDataSource - attributesAsPassword=true, // user=fred, password=databaseName=wombat;password=wilma"); cpds.setAttributesAsPassword(true); cpds.setUser("fred"); cpds.setPassword("databaseName=" + dbName + ";password=wilma"); dsConnectionRequests(new String[] { "XJ004","XJ004","XJ004","XJ028", "XJ028","XJ004","XJ004","XJ004","XJ004"}, (ConnectionPoolDataSource)cpds); cpds.setAttributesAsPassword(false); cpds.setUser(null); cpds.setPassword(null); cpds = null; // now with XADataSource EmbeddedXADataSource xads = new EmbeddedXADataSource(); // XADataSource - EMPTY dsConnectionRequests(new String[] { "08006","08006","08006","08006", "08006","08006","08006","08006","08006"}, (XADataSource) xads); // XADataSource - databaseName=wombat xads.setDatabaseName(dbName); dsConnectionRequests(new String[] { "08004","08004","08004","OK", "08004","08004","08004","08004","08004"}, (XADataSource) xads); xads.setDatabaseName(null); // XADataSource - connectionAttributes=databaseName=wombat"); xads.setConnectionAttributes("databaseName=" + dbName); dsConnectionRequests(new String[] { "08006","08006","08006","08006", "08006","08006","08006","08006","08006"}, (XADataSource) xads); xads.setConnectionAttributes(null); // XADataSource - attributesAsPassword=true xads.setAttributesAsPassword(true); dsConnectionRequests(new String[] { "08006","08006","08006","08006", "08006","08006","08006","08006","08006"}, (XADataSource) xads); xads.setAttributesAsPassword(false); // XADataSource - databaseName=wombat, attributesAsPassword=true xads.setDatabaseName(dbName); xads.setAttributesAsPassword(true); dsConnectionRequests(new String[] { "08004","08004","08004","XJ028", "XJ028","08004","08004","OK","08004"}, (XADataSource) xads); xads.setAttributesAsPassword(false); xads.setDatabaseName(null); setDatabaseProperty("derby.connection.requireAuthentication", "false"); TestConfiguration.getCurrent().shutdownDatabase(); } /** * Check that traceFile connection attribute functions correctly. * tracefile was tested in checkDriver, but not for DataSources. * tracefile= was used in datasourcepermissions_net, but that's * incorrect syntax. Note that we're not checking the contents of * the tracefile. * * Note also that this test cannot run against a remote server. * * @throws SQLException */ public void testClientTraceFileDSConnectionAttribute() throws SQLException { if (usingEmbedded()) return; String traceFile; // DataSource DataSource ds = JDBCDataSource.getDataSource(); // DataSource - setTransationAttributes traceFile = "trace1.out"; JDBCDataSource.setBeanProperty(ds, "connectionAttributes", "traceFile="+traceFile); // In this scenario, we *only* get a tracefile, if we first get a // successful connection, followed by an unsuccessful connection. // So, we cannot just use ds.getConnection() dsGetBadConnection(ds); JDBCDataSource.clearStringBeanProperty(ds, "connectionAttributes"); // DataSource - setTraceFile traceFile = "trace2.out"; JDBCDataSource.setBeanProperty(ds, "traceFile", traceFile); ds.getConnection(); ds = null; // now with ConnectionPoolDataSource ConnectionPoolDataSource cpds = J2EEDataSource.getConnectionPoolDataSource(); traceFile = "trace3.out"; JDBCDataSource.setBeanProperty(cpds, "connectionAttributes", "traceFile="+traceFile); // DERBY-2468 - trace3.out does not get created ((ClientConnectionPoolDataSource) cpds).getConnection(); JDBCDataSource.clearStringBeanProperty(cpds, "connectionAttributes"); traceFile = "trace4.out"; JDBCDataSource.setBeanProperty(cpds, "traceFile", traceFile); ((ClientConnectionPoolDataSource) cpds).getConnection(); cpds = null; // now with XADataSource XADataSource xads = J2EEDataSource.getXADataSource(); traceFile = "trace5.out"; JDBCDataSource.setBeanProperty(xads, "connectionAttributes", "traceFile="+traceFile); ((ClientXADataSource) xads).getConnection(); // DERBY-2468 - trace5.out does not get created JDBCDataSource.clearStringBeanProperty(xads, "connectionAttributes"); traceFile = "trace6.out"; JDBCDataSource.setBeanProperty(xads, "traceFile", traceFile); ((ClientXADataSource) xads).getConnection(); assertTraceFilesExist(); } /* -- Helper Methods for testClientTraceFileDSConnectionAttribute -- */ private static void dsGetBadConnection(DataSource ds) { // first get a good connection, or we don't get a // traceFile when using connectionattributes. // also, we do not get a tracefile that way unless // we see an error. // with setTraceFile, we *always* get a file, even // with just a successful connection. try { ds.getConnection(); ds.getConnection(null, null); fail("expected an sqlException"); } catch (SQLException sqle) { assertSQLState("08001", sqle); } } /** * Check that trace file exists in <framework> directory */ private static void assertTraceFilesExist() { AccessController.doPrivileged(new java.security.PrivilegedAction() { public Object run() { for (int i=0 ; i < 6 ; i++) { String traceFileName = "trace" + (i+1) + ".out"; File traceFile = new File(traceFileName); if (i == 2 || i == 4) continue; else { assertTrue(traceFile.exists()); } } return null; } }); } /** * Check that messageText connection attribute functions correctly. * retrievemessagetext was tested in checkdriver, and derbynet/testij, * but not tested for datasources, and in datasourcepermissions_net, * but as it has nothing to do with permissions/authentication, * this test seems a better place for it. * * @throws SQLException */ public void testClientMessageTextConnectionAttribute() throws SQLException { if (usingEmbedded()) return; String retrieveMessageTextProperty = "retrieveMessageText"; Connection conn; // DataSource // DataSource - retrieveMessageTextProperty ClientDataSource ds = new ClientDataSource(); ds.setDatabaseName(dbName); ds.setConnectionAttributes(retrieveMessageTextProperty + "=false"); conn = ds.getConnection(); assertMessageText(conn,"false"); conn.close(); // now try with retrieveMessageText = true ds.setConnectionAttributes(retrieveMessageTextProperty + "=true"); conn = ds.getConnection(); assertMessageText(conn,"true"); ds.setConnectionAttributes(null); conn.close(); // now with ConnectionPoolDataSource // ConnectionPoolDataSource - retrieveMessageTextProperty ClientConnectionPoolDataSource cpds = new ClientConnectionPoolDataSource(); cpds.setDatabaseName(dbName); cpds.setConnectionAttributes( retrieveMessageTextProperty + "=false"); conn = cpds.getConnection(); assertMessageText(conn,"false"); conn.close(); cpds.setConnectionAttributes( retrieveMessageTextProperty + "=true"); conn = cpds.getConnection(); assertMessageText(conn,"true"); cpds.setConnectionAttributes(null); conn.close(); // now with XADataSource ClientXADataSource xads = new ClientXADataSource(); //XADataSource - retrieveMessageTextProperty xads.setDatabaseName(dbName); xads.setConnectionAttributes( retrieveMessageTextProperty + "=false"); conn = xads.getConnection(); assertMessageText(conn,"false"); conn.close(); xads.setConnectionAttributes( retrieveMessageTextProperty + "=true"); conn = xads.getConnection(); assertMessageText(conn,"true"); conn.close(); xads.setConnectionAttributes(null); } /* -- Helper Method for testClientMessageTextDSConnectionAttribute -- */ private static void assertMessageText( Connection conn, String retrieveMessageTextValue) throws SQLException { try { conn.createStatement().executeQuery("SELECT * FROM APP.NOTTHERE"); } catch (SQLException e) { assertSQLState("42X05", e); if (retrieveMessageTextValue.equals("true") ) { assertTrue(e.getMessage().indexOf("does not exist") >= 0); } else { // retrieveMessageTextValue is false assertTrue(e.getMessage().indexOf("does not exist") == -1); } } } /** * Check that messageText connection attribute functions correctly. * retrievemessagetext was tested in checkdriver, and derbynet/testij * (but not tested for datasources), and in datasourcepermissions_net, * but as it has nothing to do with permissions/authentication, * this test seems a better place for it. * * @throws SQLException */ public void testDescriptionProperty() throws SQLException, Exception { // DataSource - setDescription subTestDataSourceDescription(JDBCDataSource.getDataSource()); // ConnectionPoolDataSource - setDescription subTestDataSourceDescription( (DataSource) J2EEDataSource.getConnectionPoolDataSource()); // XADataSource - setDescription subTestDataSourceDescription( (DataSource) J2EEDataSource.getXADataSource()); } /** * Utility method for testing setting and fetching the description * property on a data source. */ private void subTestDataSourceDescription(DataSource ds) throws Exception { String setDescription = "Everything you ever wanted to know about this datasource"; JDBCDataSource.setBeanProperty(ds, "description", setDescription); ds.getConnection(); assertEquals(setDescription, JDBCDataSource.getBeanProperty(ds, "description")); JDBCDataSource.clearStringBeanProperty(ds, "description"); assertNull(JDBCDataSource.getBeanProperty(ds, "description")); } /* ------------------ JDBC30 (and up) Fixtures ------------------ */ public void testXAHoldability() throws SQLException, XAException { // DERBY-2533 - // This test, when run with Network server / DerbyNetClient // leaves the database is a bad state which results in a // network protocol error if (usingDerbyNetClient()) return; // START XA HOLDABILITY TEST XADataSource dscsx = J2EEDataSource.getXADataSource(); XAConnection xac = dscsx.getXAConnection(); XAResource xr = xac.getXAResource(); Xid xid = new cdsXid(25, (byte) 21, (byte) 01); Connection conn1 = xac.getConnection(); // check that autocommit is true; default for a connection assertTrue(conn1.getAutoCommit()); // check that holdability is HOLD_CURSORS_OVER_COMMIT in a default // CONNECTION(not in xa transaction yet) assertEquals( ResultSet.HOLD_CURSORS_OVER_COMMIT, conn1.getHoldability()); // start a global transaction and default holdability and // autocommit will be switched to match Derby XA restrictions xr.start(xid, XAResource.TMNOFLAGS); // So, now autocommit should be false for connection because it is // part of the global transaction assertFalse(conn1.getAutoCommit()); // Connection's holdability is now CLOSE_CURSORS_AT_COMMIT because // it is part of the global transaction assertEquals( ResultSet.CLOSE_CURSORS_AT_COMMIT, conn1.getHoldability()); xr.end(xid, XAResource.TMSUCCESS); conn1.commit(); conn1.close(); xid = new cdsXid(27, (byte) 21, (byte) 01); xr.start(xid, XAResource.TMNOFLAGS); conn1 = xac.getConnection(); // CONNECTION(in xa transaction) HOLDABILITY: assertEquals( ResultSet.CLOSE_CURSORS_AT_COMMIT, conn1.getHoldability()); // Autocommit on Connection inside global transaction should be false assertFalse(conn1.getAutoCommit()); xr.end(xid, XAResource.TMSUCCESS); conn1.rollback(); Connection conn = xac.getConnection(); conn.setAutoCommit(false); conn.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT); // CONNECTION(non-xa transaction) HOLDABILITY: assertEquals( ResultSet.CLOSE_CURSORS_AT_COMMIT, conn.getHoldability()); Statement s = conn.createStatement(); // STATEMENT HOLDABILITY: assertEquals( ResultSet.CLOSE_CURSORS_AT_COMMIT, s.getResultSetHoldability()); s.executeUpdate("insert into hold_30 values " + "(1,'init2'), (2, 'init3'), (3,'init3')"); s.executeUpdate("insert into hold_30 values " + "(4,'init4'), (5, 'init5'), (6,'init6')"); s.executeUpdate("insert into hold_30 values " + "(7,'init7'), (8, 'init8'), (9,'init9')"); // STATEMENT HOLDABILITY : assertEquals( ResultSet.CLOSE_CURSORS_AT_COMMIT, s.getResultSetHoldability()); Statement sh = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT); PreparedStatement psh = conn.prepareStatement( "select id from hold_30 for update", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT); CallableStatement csh = conn.prepareCall( "select id from hold_30 for update", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT); // STATEMENT HOLDABILITY : assertEquals( ResultSet.HOLD_CURSORS_OVER_COMMIT, sh.getResultSetHoldability()); // PREPARED STATEMENT HOLDABILITY : assertEquals( ResultSet.HOLD_CURSORS_OVER_COMMIT, psh.getResultSetHoldability()); // CALLABLE STATEMENT HOLDABILITY : assertEquals( ResultSet.HOLD_CURSORS_OVER_COMMIT, csh.getResultSetHoldability()); ResultSet rsh = sh.executeQuery("select id from hold_30 for update"); rsh.next(); assertEquals(1, rsh.getInt(1)); // H@1 id rsh.next(); assertEquals(2, rsh.getInt(1)); // H@2 id conn.commit(); rsh.next(); assertEquals(3, rsh.getInt(1)); // H@3 id conn.commit(); xid = new cdsXid(23, (byte) 21, (byte) 01); xr.start(xid, XAResource.TMNOFLAGS); Statement stmtInsideGlobalTransaction = conn.createStatement(); PreparedStatement prepstmtInsideGlobalTransaction = conn.prepareStatement("select id from hold_30"); CallableStatement callablestmtInsideGlobalTransaction = conn.prepareCall("select id from hold_30"); // CONNECTION(xa) HOLDABILITY: assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, conn.getHoldability()); // STATEMENT(this one was created with holdability false, outside the // global transaction. Check its holdability inside global transaction assertEquals( ResultSet.CLOSE_CURSORS_AT_COMMIT, s.getResultSetHoldability()); // STATEMENT(this one was created with holdability true, // outside the global transaction. Check its holdability inside // global transaction: // DERBY-2531: network server / DerbyNetClient has a different value // than embedded. if (usingEmbedded()) assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, sh.getResultSetHoldability()); else if (usingDerbyNetClient()) assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, sh.getResultSetHoldability()); // STATEMENT(this one was created with default holdability inside this // global transaction. Check its holdability: assertEquals( ResultSet.CLOSE_CURSORS_AT_COMMIT, stmtInsideGlobalTransaction.getResultSetHoldability()); // PREPAREDSTATEMENT(this one was created with default holdability // inside this global transaction. Check its holdability: assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, prepstmtInsideGlobalTransaction.getResultSetHoldability()); // CALLABLESTATEMENT(this one was created with default holdability // inside this global transaction. Check its holdability: assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, callablestmtInsideGlobalTransaction.getResultSetHoldability()); ResultSet rsx = s.executeQuery("select id from hold_30 for update"); rsx.next(); assertEquals(1, rsx.getInt(1)); // X@1 id rsx.next(); assertEquals(2, rsx.getInt(1)); // X@2 id xr.end(xid, XAResource.TMSUCCESS); // result set should not be useable, since it is part of a detached // XAConnection try { rsx.next(); fail("rsx's connection not active id "); } catch (SQLException sqle) { assertSQLState("08003", sqle); } // result set should not be useable, it should have been closed by // the xa start. try { rsh.next(); fail("rsh's connection not active id "); } catch (SQLException sqle) { if (usingEmbedded()) assertSQLState("08003", sqle); else if (usingDerbyNetClient()) assertSQLState("XCL16", sqle); } // resume XA transaction and keep using rs"); xr.start(xid, XAResource.TMJOIN); Statement stmtAfterGlobalTransactionResume = conn.createStatement(); PreparedStatement prepstmtAfterGlobalTransactionResume = conn.prepareStatement("select id from hold_30"); CallableStatement callablestmtAfterGlobalTransactionResume = conn.prepareCall("select id from hold_30"); // Check holdability of various jdbc objects after resuming XA // transaction // CONNECTION(xa) HOLDABILITY: assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT,conn.getHoldability()); // STATEMENT(this one was created with holdability false, outside the // global transaction. Check its holdability inside global transaction assertEquals( ResultSet.CLOSE_CURSORS_AT_COMMIT, s.getResultSetHoldability()); // STATEMENT(this one was created with holdability true, outside the // global transaction. Check its holdability inside global transaction if (usingEmbedded()) assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, sh.getResultSetHoldability()); else if (usingDerbyNetClient()) assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, sh.getResultSetHoldability()); // STATEMENT(this one was created with default holdability inside the // global transaction when it was first started. Check its holdability assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, stmtInsideGlobalTransaction.getResultSetHoldability()); // PREPAREDSTATEMENT(this one was created with default holdability // inside the global transaction when it was first started. Check its // holdability) assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, prepstmtInsideGlobalTransaction.getResultSetHoldability()); // CALLABLESTATEMENT(this one was created with default holdability // inside the global transaction when it was first started. Check its // holdability) HOLDABILITY assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, callablestmtInsideGlobalTransaction.getResultSetHoldability()); // STATEMENT(this one was created with default holdability after the // global transaction was resumed. Check its holdability assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, stmtAfterGlobalTransactionResume.getResultSetHoldability()); // PREPAREDSTATEMENT(this one was created with default holdability // after the global transaction was resumed. Check its holdability assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, prepstmtAfterGlobalTransactionResume.getResultSetHoldability()); // CALLABLESTATEMENT(this one was created with default holdability // after the global transaction was resumed. Check its holdability assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, callablestmtAfterGlobalTransactionResume.getResultSetHoldability()); // DERBY-1370 if (usingEmbedded()) { // Network XA BUG gives result set closed rsx.next(); assertEquals(3, rsx.getInt(1)); // X@3 id } xr.end(xid, XAResource.TMSUCCESS); if (xr.prepare(xid) != XAResource.XA_RDONLY) xr.commit(xid, false); // try again once the xa transaction has been committed. try { rsx.next(); fail("rsx's connection not active id (B)"); } catch (SQLException sqle) { assertSQLState("XCL16", sqle); } try { rsh.next(); fail ("rsh's should be closed (B)"); } catch (SQLException sqle) { assertSQLState("XCL16", sqle); } // Set connection to hold conn.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); // CONNECTION(held) HOLDABILITY: assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, conn.getHoldability()); xid = new cdsXid(24, (byte) 21, (byte) 01); xr.start(xid, XAResource.TMNOFLAGS); // CONNECTION(xa) HOLDABILITY: assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, conn.getHoldability()); try { conn.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); fail("allowed to set hold mode in xa transaction"); } catch (SQLException sqle) { assertSQLState("XJ05C", sqle); } // JDBC 4.0 (proposed final draft) section 16.1.3.1 allows Statements // to be created with a different holdability if the driver cannot // support it. In this case the driver does not support holdability in // a global transaction, so a valid statement is returned with close // cursors on commit. Statement shxa = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT); // HOLDABLE Statement in global xact " assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, s.getResultSetHoldability()); assertEquals(10000, conn.getWarnings().getErrorCode()); shxa.close(); shxa = conn.prepareStatement("select id from hold_30", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT); // HOLDABLE PreparedStatement in global xact assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, s.getResultSetHoldability()); assertEquals(10000, conn.getWarnings().getErrorCode()); shxa.close(); shxa = conn.prepareCall("CALL SYSCS_UTIL.SYSCS_CHECKPOINT_DATABASE()", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT); // HOLDABLE CallableStatement in global xact: assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, s.getResultSetHoldability()); assertEquals(10000, conn.getWarnings().getErrorCode()); shxa.close(); // check we can use a holdable statement set up in local mode. // holdability is downgraded, tested in XATest.java // DERBY-1370 if(usingEmbedded()) { // STATEMENT HOLDABILITY: assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, sh.getResultSetHoldability()); sh.executeQuery("select id from hold_30").close(); sh.execute("select id from hold_30"); sh.getResultSet().close(); // PREPARED STATEMENT HOLDABILITY: assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, psh.getResultSetHoldability()); psh.executeQuery().close(); psh.execute(); psh.getResultSet().close(); // CALLABLE STATEMENT HOLDABILITY: assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, csh.getResultSetHoldability()); csh.executeQuery().close(); csh.execute(); csh.getResultSet().close(); } // but an update works sh.executeUpdate("insert into hold_30 values(10, 'init10')"); xr.end(xid, XAResource.TMSUCCESS); // CONNECTION(held) HOLDABILITY: assertEquals( ResultSet.HOLD_CURSORS_OVER_COMMIT, conn.getHoldability()); s.close(); sh.close(); csh.close(); psh.close(); rsx.close(); stmtInsideGlobalTransaction.close(); prepstmtInsideGlobalTransaction.close(); callablestmtInsideGlobalTransaction.close(); stmtAfterGlobalTransactionResume.close(); prepstmtAfterGlobalTransactionResume.close(); callablestmtAfterGlobalTransactionResume.close(); conn.close(); xac.close(); TestConfiguration.getCurrent().shutdownDatabase(); // END XA HOLDABILITY TEST"); } /** * Tests for DERBY-1144 * * This test tests that holdability, autocomit, and transactionIsolation * are reset on getConnection for PooledConnections obtaind from * connectionPoolDataSources * * DERBY-1134 has been filed for more comprehensive testing of client * connection state. * * @throws SQLException */ public void timeoutTestDerby1144PooledDS() throws SQLException { PooledConnection pc1 = null; // Test holdability ConnectionPoolDataSource ds = J2EEDataSource.getConnectionPoolDataSource(); pc1 = ds.getPooledConnection(); assertPooledConnHoldability("PooledConnection", pc1); pc1.close(); // Test autocommit pc1 = ds.getPooledConnection(); assertPooledConnAutoCommit("PooledConnection", pc1); pc1.close(); // Test pooled connection isolation pc1 = ds.getPooledConnection(); assertPooledConnIso("PooledConnection" , pc1); pc1.close(); } public void timeoutTestDerby1144XADS() throws SQLException { XADataSource xds = J2EEDataSource.getXADataSource(); // Test xa connection isolation XAConnection xpc1 = xds.getXAConnection(); assertPooledConnIso("XAConnection", xpc1); xpc1.close(); } /* -------------- Helper Methods for testDerby1144 -------------- */ /** * Make sure autocommit gets reset on PooledConnection.getConnection() * @param desc description of connection * @param pc1 pooled connection to test * @throws SQLException */ private static void assertPooledConnAutoCommit( String desc, PooledConnection pc1) throws SQLException { // ** Verify autoCommit state Connection conn = pc1.getConnection(); conn.setAutoCommit(true); // reset the connection and see if the autocommit has changed conn = pc1.getConnection(); boolean autocommit = conn.getAutoCommit(); // autocommit should get reset on getConnection assertTrue(autocommit); conn.close(); } /** * Checks that Holdability gets reset on PooledConnection.getConnection() * @param desc * @param pc1 * @throws SQLException */ private static void assertPooledConnHoldability( String desc, PooledConnection pc1) throws SQLException { // **Test holdability state Connection conn = pc1.getConnection(); conn.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT); // reset the connection and see if the holdability gets reset // to HOLD_CURSORS_OVER_COMMIT conn = pc1.getConnection(); assertConnHoldability(conn, ResultSet.HOLD_CURSORS_OVER_COMMIT); conn.close(); } /** * Verify connection holdablity is expected holdability * @param conn * @param expectedHoldability * * @throws SQLException */ private static void assertConnHoldability( Connection conn, int expectedHoldability) throws SQLException { int holdability = conn.getHoldability(); assertEquals (expectedHoldability, holdability); } /** * Test that isolation is reset on PooledConnection.getConnection() * @param pooledConnType Descripiton of the type of pooled connection * @param pc PooledConnection or XAConnection * @throws SQLException */ private void assertPooledConnIso( String pooledConnType, PooledConnection pc) throws SQLException { Connection conn = pc.getConnection(); setupDerby1144Table(conn); // *** Test isolation level reset on conntype.getConnection() conn.setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED); assertIsoLocks(conn, Connection.TRANSACTION_READ_UNCOMMITTED); conn.close(); //Get a new connection with pooledConnType.getConnection() // Isolation level should be reset to READ_COMMITTED Connection newconn = pc.getConnection(); assertIsoLocks(newconn, Connection.TRANSACTION_READ_COMMITTED); } /* * insert two rows into the simple table for DERBY-1144 tests * @param conn * @throws SQLException */ private static void setupDerby1144Table(Connection conn) throws SQLException { Statement stmt = conn.createStatement(); stmt.executeUpdate("INSERT INTO intTable VALUES(1)"); stmt.executeUpdate("INSERT INTO intTable VALUES(2)"); conn.commit (); } /* * Checks locks for designated isolation level on the connection. * Currently only supports TRANSACTION_READ_COMMITTED and * TRANSACTION_READ_UNCOMMITTED * @param conn Connection to test * @param isoLevel expected isolation level * */ private void assertIsoLocks(Connection conn, int expectedIsoLevel) throws SQLException { int conniso = conn.getTransactionIsolation(); assertEquals(expectedIsoLevel, conniso); boolean selectTimedOut = selectTimesoutDuringUpdate(conn); // expect a lock timeout for READ_COMMITTED switch (conniso) { case Connection.TRANSACTION_READ_UNCOMMITTED: assertFalse(selectTimedOut); break; case Connection.TRANSACTION_READ_COMMITTED: assertTrue(selectTimedOut); break; default: System.out.println("No test support for isolation level"); } } /* * Determine if a select on this connection during update will timeout. * Used to establish isolation level. If the connection isolation level * is <code> Connection.TRANSACTION_READ_UNCOMMITTED </code> it will not * timeout. Otherwise it should. * * @param conn Connection to test. * @return true if the select got a lock timeout, false otherwise. */ private boolean selectTimesoutDuringUpdate(Connection conn) throws SQLException { Connection updateConn=null; conn.setAutoCommit(false); try { // create another connection and do an update but don't commit updateConn = openDefaultConnection(); updateConn.setAutoCommit(false); // First update the rows on the update connection Statement upStmt = updateConn.createStatement(); upStmt.executeUpdate("update intTable set i = 3"); // now see if we can select them Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery("Select * from intTable"); while (rs.next()){}; rs.close(); } catch (SQLException e) { if (e.getSQLState().equals("40XL1")) { // If we got a lock timeout this is not read uncommitted return true; } } finally { try { conn.rollback(); updateConn.rollback(); }catch (SQLException se) { se.printStackTrace(); } } return false; } /* -------------------- Other Helper Methods -------------------- */ private void assertConnectionState( int expectedHoldability, int expectedIsolation, boolean expectedCommitSetting, boolean expectedReadOnly, Connection conn) throws SQLException { assertEquals(expectedHoldability, conn.getHoldability()); assertEquals(expectedIsolation, conn.getTransactionIsolation()); assertEquals(expectedCommitSetting, conn.getAutoCommit()); assertEquals(expectedReadOnly, conn.isReadOnly()); } private static void setDatabaseProperty(String property, String value) throws SQLException { DataSource ds = JDBCDataSource.getDataSource(); Connection cadmin = ds.getConnection(); CallableStatement cs = cadmin.prepareCall( "CALL SYSCS_UTIL.SYSCS_SET_DATABASE_PROPERTY(?, ?)"); cs.setString(1, property); cs.setString(2, value); cs.execute(); cs.close(); cadmin.close(); } private void setHoldability(Connection conn, boolean hold) throws SQLException { conn.setHoldability(hold ? ResultSet.HOLD_CURSORS_OVER_COMMIT : ResultSet.CLOSE_CURSORS_AT_COMMIT); } private static void dsConnectionRequests( String[] expectedValues, DataSource ds) { // checks currently only implemented for embedded if (usingEmbedded()) { SecurityCheck.assertSourceSecurity(ds, "javax.sql.DataSource"); } try { ds.getConnection(); if (!expectedValues[0].equals("OK")) fail (" expected connection to fail, but was OK"); } catch (SQLException sqle) { assertSQLState(expectedValues[0], sqle); } dsConnectionRequest(expectedValues[1], ds, null, null); dsConnectionRequest(expectedValues[2], ds, "fred", null); dsConnectionRequest(expectedValues[3], ds, "fred", "wilma"); dsConnectionRequest(expectedValues[4], ds, null, "wilma"); dsConnectionRequest( expectedValues[5], ds, null, "databaseName=wombat"); dsConnectionRequest( expectedValues[6], ds, "fred", "databaseName=wombat"); dsConnectionRequest(expectedValues[7], ds, "fred", "databaseName=wombat;password=wilma"); dsConnectionRequest(expectedValues[8], ds, "fred", "databaseName=wombat;password=betty"); } private static void dsConnectionRequest( String expectedValue, DataSource ds, String user, String ConnAttr) { try { ds.getConnection(user, ConnAttr); if (!expectedValue.equals("OK")) fail (" expected connection to fail, but was OK"); } catch (SQLException sqle) { assertSQLState(expectedValue, sqle); } } private static void dsConnectionRequests( String[] expectedValues, ConnectionPoolDataSource ds) { try { ds.getPooledConnection(); if (!expectedValues[0].equals("OK")) fail (" expected connection to fail, but was OK"); } catch (SQLException sqle) { assertSQLState(expectedValues[0], sqle); } dsConnectionRequest(expectedValues[1], ds, null, null); dsConnectionRequest(expectedValues[2], ds, "fred", null); dsConnectionRequest(expectedValues[3], ds, "fred", "wilma"); dsConnectionRequest(expectedValues[4], ds, null, "wilma"); dsConnectionRequest( expectedValues[5], ds, null, "databaseName=wombat"); dsConnectionRequest( expectedValues[6], ds, "fred", "databaseName=wombat"); dsConnectionRequest(expectedValues[7], ds, "fred", "databaseName=wombat;password=wilma"); dsConnectionRequest(expectedValues[8], ds, "fred", "databaseName=wombat;password=betty"); } private static void dsConnectionRequest(String expectedValue, ConnectionPoolDataSource ds, String user, String ConnAttr) { try { ds.getPooledConnection(user, ConnAttr); if (!expectedValue.equals("OK")) fail (" expected connection to fail, but was OK"); } catch (SQLException sqle) { assertSQLState(expectedValue, sqle); } } private static void dsConnectionRequests( String[] expectedValues, XADataSource ds) { try { ds.getXAConnection(); if (!expectedValues[0].equals("OK")) fail (" expected connection to fail, but was OK"); } catch (SQLException sqle) { assertSQLState(expectedValues[0], sqle); } dsConnectionRequest(expectedValues[1], ds, null, null); dsConnectionRequest(expectedValues[2], ds, "fred", null); dsConnectionRequest(expectedValues[3], ds, "fred", "wilma"); dsConnectionRequest(expectedValues[4], ds, null, "wilma"); dsConnectionRequest( expectedValues[5], ds, null, "databaseName=" + dbName); dsConnectionRequest( expectedValues[6], ds, "fred", "databaseName=" + dbName); dsConnectionRequest(expectedValues[7], ds, "fred", "databaseName=" + dbName + ";password=wilma"); dsConnectionRequest(expectedValues[8], ds, "fred", "databaseName=" + dbName + ";password=betty"); } private static void dsConnectionRequest(String expectedValue, XADataSource ds, String user, String ConnAttr) { try { ds.getXAConnection(user, ConnAttr); if (!expectedValue.equals("OK")) fail (" expected connection to fail, but was OK"); } catch (SQLException sqle) { assertSQLState(expectedValue, sqle); } } protected void assertXAException(String tag, XAException xae) { // for all our cases, we expect some kind of closed con error // but the message is different for embedded vs. network server if (usingEmbedded()) assertEquals("No current connection.", xae.getMessage()); else if (usingDerbyNetClient()) assertEquals( "XAER_RMFAIL : No current connection.", xae.getMessage()); Throwable t = xae.getCause(); if (t instanceof SQLException) assertSQLState("08003", (SQLException)t); } private static void queryOnStatement(String expectedCursorName, int[] expectedValues, Connection conn, Statement s) throws SQLException { try { // DERBY-2531 // network server gives mismatched connections. See also // comment in testAllDataSources() if (usingEmbedded()) assertEquals(conn, s.getConnection()); resultSetQuery(expectedCursorName, expectedValues, s.executeQuery("select * from intTable")); } catch (SQLException sqle) { fail (" did not expect sql exception"); } } private static void resultSetQuery(String expectedCursorName, int[] expectedValues, ResultSet rs) throws SQLException { // checks currently only implemented for embedded if (usingEmbedded()) { SecurityCheck.assertSourceSecurity(rs, "java.sql.ResultSet"); } assertEquals(expectedCursorName, rs.getCursorName()); int index=0; while (rs.next()) { assertEquals(expectedValues[index], rs.getInt(1)); index++; } assertEquals(expectedValues.length, index++); rs.close(); } private static void assertLocks(int[] expectedValues, Connection conn) throws SQLException { Statement s = conn.createStatement(); ResultSet rs = s.executeQuery( "SELECT XID, sum(cast (LOCKCOUNT AS INT)) " + "FROM SYSCS_DIAG.LOCK_TABLE AS L GROUP BY XID"); // Don't output actual XID's as they tend for every catalog change // to the system. int xact_index = 0; while (rs.next()) { if (expectedValues != null) assertEquals(expectedValues[xact_index], rs.getInt(2)); else fail("expected no locks"); xact_index++; } if (expectedValues != null) assertEquals(expectedValues.length, xact_index); rs.close(); s.close(); } private void assertStatementState(int[] parameterExpectedValues, int[] expectedValues, Statement s) throws SQLException { assertEquals(expectedValues[0], s.getResultSetType()); assertEquals( expectedValues[1], s.getResultSetConcurrency()); assertEquals( expectedValues[2], s.getFetchDirection()); assertEquals(expectedValues[3], s.getFetchSize()); assertEquals(expectedValues[4], s.getMaxFieldSize()); assertEquals(expectedValues[5], s.getMaxRows()); assertEquals(expectedValues[6], s.getResultSetHoldability()); if (s instanceof PreparedStatement) { PreparedStatement ps = (PreparedStatement) s; ParameterMetaData psmd = ps.getParameterMetaData(); // Parameter count: assertEquals(parameterExpectedValues[0], psmd.getParameterCount()); for (int i = 1; i <= psmd.getParameterCount(); i++) { assertEquals(parameterExpectedValues[i], psmd.getParameterType(i)); } } } /** Create a statement with modified State. */ private Statement createFloatStatementForStateChecking( int[] StatementExpectedValues, Connection conn) throws SQLException { Statement s = internalCreateFloatStatementForStateChecking(conn); s.setCursorName("StokeNewington"); s.setFetchDirection(ResultSet.FETCH_REVERSE); s.setFetchSize(444); s.setMaxFieldSize(713); s.setMaxRows(19); // Create assertStatementState(null, StatementExpectedValues, s); return s; } private Statement internalCreateFloatStatementForStateChecking( Connection conn) throws SQLException { return conn.createStatement( ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT); } private PreparedStatement createFloatStatementForStateChecking( int[] parameterExpectedValues, int[] PreparedStatementExpectedValues, Connection conn, String sql) throws SQLException { PreparedStatement s = internalCreateFloatStatementForStateChecking(conn, sql); s.setCursorName("StokeNewington"); s.setFetchDirection(ResultSet.FETCH_REVERSE); s.setFetchSize(888); s.setMaxFieldSize(317); s.setMaxRows(91); // PreparedStatement Create assertStatementState( parameterExpectedValues, PreparedStatementExpectedValues, s); return s; } private PreparedStatement internalCreateFloatStatementForStateChecking( Connection conn, String sql) throws SQLException { return conn.prepareStatement(sql, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT); } private CallableStatement createFloatCallForStateChecking( int[] parameterExpectedValues, int[] CallableStatementExpectedValues, Connection conn, String sql) throws SQLException { CallableStatement s = internalCreateFloatCallForStateChecking(conn, sql); s.setCursorName("StokeNewington"); s.setFetchDirection(ResultSet.FETCH_REVERSE); s.setFetchSize(999); s.setMaxFieldSize(137); s.setMaxRows(85); // Callable Statement Create assertStatementState( parameterExpectedValues, CallableStatementExpectedValues, s); return s; } private CallableStatement internalCreateFloatCallForStateChecking( Connection conn, String sql) throws SQLException { return conn.prepareCall(sql, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT); } private void assertConnectionOK( Object[] expectedValues, String dsName, Connection conn) throws SQLException { assertEquals( ((Integer)expectedValues[0]).intValue(), conn.getHoldability()); // check it's a 3.0 connection object by checking if // set & release Savepoint is ok. try { conn.releaseSavepoint(conn.setSavepoint()); if (conn.getAutoCommit()) fail("expected a SQLExpection (savepoint with autocommit on"); if (!((String)expectedValues[1]).equals("OK")) fail("expected a SQLExpection (savepoint with autocommit on"); } catch (SQLException sqle) { // we expect savepoints exceptions because either // it's a global transaction, or it's in auto commit mode. if (conn.getAutoCommit()) assertSQLState("XJ010", sqle); else if (((String)expectedValues[1]).equals("OK")) fail ("unexpected JDBC 3.0 savepoint SQL Exception"); else assertSQLState((String)expectedValues[1], sqle); } // Running connection checks // connection checks currently only implemented for Embedded if (usingEmbedded()) { SecurityCheck.assertSourceSecurity(conn, "java.sql.Connection"); SecurityCheck.assertSourceSecurity( conn.getMetaData(), "java.sql.DatabaseMetaData"); } assertEquals(((Integer)expectedValues[2]).intValue(), conn.getTransactionIsolation()); assertEquals(((Boolean)expectedValues[3]).booleanValue(), conn.getAutoCommit()); assertEquals(((Boolean)expectedValues[4]).booleanValue(), conn.isReadOnly()); if (dsName.endsWith("DataSource")) assertNull(conn.getWarnings()); Statement s1 = conn.createStatement(); assertStatementOK(dsName, conn, s1); assertStatementOK(dsName, conn, conn.createStatement (ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)); Connection c1 = conn.getMetaData().getConnection(); // c1 and conn should be the same connection object. if (!usingDerbyNetClient() && dsName.indexOf("DataSource")>=0) assertEquals(c1, conn); // Derby-33 - setTypeMap on connection try { conn.setTypeMap(java.util.Collections.EMPTY_MAP); if (!((String)expectedValues[5]).equals("OK")) fail (" expected an sqlexception on setTypeMap(EMPTY_MAP)"); } catch (SQLException sqle) { if (((String)expectedValues[5]).equals("OK")) fail ("setTypeMap(EMPTY_MAP) failed "); else assertSQLState((String)expectedValues[5], sqle); } try { // expect 0A000 - not implemented for client, // XJ081 - invalid null value passed as map for embedded conn.setTypeMap(null); fail ("setTypeMap(null) should throw exception"); } catch (SQLException sqle) { assertSQLState((String)expectedValues[6], sqle); } try { // a populated map, not implemented java.util.Map map = new java.util.HashMap(); map.put("name", "class"); conn.setTypeMap(map); if (!((String)expectedValues[7]).equals("OK")) fail (" expected an sqlexception on setTypeMap(map)"); } catch (SQLException sqle) { if (((String)expectedValues[7]).equals("OK")) fail ("setTypeMap(valid value) failed "); else assertSQLState((String)expectedValues[7], sqle); } assertConnectionPreClose(dsName, conn); conn.close(); // method calls on a closed connection try { conn.close(); // expect no error } catch (SQLException sqle) { fail(" unexpected exception on <closedconn>.close() "); } try { conn.createStatement(); fail (dsName + " <closedconn>.createStatement(), " + "expected 08003 - No current connection"); } catch (SQLException sqle) { assertSQLState("08003", sqle); } try { s1.execute("values 1"); fail(dsName + " <closedstmt>.execute(), " + "expected 08003 - No current connection"); } catch (SQLException sqle) { assertSQLState("08003", sqle); } } private void assertConnectionPreClose(String dsName, Connection conn) throws SQLException { // before closing the connection, attempt to change holdability // and readOnly conn.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT); if (!dsName.equals("Nested2")) { try { conn.setReadOnly(true); } catch (SQLException sqle) { // cannot set read-only in an active transaction, & sometimes // connections are active at this point. assertSQLState("25501", sqle); } } } private void assertStatementOK(String dsName, Connection conn, Statement s) throws SQLException { // checks currently only implemented for embedded if (usingEmbedded()) { SecurityCheck.assertSourceSecurity(s, "java.sql.Statement"); } Connection c1 = s.getConnection(); if (c1 != conn) { // with DerbyNetClient and any kind of DataSource, this goes wrong if (!usingDerbyNetClient() && (dsName.indexOf("DataSource") >= 0)) fail ("incorrect connection object returned for Statement.getConnection()"); } s.addBatch("insert into intTable values 1"); s.addBatch("insert into intTable values 2,3"); int[] states = s.executeBatch(); if (states[0] != 1) fail ("invalid update count for first batch statement"); if (states[1] != 2) fail ("invalid update count for second batch statement"); ResultSet rs = s.executeQuery("VALUES 1"); if (rs.getStatement() != s) fail ("incorrect Statement object returned for ResultSet.getStatement for " + dsName); rs.close(); s.close(); } /** When a connection is being pooled, the underlying JDBC embedded connection object is re-used. As each application gets a new Connection object, that is really a wrapper around the old connection it should reset any connection spoecific state on the embedded connection object. */ private static void PoolReset(String type, PooledConnection pc) throws SQLException { PoolResetWork("1", "C", pc.getConnection()); PoolResetWork("2", "", pc.getConnection()); PoolResetWork("3", "D", pc.getConnection()); pc.close(); } private static void PoolResetWork( String expectedID, String tableAction, Connection conn) throws SQLException { Statement s = conn.createStatement(); if (tableAction.equals("C")) { s.execute("CREATE TABLE PoolResetWork (id int generated always as identity, name varchar(25))"); } ResultSet rs = s.executeQuery("VALUES IDENTITY_VAL_LOCAL()"); rs.next(); String val = rs.getString(1); if (!rs.wasNull() || (val != null)) fail ("initial call to IDENTITY_VAL_LOCAL is not NULL!" + val); rs.close(); s.executeUpdate("INSERT INTO PoolResetWork(name) values ('derby-222')"); rs = s.executeQuery("VALUES IDENTITY_VAL_LOCAL()"); rs.next(); val = rs.getString(1); assertEquals(expectedID, val); rs.close(); if (tableAction.equals("D")) { s.execute("DROP TABLE PoolResetWork"); } s.close(); conn.close(); } /** * Make sure this connection's string is unique (DERBY-243) */ private static void assertToString(Connection conn) throws Exception { assertStringFormat(conn); String str = conn.toString(); if ( conns.containsKey(str)) { throw new Exception("ERROR: Connection toString() is not unique: " + str); } conns.put(str, conn); } /** * Check the format of a pooled connection **/ private static void assertStringFormat(PooledConnection pc) throws Exception { String prefix = assertStringPrefix(pc); String connstr = pc.toString(); String format = prefix + " \\(ID = [0-9]+\\), Physical Connection = " + "<none>|" + CONNSTRING_FORMAT; assertTrue(connstr.matches(format)); } /** * Check the format of the connection string. This is the default test * to run if this is not a BrokeredConnection class */ private static void assertStringFormat(Connection conn) //throws Exception { assertStringPrefix(conn); String str = conn.toString(); assertTrue("\nexpected format:\n " + CONNSTRING_FORMAT + "\nactual value:\n " + str, str.matches(CONNSTRING_FORMAT)); } /** * Make sure the connection string starts with the right prefix, which * is the classname@hashcode. * * @return the expected prefix string, this is used in further string * format checking */ private static String assertStringPrefix(Object conn) //throws Exception { String connstr = conn.toString(); String prefix = conn.getClass().getName() + "@" + conn.hashCode(); // Connection class and has code for connection string should // match prefix assertTrue(connstr.startsWith(prefix)); return prefix; } /** * Check uniqueness of connection strings coming from a * DataSouce */ private static void assertToString(DataSource ds) throws Exception { clearConnections(); int numConnections = 10; for ( int i = 0 ; i < numConnections ; i++ ) { Connection conn = ds.getConnection(); assertToString(conn); } clearConnections(); } /** * Clear out and close connections in the connections * hashtable. */ private static void clearConnections() throws SQLException { java.util.Iterator it = conns.values().iterator(); while ( it.hasNext() ) { Connection conn = (Connection)it.next(); conn.close(); } conns.clear(); } /** * Get connections using getConnection() and make sure * they're unique */ private void assertTenConnectionsUnique() throws Exception { clearConnections(); // Open ten connections rather than just two to // try and catch any odd uniqueness bugs. Still // no guarantee but is better than just two. int numConnections = 10; for ( int i = 0 ; i < numConnections ; i++ ) { Connection conn = openDefaultConnection(); assertToString(conn); } // Now close the connections clearConnections(); } /** * Check uniqueness of strings for an XA data source */ private static void assertToString(XADataSource xds) throws Exception { int numConnections = 10; // First get a bunch of pooled connections // and make sure they're all unique Hashtable xaConns = new Hashtable(); for ( int i = 0 ; i < numConnections ; i++ ) { XAConnection xc = xds.getXAConnection(); assertStringFormat(xc); String str = xc.toString(); // XA connection toString should be unique assertNull(xaConns.get(str)); xaConns.put(str, xc); } // Now check that connections from each of these // pooled connections have different string values Iterator it = xaConns.values().iterator(); clearConnections(); while ( it.hasNext() ) { XAConnection xc = (XAConnection)it.next(); Connection conn = xc.getConnection(); assertToString(conn); } clearConnections(); // Now clear out the pooled connections it = xaConns.values().iterator(); while ( it.hasNext() ) { XAConnection xc = (XAConnection)it.next(); xc.close(); } xaConns.clear(); } /** * Check uniqueness of strings with a pooled data source. * We want to check the PooledConnection as well as the * underlying physical connection. */ private static void assertToString(ConnectionPoolDataSource pds) throws Exception { int numConnections = 10; // First get a bunch of pooled connections // and make sure they're all unique Hashtable pooledConns = new Hashtable(); for ( int i = 0 ; i < numConnections ; i++ ) { PooledConnection pc = pds.getPooledConnection(); assertStringFormat(pc); String str = pc.toString(); // Pooled connection toString should be unique assertNull( pooledConns.get(str)); pooledConns.put(str, pc); } // Now check that connections from each of these // pooled connections have different string values Iterator it = pooledConns.values().iterator(); clearConnections(); while ( it.hasNext() ) { PooledConnection pc = (PooledConnection)it.next(); Connection conn = pc.getConnection(); assertToString(conn); } clearConnections(); // Now clear out the pooled connections it = pooledConns.values().iterator(); while ( it.hasNext() ) { PooledConnection pc = (PooledConnection)it.next(); pc.close(); } pooledConns.clear(); } /** * Return the Java class and method for the procedure * for the nested connection test. */ private static String getNestedMethodName() { return "checkNesConn"; } // calling checkConnection // - for use in a procedure to get a nested connection. public static void checkNesConn (String dsName) throws SQLException { Connection conn = DriverManager.getConnection("jdbc:default:connection"); String EmptyMapValue=null; // Note: currently, not supported String NullMapValue=null; String MapMapValue=null; if (usingEmbedded()) { EmptyMapValue="OK"; NullMapValue="XJ081"; MapMapValue="0A000"; } else if (usingDerbyNetClient()) { EmptyMapValue="0A000"; NullMapValue="0A000"; MapMapValue="0A000"; } Object[] expectedValues = { new Integer(ResultSet.HOLD_CURSORS_OVER_COMMIT), "OK", new Integer(2), new Boolean(false), new Boolean(false), EmptyMapValue, NullMapValue, MapMapValue}; new DataSourceTest("DataSourceTest").assertConnectionOK( expectedValues, dsName, conn); } } class cdsXid implements Xid, Serializable { private static final long serialVersionUID = 64467338100036L; private final int format_id; private byte[] global_id; private byte[] branch_id; cdsXid(int xid, byte b1, byte b2) { format_id = xid; global_id = new byte[Xid.MAXGTRIDSIZE]; branch_id = new byte[Xid.MAXBQUALSIZE]; for (int i = 0; i < global_id.length; i++) { global_id[i] = b1; } for (int i = 0; i < branch_id.length; i++) { branch_id[i] = b2; } } /** * Obtain the format id part of the Xid. * <p> * * @return Format identifier. O means the OSI CCR format. **/ public int getFormatId() { return(format_id); } /** * Obtain the global transaction identifier part of XID as an array of * bytes. * <p> * * @return A byte array containing the global transaction identifier. **/ public byte[] getGlobalTransactionId() { return(global_id); } /** * Obtain the transaction branch qualifier part of the Xid in a byte array. * <p> * * @return A byte array containing the branch qualifier of the transaction. **/ public byte[] getBranchQualifier() { return(branch_id); } }
java/testing/org/apache/derbyTesting/functionTests/tests/jdbcapi/DataSourceTest.java
/* Derby - Class org.apache.derbyTesting.functionTests.tests.jdbcapi.DataSourceTest Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.derbyTesting.functionTests.tests.jdbcapi; import java.io.File; import java.io.Serializable; import java.security.AccessController; import java.sql.CallableStatement; import java.sql.Connection; import java.sql.DriverManager; import java.sql.ParameterMetaData; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.sql.Savepoint; import java.util.Hashtable; import java.util.Iterator; import org.apache.derbyTesting.functionTests.tests.jdbcapi.AssertEventCatcher; import javax.sql.ConnectionEvent; import javax.sql.ConnectionEventListener; import javax.sql.ConnectionPoolDataSource; import javax.sql.DataSource; import javax.sql.PooledConnection; import javax.sql.XAConnection; import javax.sql.XADataSource; import javax.transaction.xa.XAException; import javax.transaction.xa.XAResource; import javax.transaction.xa.Xid; import junit.framework.Test; import junit.framework.TestSuite; import org.apache.derby.jdbc.ClientConnectionPoolDataSource; import org.apache.derby.jdbc.ClientDataSource; import org.apache.derby.jdbc.ClientXADataSource; import org.apache.derby.jdbc.EmbeddedConnectionPoolDataSource; import org.apache.derby.jdbc.EmbeddedDataSource; import org.apache.derby.jdbc.EmbeddedSimpleDataSource; import org.apache.derby.jdbc.EmbeddedXADataSource; import org.apache.derbyTesting.functionTests.util.SecurityCheck; import org.apache.derbyTesting.junit.BaseJDBCTestCase; import org.apache.derbyTesting.junit.CleanDatabaseTestSetup; import org.apache.derbyTesting.junit.DatabasePropertyTestSetup; import org.apache.derbyTesting.junit.J2EEDataSource; import org.apache.derbyTesting.junit.JDBC; import org.apache.derbyTesting.junit.JDBCDataSource; import org.apache.derbyTesting.junit.TestConfiguration; /** * Test the various embedded DataSource implementations of Derby. * * Performs SecurityCheck analysis on the JDBC objects returned. * This is because this test returns to the client a number of * different implementations of Connection, Statement etc. * * @see org.apache.derbyTesting.functionTests.util.SecurityCheck * */ public class DataSourceTest extends BaseJDBCTestCase { private static final String dbName = TestConfiguration.getCurrent().getDefaultDatabaseName(); /** * A hashtable of opened connections. This is used when checking to * make sure connection strings are unique; we need to make sure all * the connections are closed when we are done, so they are stored * in this hashtable */ protected static Hashtable conns = new Hashtable(); /** The expected format of a connection string. In English: * "<classname>@<hashcode> (XID=<xid>), (SESSION = <sessionid>), * (DATABASE=<dbname>), (DRDAID = <drdaid>)" */ private static final String CONNSTRING_FORMAT = "\\S+@\\-?[0-9]+.* \\(XID = .*\\), \\(SESSIONID = [0-9]+\\), " + "\\(DATABASE = [A-Za-z]+\\), \\(DRDAID = .*\\) "; /** * Hang onto the SecurityCheck class while running the * tests so that it is not garbage collected during the * test and lose the information it has collected, * in case it should get printed out. */ private final Object nogc = SecurityCheck.class; public DataSourceTest(String name) { super(name); } /** * Return a suite of tests that are run with a lower lock timeout. * * @param postfix suite name postfix * @return A suite of tests being run with a lower lock timeout. */ private static Test getTimeoutSuite(String postfix) { TestSuite suite = new TestSuite("Lower lock timeout" + postfix); suite.addTest(new DataSourceTest("timeoutTestDerby1144PooledDS")); suite.addTest(new DataSourceTest("timeoutTestDerby1144XADS")); // Reduce the timeout threshold to make the tests run faster. return DatabasePropertyTestSetup.setLockTimeouts(suite, 3, 5); } /** * Return a suite of tests that are run with both client and embedded * * @param postfix suite name postfix * @return A suite of tests to be run with client and/or embedded */ private static Test baseSuite(String postfix) { TestSuite suite = new TestSuite("ClientAndEmbedded" + postfix); suite.addTest(new DataSourceTest("testGlobalLocalInterleaf")); suite.addTest(new DataSourceTest("testSetIsolationWithStatement")); suite.addTest(new DataSourceTest("testJira95xads")); suite.addTest(new DataSourceTest("testBadConnectionAttributeSyntax")); suite.addTest(new DataSourceTest("testDescriptionProperty")); suite.addTest(new DataSourceTest("testConnectionErrorEvent")); suite.addTest(new DataSourceTest("testReadOnlyToWritableTran")); suite.addTest(new DataSourceTest("testAutoCommitOnXAResourceStart")); suite.addTest(new DataSourceTest("testAllDataSources")); suite.addTest(new DataSourceTest("testClosedCPDSConnection")); suite.addTest(new DataSourceTest("testClosedXADSConnection")); suite.addTest(new DataSourceTest("testSetSchemaInXAConnection")); return suite; } /** * Return a suite of tests that are run with client only * * @return A suite of tests being run with client only */ private static Test getClientSuite() { TestSuite suite = new TestSuite("Client/Server"); suite.addTest(new DataSourceTest("testClientDSConnectionAttributes")); suite.addTest(new DataSourceTest( "testClientTraceFileDSConnectionAttribute")); suite.addTest(new DataSourceTest( "testClientMessageTextConnectionAttribute")); return suite; } /** * Return a suite of tests that are run with embedded only * * @param postfix suite name postfix * @return A suite of tests being run with embedded only */ private static Test getEmbeddedSuite(String postfix) { TestSuite suite = new TestSuite("Embedded" + postfix); suite.addTest(new DataSourceTest("testDSRequestAuthentication")); // Due to a bug following cannot be run for client - DERBY-3379 // To run this fixture with client, add to getClientSuite(), // when DERBY-3379 is fixed, remove from here (and client) and // move to baseSuite. suite.addTest(new DataSourceTest("testPooledReuseOnClose")); // when DERBY-2498 gets fixed, move these two to baseSuite suite.addTest(new DataSourceTest("testJira95ds")); suite.addTest(new DataSourceTest("testJira95pds")); // Following cannot run with client because of DERBY-2533; it hangs // when fixed, this can be moved to baseSuite. suite.addTest(new DataSourceTest("testReuseAcrossGlobalLocal")); suite.addTest(new DataSourceTest("testXAHoldability")); return suite; } public static Test suite() { if (JDBC.vmSupportsJSR169()) { // test uses unsupported classes like DriverManager, XADataSource, // ConnectionPoolDataSource, ConnectionEvenListenere, as well as // unsupported methods, like Connection.setTypeMap()... TestSuite suite = new TestSuite("DatasourceTest cannot run with JSR169"); return suite; } else { TestSuite suite = new TestSuite("DataSourceTest suite"); // Add tests that will run with both embedded suite.addTest(baseSuite(":embedded")); // and network server/client suite.addTest(TestConfiguration.clientServerDecorator( baseSuite(":client"))); // Add the tests that only run with client suite.addTest(TestConfiguration.clientServerDecorator( getClientSuite())); // Add the tests that only run with embedded suite.addTest(getEmbeddedSuite("embedded")); // Add the tests relying on getting timeouts. suite.addTest(getTimeoutSuite(":embedded")); suite.addTest(TestConfiguration.clientServerDecorator( getTimeoutSuite(":client"))); // wrap all in CleanDatabaseTestSetup that creates all database // objects any fixture might need. // Note that not all fixtures need (all of) these. return new CleanDatabaseTestSetup(suite) { /** * Create and populate database objects * * @see org.apache.derbyTesting.junit.CleanDatabaseTestSetup#decorateSQL(java.sql.Statement) */ protected void decorateSQL(Statement s) throws SQLException { s.executeUpdate("create table autocommitxastart(i int)"); s.executeUpdate("insert into autocommitxastart values 1,2,3,4,5"); s.executeUpdate("create schema SCHEMA_Patricio"); s.executeUpdate("create table " + "SCHEMA_Patricio.Patricio (id VARCHAR(255), value INTEGER)"); s.executeUpdate("create table intTable(i int)"); s.executeUpdate("create table hold_30 " + "(id int not null primary key, b char(30))"); s.executeUpdate( "create procedure checkConn2(in dsname varchar(20)) " + "parameter style java language java modifies SQL DATA " + "external name " + "'org.apache.derbyTesting.functionTests.tests.jdbcapi.DataSourceTest." + getNestedMethodName() + "'"); } }; } } public void tearDown() throws Exception { // attempt to get rid of any left-over trace files AccessController.doPrivileged(new java.security.PrivilegedAction() { public Object run() { for (int i=0 ; i < 6 ; i++) { String traceFileName = "trace" + (i+1) + ".out"; File traceFile = new File(traceFileName); if (traceFile.exists()) { // if it exists, attempt to get rid of it traceFile.delete(); } } return null; } }); super.tearDown(); } /* comment out. leaving in, just in case it's ever relevant. * when uncommented, this will run when network server tests are * started, and then reflect the results of the embedded checks. // perform security analysis of the public api for the embedded engine public void testDataSourceAPI() throws SQLException, ClassNotFoundException { SecurityCheck.report(); } */ /** * Test case for DERBY-3172 * When the Derby engine is shutdown or Network Server is brought down, any * api on JDBC Connection object should generate a Connection error event. */ public void testConnectionErrorEvent() throws SQLException, Exception { AssertEventCatcher aes12 = new AssertEventCatcher(12); ConnectionPoolDataSource ds = J2EEDataSource.getConnectionPoolDataSource(); PooledConnection pc = ds.getPooledConnection(); //Add a connection event listener to ConnectionPoolDataSource pc.addConnectionEventListener(aes12); Connection conn = pc.getConnection(); dropTable(conn, "TAB1"); //No event should have been generated at this point assertFalse(aes12.didConnectionClosedEventHappen()); assertFalse(aes12.didConnectionErrorEventHappen()); aes12.resetState(); //Shutdown the Derby engine or Network Server depending on what //mode we are running in. if (usingEmbedded()) { getTestConfiguration().shutdownDatabase(); } else { getTestConfiguration().stopNetworkServer(); } //Now try to use various apis on the JDBC Connection object created //before shutdown and they all should generate connection error event. try { conn.prepareStatement("CREATE TABLE TAB1(COL1 INT NOT NULL)"); } catch (SQLException e) { //The first call on JDBC Connection object after Network Server //shutdown will generate a communication error and that's why we //are checking for SQL State 08006 rather than No current connection //SQL State 08003. In embedded mode, we will get SQL State 08003 //meaning No current connection if (usingEmbedded()) assertSQLState("08003", e); else assertSQLState("08006", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.prepareStatement("CREATE TABLE TAB1(COL1 INT NOT NULL)", 1); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { int[] columnIndexes = {1}; conn.prepareStatement("CREATE TABLE TAB1(COL1 INT NOT NULL)", columnIndexes); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { String[] columnNames = {"col1"}; conn.prepareStatement("CREATE TABLE TAB1(COL1 INT NOT NULL)", columnNames); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.prepareStatement("CREATE TABLE TAB1(COL1 INT NOT NULL)", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.prepareStatement("CREATE TABLE TAB1(COL1 INT NOT NULL)", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.CLOSE_CURSORS_AT_COMMIT); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.createStatement(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.CLOSE_CURSORS_AT_COMMIT); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.prepareCall("CREATE TABLE TAB1(COL1 INT NOT NULL)", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.prepareCall("CREATE TABLE TAB1(COL1 INT NOT NULL)"); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.prepareCall("CREATE TABLE TAB1(COL1 INT NOT NULL)", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.CLOSE_CURSORS_AT_COMMIT); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.nativeSQL("CREATE TABLE TAB1(COL1 INT NOT NULL)"); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.getAutoCommit(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.setAutoCommit(false); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.getHoldability(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.setHoldability(1); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.commit(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.rollback(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.setSavepoint(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.setSavepoint("savept1"); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.rollback((Savepoint)null); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.releaseSavepoint((Savepoint)null); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.getTransactionIsolation(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.getWarnings(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.clearWarnings(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.getMetaData(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.isReadOnly(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.setReadOnly(true); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.setCatalog(null); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.getCatalog(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.getTypeMap(); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); try { conn.setTypeMap(null); } catch (SQLException e) { assertSQLState("08003", e); } assertFalse(aes12.didConnectionClosedEventHappen()); assertTrue(aes12.didConnectionErrorEventHappen()); aes12.resetState(); if (usingEmbedded()) { Class.forName("org.apache.derby.jdbc.EmbeddedDriver").newInstance(); }else { getTestConfiguration().startNetworkServer(); } // Get a new connection to the database conn = getConnection(); conn.close(); } /** * Test that a PooledConnection can be reused and closed * (separately) during the close event raised by the * closing of its logical connection. * DERBY-2142. * @throws SQLException * */ public void testPooledReuseOnClose() throws SQLException { // PooledConnection from a ConnectionPoolDataSource ConnectionPoolDataSource cpds = J2EEDataSource.getConnectionPoolDataSource(); subtestPooledReuseOnClose(cpds.getPooledConnection()); subtestPooledCloseOnClose(cpds.getPooledConnection()); // PooledConnection from an XDataSource XADataSource xads = J2EEDataSource.getXADataSource(); subtestPooledReuseOnClose(xads.getXAConnection()); subtestPooledCloseOnClose(xads.getXAConnection()); } /** * Tests that a pooled connection can successfully be reused * (a new connection obtained from it) during the processing * of its close event by its listener. * Sections 11.2 & 12.5 of JDBC 4 specification indicate that the * connection can be returned to the pool when the * ConnectionEventListener.connectionClosed() is called. */ private void subtestPooledReuseOnClose(final PooledConnection pc) throws SQLException { final Connection[] newConn = new Connection[1]; pc.addConnectionEventListener(new ConnectionEventListener() { /** * Mimic a pool handler that returns the PooledConnection * to the pool and then reallocates it to a new logical connection. */ public void connectionClosed(ConnectionEvent event) { PooledConnection pce = (PooledConnection) event.getSource(); assertSame(pc, pce); try { // open a new logical connection and pass // back to the fixture. newConn[0] = pce.getConnection(); } catch (SQLException e) { // Need to catch the exception here because // we cannot throw an exception through // the api method. fail(e.getMessage()); } } public void connectionErrorOccurred(ConnectionEvent event) { } }); // Open a connection then close it to trigger the // fetching of a new connection in the callback. Connection c1 = pc.getConnection(); c1.close(); // Fetch the connection created in the close callback Connection c2 = newConn[0]; assertNotNull(c2); // Ensure the connection is useable, this hit a NPE before DERBY-2142 // was fixed (for embedded). c2.createStatement().close(); pc.close(); } /** * Tests that a pooled connection can successfully be closed * during the processing of its close event by its listener. */ private void subtestPooledCloseOnClose(final PooledConnection pc) throws SQLException { pc.addConnectionEventListener(new ConnectionEventListener() { /** * Mimic a pool handler that closes the PooledConnection * (say it no longer needs it, pool size being reduced) */ public void connectionClosed(ConnectionEvent event) { PooledConnection pce = (PooledConnection) event.getSource(); assertSame(pc, pce); try { pce.close(); } catch (SQLException e) { // Need to catch the exception here because // we cannot throw an exception through // the api method. fail(e.getMessage()); } } public void connectionErrorOccurred(ConnectionEvent event) { } }); // Open and close a connection to invoke the logic above // through the callback pc.getConnection().close(); // The callback closed the actual pooled connection // so subsequent requests to get a logical connection // should fail. try { pc.getConnection(); fail("PooledConnection should be closed"); } catch (SQLException sqle) { assertSQLState("08003", sqle); } } public void testAllDataSources() throws SQLException, Exception { Connection dmc = getConnection(); CallableStatement cs = dmc.prepareCall("call checkConn2(?)"); cs.setString(1,"Nested"); try { cs.execute(); } catch (SQLException sqle) { assertSQLState("40XC0", sqle); } cs.setString(1,"Nested2"); cs.execute(); String EmptyMapValue=null; // Note: currently, not supported String NullMapValue=null; String MapMapValue=null; if (usingEmbedded()) { EmptyMapValue="OK"; NullMapValue="XJ081"; MapMapValue="0A000"; } else if (usingDerbyNetClient()) { EmptyMapValue="0A000"; NullMapValue="0A000"; MapMapValue="0A000"; } Object[] expectedValues = { new Integer(ResultSet.HOLD_CURSORS_OVER_COMMIT), "XJ010", new Integer(2), new Boolean(true), new Boolean(false), EmptyMapValue, NullMapValue, MapMapValue}; assertConnectionOK(expectedValues, "DriverManager ", dmc); if (usingEmbedded()) assertTenConnectionsUnique(); DataSource dscs = JDBCDataSource.getDataSource(); if (usingEmbedded()) assertToString(dscs); DataSource ds = dscs; assertConnectionOK(expectedValues, "DataSource", ds.getConnection()); DataSource dssimple = null; // simple datasource is only supported with embedded if (usingEmbedded()) { EmbeddedSimpleDataSource realdssimple = new EmbeddedSimpleDataSource(); realdssimple.setDatabaseName(dbName); ds = realdssimple; dssimple = (DataSource)realdssimple; assertConnectionOK( expectedValues, "SimpleDataSource", ds.getConnection()); } ConnectionPoolDataSource dsp = J2EEDataSource.getConnectionPoolDataSource(); if (usingEmbedded()) assertToString(dsp); PooledConnection pc = dsp.getPooledConnection(); // checks currently only implemented for embedded if (usingEmbedded()) { SecurityCheck.assertSourceSecurity( pc, "javax.sql.PooledConnection"); } AssertEventCatcher aes1 = new AssertEventCatcher(1); pc.addConnectionEventListener(aes1); // DERBY-2531 // with Network Server / DerbyNetClient, the assertConnectionOK check // returns a different connection object... assertConnectionOK( expectedValues, "ConnectionPoolDataSource", pc.getConnection()); //Check if got connection closed event but not connection error event assertTrue(aes1.didConnectionClosedEventHappen()); assertFalse(aes1.didConnectionErrorEventHappen()); aes1.resetState(); assertConnectionOK( expectedValues, "ConnectionPoolDataSource", pc.getConnection()); //Check if got connection closed event but not connection error event assertTrue(aes1.didConnectionClosedEventHappen()); assertFalse(aes1.didConnectionErrorEventHappen()); aes1.resetState(); XADataSource dsx = J2EEDataSource.getXADataSource(); if (usingEmbedded()) assertToString(dsx); // shutdown db and check all's still ok thereafter TestConfiguration.getCurrent().shutdownDatabase(); dmc = getConnection(); cs = dmc.prepareCall("call checkConn2(?)"); // checks currently only implemented for embedded if (usingEmbedded()) { SecurityCheck.assertSourceSecurity( cs, "java.sql.CallableStatement"); } cs.setString(1,"Nested"); try { cs.execute(); } catch (SQLException sqle) { assertSQLState("40XC0", sqle); } cs.setString(1, "Nested2"); cs.execute(); XAConnection xac = dsx.getXAConnection(); // checks currently only implemented for embedded if (usingEmbedded()) { SecurityCheck.assertSourceSecurity(xac, "javax.sql.XAConnection"); } AssertEventCatcher aes3 = new AssertEventCatcher(3); xac.addConnectionEventListener(aes3); assertConnectionOK( expectedValues, "XADataSource", xac.getConnection()); //Check if got connection closed event but not connection error event assertTrue(aes3.didConnectionClosedEventHappen()); assertFalse(aes3.didConnectionErrorEventHappen()); aes3.resetState(); pc = dsp.getPooledConnection(); AssertEventCatcher aes2 = new AssertEventCatcher(2); pc.addConnectionEventListener(aes2); assertConnectionOK( expectedValues, "ConnectionPoolDataSource", pc.getConnection()); //Check if got connection closed event but not connection error event assertTrue(aes2.didConnectionClosedEventHappen()); assertFalse(aes2.didConnectionErrorEventHappen()); aes2.resetState(); // test "local" XAConnections xac = dsx.getXAConnection(); AssertEventCatcher aes4 = new AssertEventCatcher(4); xac.addConnectionEventListener(aes4); assertConnectionOK( expectedValues, "XADataSource", xac.getConnection()); //Check if got connection closed event but not connection error event assertTrue(aes4.didConnectionClosedEventHappen()); assertFalse(aes4.didConnectionErrorEventHappen()); aes4.resetState(); assertConnectionOK( expectedValues, "XADataSource", xac.getConnection()); //Check if got connection closed event but not connection error event assertTrue(aes4.didConnectionClosedEventHappen()); assertFalse(aes4.didConnectionErrorEventHappen()); aes4.resetState(); xac.close(); // test "global" XAConnections xac = dsx.getXAConnection(); AssertEventCatcher aes5 = new AssertEventCatcher(5); xac.addConnectionEventListener(aes5); XAResource xar = xac.getXAResource(); // checks currently only implemented for embedded if (usingEmbedded()) { SecurityCheck.assertSourceSecurity( xar, "javax.transaction.xa.XAResource"); } Xid xid = new cdsXid(1, (byte) 35, (byte) 47); xar.start(xid, XAResource.TMNOFLAGS); Connection xacc = xac.getConnection(); xacc.close(); expectedValues[0] = new Integer(ResultSet.CLOSE_CURSORS_AT_COMMIT); if (usingEmbedded()) expectedValues[1] = "XJ058"; expectedValues[3] = new Boolean(false); assertConnectionOK( expectedValues, "Global XADataSource", xac.getConnection()); //Check if got connection closed event but not connection error event assertTrue(aes5.didConnectionClosedEventHappen()); assertFalse(aes5.didConnectionErrorEventHappen()); aes5.resetState(); assertConnectionOK( expectedValues, "Global XADataSource", xac.getConnection()); //Check if got connection closed event but not connection error event assertTrue(aes5.didConnectionClosedEventHappen()); assertFalse(aes5.didConnectionErrorEventHappen()); aes5.resetState(); xar.end(xid, XAResource.TMSUCCESS); expectedValues[0] = new Integer(ResultSet.HOLD_CURSORS_OVER_COMMIT); expectedValues[3] = new Boolean(true); assertConnectionOK(expectedValues, "Switch to local XADataSource", xac.getConnection()); //Check if got connection closed event but not connection error event assertTrue(aes5.didConnectionClosedEventHappen()); assertFalse(aes5.didConnectionErrorEventHappen()); aes5.resetState(); assertConnectionOK(expectedValues, "Switch to local XADataSource", xac.getConnection()); //Check if got connection closed event but not connection error event assertTrue(aes5.didConnectionClosedEventHappen()); assertFalse(aes5.didConnectionErrorEventHappen()); aes5.resetState(); Connection backtoGlobal = xac.getConnection(); xar.start(xid, XAResource.TMJOIN); expectedValues[0] = new Integer(ResultSet.CLOSE_CURSORS_AT_COMMIT); expectedValues[3] = new Boolean(false); assertConnectionOK(expectedValues, "Switch to global XADataSource", backtoGlobal); //Check if got connection closed event but not connection error event assertTrue(aes5.didConnectionClosedEventHappen()); assertFalse(aes5.didConnectionErrorEventHappen()); aes5.resetState(); assertConnectionOK(expectedValues, "Switch to global XADataSource", xac.getConnection()); //Check if got connection closed event but not connection error event assertTrue(aes5.didConnectionClosedEventHappen()); assertFalse(aes5.didConnectionErrorEventHappen()); aes5.resetState(); xar.end(xid, XAResource.TMSUCCESS); xar.commit(xid, true); xac.close(); } public void testClosedCPDSConnection() throws SQLException, Exception { // verify that outstanding updates from a closed connection, obtained // from a ConnectionPoolDataSource, are not committed, but rolled back. ConnectionPoolDataSource dsp = J2EEDataSource.getConnectionPoolDataSource(); PooledConnection pc = dsp.getPooledConnection(); Connection c1 = pc.getConnection(); Statement s = c1.createStatement(); // start by deleting all rows from intTable s.executeUpdate("delete from intTable"); c1.setAutoCommit(false); // this update should get rolled back later s.executeUpdate("insert into intTable values(1)"); // this should automatically close the original connection c1 = pc.getConnection(); ResultSet rs = c1.createStatement().executeQuery("select count(*) from intTable"); rs.next(); assertEquals(0, rs.getInt(1)); c1.close(); // check connection objects are closed once connection is closed try { rs.next(); fail("ResultSet is open for a closed connection obtained from PooledConnection"); } catch (SQLException sqle) { // 08003 - No current connection; XCL16 - ResultSet not open if (usingEmbedded()) assertSQLState("08003", sqle); else if (usingDerbyNetClient()) assertSQLState("XCL16", sqle); } try { s.executeUpdate("update intTable set i = 1"); fail("Statement is open for a closed connection " + "obtained from PooledConnection"); } catch (SQLException sqle) { assertSQLState("08003", sqle); } pc.close(); pc = null; PoolReset("ConnectionPoolDataSource", dsp.getPooledConnection()); s.close(); rs.close(); c1.close(); } public void testClosedXADSConnection() throws SQLException, Exception { // verify that outstanding updates from a closed connection, obtained // from an XADataSource, are not committed, but rolled back. XADataSource dsx = J2EEDataSource.getXADataSource(); XAConnection xac = dsx.getXAConnection(); Connection c1 = xac.getConnection(); Statement s = c1.createStatement(); c1.setAutoCommit(false); // this update should be rolled back s.executeUpdate("insert into intTable values(2)"); c1 = xac.getConnection(); ResultSet rs = c1.createStatement().executeQuery( "select count(*) from intTable"); rs.next(); assertEquals(0, rs.getInt(1)); rs.close(); c1.close(); xac.close(); xac = null; PoolReset("XADataSource", dsx.getXAConnection()); } public void testGlobalLocalInterleaf() throws SQLException, XAException { // now some explicit tests for how connection state behaves // when switching between global transactions and local // and setting connection state. // some of this may be tested elsewhere too. XADataSource dsx = J2EEDataSource.getXADataSource(); XAConnection xac = dsx.getXAConnection(); AssertEventCatcher aes6 = new AssertEventCatcher(6); xac.addConnectionEventListener(aes6); XAResource xar = xac.getXAResource(); Xid xid = new cdsXid(1, (byte) 93, (byte) 103); // series 1 - Single connection object Connection cs1 = xac.getConnection(); // initial local assertConnectionState( ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_READ_COMMITTED, true, false, cs1); xar.start(xid, XAResource.TMNOFLAGS); // initial X1 assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_COMMITTED, false, false, cs1); cs1.setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED); cs1.setReadOnly(true); setHoldability(cs1, false); // close cursors // modified X1 boolean ReadOnly = false; // see DERBY-911, ReadOnly state different for Embedded/DerbyNetClient if (usingEmbedded()) ReadOnly = true; assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, false, ReadOnly, cs1); xar.end(xid, XAResource.TMSUCCESS); // the underlying local transaction/connection must pick up the // state of the Connection handle cs1 // modified local: assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, true, ReadOnly, cs1); cs1.setTransactionIsolation(Connection.TRANSACTION_READ_COMMITTED); cs1.setReadOnly(false); setHoldability(cs1, false); // close cursors // reset local assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_COMMITTED, true, false, cs1); // now re-join the transaction, should pick up the read-only // and isolation level from the transaction, // holdability remains that of this handle. xar.start(xid, XAResource.TMJOIN); // re-join X1 assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, false, ReadOnly, cs1); xar.end(xid, XAResource.TMSUCCESS); // back to local - should be the same as the reset local assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_COMMITTED, true, false, cs1); // test suspend/resume // now re-join the transaction (X1) for the second time, should pick // up the read-only and isolation level from the transaction, // holdability remains that of this handle. xar.start(xid, XAResource.TMJOIN); assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, false, ReadOnly, cs1); xar.end(xid, XAResource.TMSUSPEND); // local after suspend assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_COMMITTED, true, false, cs1); xar.start(xid, XAResource.TMRESUME); // resume X1 assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, false, ReadOnly, cs1); xar.end(xid, XAResource.TMSUCCESS); // back to local (second time) assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_COMMITTED, true, false, cs1); cs1.setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED); cs1.setReadOnly(true); setHoldability(cs1, true); // hold //Confirm - no connection closed event & connection error event assertFalse(aes6.didConnectionClosedEventHappen()); assertFalse(aes6.didConnectionErrorEventHappen()); aes6.resetState(); cs1.close(); //Check if got connection closed event but not connection error event assertTrue(aes6.didConnectionClosedEventHappen()); assertFalse(aes6.didConnectionErrorEventHappen()); aes6.resetState(); cs1 = xac.getConnection(); // new handle - local assertConnectionState( ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_READ_COMMITTED, true, false, cs1); cs1.close(); //Check if got connection closed event but not connection error event assertTrue(aes6.didConnectionClosedEventHappen()); assertFalse(aes6.didConnectionErrorEventHappen()); aes6.resetState(); xar.start(xid, XAResource.TMJOIN); cs1 = xac.getConnection(); // re-join with new handle X1 assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, false, ReadOnly, cs1); cs1.close(); xar.end(xid, XAResource.TMSUCCESS); //Check if got connection closed event but not connection error event assertTrue(aes6.didConnectionClosedEventHappen()); assertFalse(aes6.didConnectionErrorEventHappen()); aes6.resetState(); // now get a connection (attached to a local) // attach to the global and commit it. // state should be that of the local after the commit. cs1 = xac.getConnection(); cs1.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ); // pre-X1 commit - local assertConnectionState( ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_REPEATABLE_READ, true, false, cs1); xar.start(xid, XAResource.TMJOIN); // pre-X1 commit - X1 assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, false, ReadOnly, cs1); xar.end(xid, XAResource.TMSUCCESS); // post-X1 end - local assertConnectionState( ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_REPEATABLE_READ, true, false, cs1); xar.commit(xid, true); // post-X1 commit - local assertConnectionState( ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_REPEATABLE_READ, true, false, cs1); //Confirm - no connection closed event & connection error event assertFalse(aes6.didConnectionClosedEventHappen()); assertFalse(aes6.didConnectionErrorEventHappen()); aes6.resetState(); cs1.close(); //Check if got connection closed event but not connection error event assertTrue(aes6.didConnectionClosedEventHappen()); assertFalse(aes6.didConnectionErrorEventHappen()); aes6.resetState(); } // really part of testGlobalLocalInterLeaf: /** * @throws SQLException * @throws XAException */ public void testSetIsolationWithStatement() throws SQLException, XAException { // DERBY-421 Setting isolation level with SQL was not getting // handled correctly // Some more isolation testing using SQL and JDBC api XADataSource dsx = J2EEDataSource.getXADataSource(); XAConnection xac = dsx.getXAConnection(); AssertEventCatcher aes6 = new AssertEventCatcher(6); xac.addConnectionEventListener(aes6); XAResource xar = xac.getXAResource(); Connection conn = xac.getConnection(); Statement s = conn.createStatement(); // initial local assertConnectionState( ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_READ_COMMITTED, true, false, conn); // Issue setTransactionIsolation in local transaction conn.setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED); // setTransactionIsolation in local assertConnectionState( ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, true, false, conn); Xid xid; //Issue SQL to change isolation in local transaction s.executeUpdate("set current isolation = RR"); assertConnectionState(ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_SERIALIZABLE, true, false, conn); xid = new cdsXid(1, (byte) 35, (byte) 47); xar.start(xid, XAResource.TMNOFLAGS); // 1st global (new) assertConnectionState(ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_SERIALIZABLE, false, false, conn); xar.end(xid, XAResource.TMSUCCESS); // local assertConnectionState(ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_SERIALIZABLE, true, false, conn); //Issue SQL to change isolation in local transaction s.executeUpdate("set current isolation = RS"); assertConnectionState(ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_REPEATABLE_READ, true, false, conn); // DERBY-1325 - Isolation level of local connection does not get reset after ending // a global transaction that was joined/resumed if the isolation level was changed // using SQL xar.start(xid, XAResource.TMJOIN); // 1st global(existing) assertConnectionState(ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_SERIALIZABLE, false, false, conn); xar.end(xid, XAResource.TMSUCCESS); // local assertConnectionState(ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_REPEATABLE_READ, true, false, conn); // DERBY-1325 end test Xid xid2 = new cdsXid(1, (byte) 93, (byte) 103); xar.start(xid2, XAResource.TMNOFLAGS); // 2nd global (new) assertConnectionState(ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_REPEATABLE_READ, false, false, conn); xar.end(xid2, XAResource.TMSUCCESS); xar.start(xid, XAResource.TMJOIN); // 1st global (existing) assertConnectionState(ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_SERIALIZABLE, false, false, conn); xar.end(xid, XAResource.TMSUCCESS); //local assertConnectionState(ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_REPEATABLE_READ, true, false, conn); xar.start(xid, XAResource.TMJOIN); // 1st global (existing) assertConnectionState(ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_SERIALIZABLE, false, false, conn); // Issue SQL to change isolation in 1st global transaction s.executeUpdate("set current isolation = UR"); assertConnectionState(ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, false, false, conn); xar.end(xid, XAResource.TMSUCCESS); // local assertConnectionState(ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, true, false, conn); xar.start(xid2, XAResource.TMJOIN); // 2nd global (existing) assertConnectionState(ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_REPEATABLE_READ, false, false, conn); xar.end(xid2, XAResource.TMSUCCESS); xar.rollback(xid2); // (After 2nd global rollback ) local assertConnectionState(ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, true, false, conn); xar.rollback(xid); // (After 1st global rollback) local assertConnectionState(ResultSet.HOLD_CURSORS_OVER_COMMIT, Connection.TRANSACTION_READ_UNCOMMITTED, true, false, conn); //Confirm - no connection closed event & connection error event assertFalse(aes6.didConnectionClosedEventHappen()); assertFalse(aes6.didConnectionErrorEventHappen()); aes6.resetState(); } // This test includes some short-hand descriptions of the test cases // left in for reference to the original non-junit test public void testReuseAcrossGlobalLocal() throws SQLException, XAException { // DERBY-2533 - // network server cannot run this test - it hits a protocol error // on tearDown. Embedded requires a database shutdown if (usingDerbyNetClient()) return; int[] onetwothree = {1,2,3}; int[] three = {3}; int[] pspc = {1, 4}; // expected parameter count for prepared statements int[] cspc = {2, 12, 12}; // for callable statements // statics for testReuseAcrossGlobalLocal int[] StatementExpectedValues = { ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY, ResultSet.FETCH_REVERSE, 444, 713, 19, ResultSet.HOLD_CURSORS_OVER_COMMIT}; //ResultSet.CLOSE_CURSORS_AT_COMMIT}; int[] PreparedStatementExpectedValues = { ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY, ResultSet.FETCH_REVERSE, 888, 317, 91, ResultSet.HOLD_CURSORS_OVER_COMMIT}; int[] CallableStatementExpectedValues = { ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY, ResultSet.FETCH_REVERSE, 999, 137, 85, ResultSet.HOLD_CURSORS_OVER_COMMIT}; XADataSource dsx = J2EEDataSource.getXADataSource(); XAConnection xac = dsx.getXAConnection(); AssertEventCatcher aes6 = new AssertEventCatcher(6); xac.addConnectionEventListener(aes6); XAResource xar = xac.getXAResource(); Xid xid = new cdsXid(1, (byte) 103, (byte) 119); // now check re-use of *Statement objects across local/global // connections. Connection cs1 = xac.getConnection(); // ensure read locks stay around until end-of transaction cs1.setTransactionIsolation(Connection.TRANSACTION_REPEATABLE_READ); cs1.setAutoCommit(false); assertLocks(null, cs1); Statement sru1 = cs1.createStatement(); sru1.setCursorName("SN1"); sru1.executeUpdate("insert into intTable values 1,2,3"); Statement sruBatch = cs1.createStatement(); sruBatch.setCursorName("sruBatch"); Statement sruState = createFloatStatementForStateChecking( StatementExpectedValues, cs1); PreparedStatement psruState = createFloatStatementForStateChecking( new int[] {1, 4}, PreparedStatementExpectedValues, cs1, "select i from intTable where i = ?"); CallableStatement csruState = createFloatCallForStateChecking( new int[] {2, 12, 12}, CallableStatementExpectedValues, cs1, "CALL SYSCS_UTIL.SYSCS_SET_DATABASE_PROPERTY(?,?)"); PreparedStatement psParams = cs1.prepareStatement("select * from intTable where i > ?"); psParams.setCursorName("params"); psParams.setInt(1, 2); // Params-local-1 resultSetQuery("params", three, psParams.executeQuery()); sruBatch.addBatch("insert into intTable values 4"); // sru1-local-1 queryOnStatement("SN1", onetwothree, cs1, sru1); cs1.commit(); // need to commit to switch to an global connection; // simple case - underlying connection is re-used for global. xar.start(xid, XAResource.TMNOFLAGS); // Expecting downgrade because global transaction sru1-global-2 is // using a statement with holdability true // sru1-global-2 queryOnStatement("SN1", onetwothree, cs1, sru1); sruBatch.addBatch("insert into intTable values 5"); Statement sru2 = cs1.createStatement(); sru2.setCursorName("OAK2"); //sru2-global-3 queryOnStatement("OAK2", onetwothree, cs1, sru2); // Expecting downgrade because global transaction sru1-global-4 is // using a statement with holdability true // sru1-global-4 queryOnStatement("SN1", onetwothree, cs1, sru1); // Global statement StatementExpectedValues[6] = ResultSet.CLOSE_CURSORS_AT_COMMIT; PreparedStatementExpectedValues[6] = ResultSet.CLOSE_CURSORS_AT_COMMIT; CallableStatementExpectedValues[6] = ResultSet.CLOSE_CURSORS_AT_COMMIT; assertStatementState(null, StatementExpectedValues ,sruState); // Global PreparedStatement assertStatementState(pspc, PreparedStatementExpectedValues, psruState); // Global CallableStatement assertStatementState(cspc, CallableStatementExpectedValues, csruState); // Params-global-1 resultSetQuery("params", three, psParams.executeQuery()); xar.end(xid, XAResource.TMSUCCESS); // now a new underlying connection is created // sru1-local-5 queryOnStatement("SN1", onetwothree, cs1, sru1); // sru2-local-6 queryOnStatement("OAK2", onetwothree, cs1, sru2); sruBatch.addBatch("insert into intTable values 6,7"); Statement sru3 = cs1.createStatement(); sru3.setCursorName("SF3"); // sru3-local-7 queryOnStatement("SF3", onetwothree, cs1, sru3); // Two transactions should hold locks (global and the current XA); // LOCAL StatementExpectedValues[6] = ResultSet.HOLD_CURSORS_OVER_COMMIT; PreparedStatementExpectedValues[6] = ResultSet.HOLD_CURSORS_OVER_COMMIT; CallableStatementExpectedValues[6] = ResultSet.HOLD_CURSORS_OVER_COMMIT; assertStatementState(null, StatementExpectedValues, sruState); assertStatementState(pspc, PreparedStatementExpectedValues, psruState); assertStatementState(cspc, CallableStatementExpectedValues, csruState); // Params-local-2 resultSetQuery("params", three, psParams.executeQuery()); assertLocks(new int[] {14,14}, cs1); cs1.commit(); //Confirm - no connection closed event & connection error event assertFalse(aes6.didConnectionClosedEventHappen()); assertFalse(aes6.didConnectionErrorEventHappen()); aes6.resetState(); // attach the XA transaction to another connection and see what happens XAConnection xac2 = dsx.getXAConnection(); AssertEventCatcher aes5 = new AssertEventCatcher(5); xac2.addConnectionEventListener(aes5); XAResource xar2 = xac2.getXAResource(); xar2.start(xid, XAResource.TMJOIN); Connection cs2 = xac2.getConnection(); // these statements were generated by cs1 and thus are still // in a local connection. // sru1-local-8 queryOnStatement("SN1", onetwothree, cs1, sru1); // sru2-local-9 queryOnStatement("OAK2", onetwothree, cs1, sru2); // sru3-local-10 queryOnStatement("SF3", onetwothree, cs1, sru3); sruBatch.addBatch("insert into intTable values 8"); // LOCAL 2 assertStatementState(null, StatementExpectedValues, sruState); assertStatementState(pspc, PreparedStatementExpectedValues, psruState); assertStatementState(cspc, CallableStatementExpectedValues, csruState); assertLocks(new int[] {14, 12}, cs1); int[] updateCounts = sruBatch.executeBatch(); int[] expectedUpdateCounts = {1, 1, 2, 1}; // sruBatch update counts: for (int i = 0; i < updateCounts.length; i++) { assertEquals(expectedUpdateCounts[i], updateCounts[i]); } // sruBatch queryOnStatement( "sruBatch", new int[] {1,2,3,4,5,6,7,8}, cs1, sruBatch); xar2.end(xid, XAResource.TMSUCCESS); //Confirm - no connection closed event & connection error event assertFalse(aes5.didConnectionClosedEventHappen()); assertFalse(aes5.didConnectionErrorEventHappen()); aes5.resetState(); xac2.close(); // allow close on already closed XAConnection xac2.close(); xac2.addConnectionEventListener(null); xac2.removeConnectionEventListener(null); // test methods against a closed XAConnection and its resource try { xac2.getXAResource(); // DERBY-2532 // Network Server does not think this is worth an exception. if (usingEmbedded()) fail("expected SQLException on " + "closed XAConnection.getXAResource"); } catch (SQLException sqle) { assertSQLState("08003", sqle); } try { xac2.getConnection(); fail ("expected SQLException on XAConnection.getConnection"); } catch (SQLException sqle) { assertSQLState("08003", sqle); } try { xar2.start(xid, XAResource.TMJOIN); fail ("expected XAException on XAResource.TMJOIN"); } catch (XAException xae) { assertXAException("XAResource.start", xae); } try { xar2.end(xid, XAResource.TMJOIN); fail ("expected XAException on XAResource.TMJOIN"); } catch (XAException xae) { assertXAException("XAResource.end", xae); } try { xar2.commit(xid, true); fail ("expected XAException on XAResource.commit"); } catch (XAException xae) { assertXAException("XAResource.commit", xae); } try { xar2.prepare(xid); fail ("expected XAException on XAResource.prepare"); } catch (XAException xae) { assertXAException("XAResource.prepare", xae); } try { xar2.recover(0); fail ("expected XAException on XAResource.recover"); } catch (XAException xae) { assertXAException("XAResource.recover", xae); } try { xar2.prepare(xid); fail ("expected XAException on XAResource.prepare"); } catch (XAException xae) { assertXAException("XAResource.prepare", xae); } try { xar2.isSameRM(xar2); fail ("expected XAException on XAResource.isSameRM"); } catch (XAException xae) { assertXAException("XAResource.isSameRM", xae); } // close everything cs1.rollback(); sruState.close(); psruState.close(); csruState.close(); psParams.close(); sruBatch.close(); sru1.close(); sru2.close(); sru3.close(); cs1.close(); cs2.close(); xac.removeConnectionEventListener(null); xac.close(); xac2.close(); // but, still not enough. // what with all the switching between global and local transactions // we still have a lock open on intTable, which will interfere with // our tearDown efforts. Bounce the database. TestConfiguration.getCurrent().shutdownDatabase(); } public void testSetSchemaInXAConnection() throws SQLException { // tests that set schema works correctly in an XA connection. XADataSource dsx = J2EEDataSource.getXADataSource(); XAConnection xac3 = dsx.getXAConnection(); Connection conn3 = xac3.getConnection(); Statement st3 = conn3.createStatement(); st3.execute("SET SCHEMA SCHEMA_Patricio"); st3.close(); PreparedStatement ps3 = conn3.prepareStatement("INSERT INTO Patricio VALUES (?, ?)"); ps3.setString(1, "Patricio"); ps3.setInt(2, 3); ps3.executeUpdate(); assertEquals(1, ps3.getUpdateCount()); ps3.close(); conn3.close(); xac3.close(); } // test that an xastart in auto commit mode commits the existing work. // test fix of a bug ('beetle 5178') wherein XAresource.start() when // auto-commit is true did not implictly commit any transaction // Also tests DERBY-1025, same description, but for client. public void testAutoCommitOnXAResourceStart() throws SQLException, XAException { XADataSource dsx = J2EEDataSource.getXADataSource(); XAConnection xac4 = dsx.getXAConnection(); Xid xid4a= null; // We get an XAID_DUP error from networkserver when attempting // the XAResource.start below if we use the same xid. // Possibly because we're in the same jvm. // When the test is run with clientserverSuite, rather than default, // this wasn't needed, so just create a different id for client if (usingEmbedded()) xid4a = new cdsXid(4, (byte) 23, (byte) 76); else if (usingDerbyNetClient()) xid4a = new cdsXid(5, (byte) 23, (byte) 76); Connection conn4 = xac4.getConnection(); assertTrue(conn4.getAutoCommit()); Statement s4 = conn4.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.CLOSE_CURSORS_AT_COMMIT); ResultSet rs4 = s4.executeQuery("select i from autocommitxastart"); rs4.next(); assertEquals(1, rs4.getInt(1)); rs4.next(); assertEquals(2, rs4.getInt(1)); // XAResource().start should commit the transaction xac4.getXAResource().start(xid4a, XAResource.TMNOFLAGS); xac4.getXAResource().end(xid4a, XAResource.TMSUCCESS); try { rs4.next(); fail ("expected an exception indicating resultset is closed."); } catch (SQLException sqle) { // Embedded gets 08003. No current connection (DERBY-2620) if (usingDerbyNetClient()) assertSQLState("XCL16",sqle); } conn4.setAutoCommit(false); assertFalse(conn4.getAutoCommit()); rs4 = s4.executeQuery("select i from autocommitxastart"); rs4.next(); assertEquals(1, rs4.getInt(1)); rs4.next(); assertEquals(2, rs4.getInt(1)); // Get a new xid to begin another transaction. if (usingEmbedded()) xid4a = new cdsXid(4, (byte) 93, (byte) 103); else if (usingDerbyNetClient()) xid4a = new cdsXid(5, (byte) 93, (byte) 103); try { xac4.getXAResource().start(xid4a, XAResource.TMNOFLAGS); } catch (XAException xae) { if (usingEmbedded()) assertNull(xae.getMessage()); else if (usingDerbyNetClient()) { // This should give XAER_OUTSIDE exception because // the resource manager is busy in the local transaction assertTrue(xae.getMessage().indexOf("XAER_OUTSIDE") >=0 ); } assertEquals(-9, xae.errorCode); } rs4.next(); assertEquals(3, rs4.getInt(1)); rs4.close(); conn4.rollback(); conn4.close(); xac4.close(); } public void testReadOnlyToWritableTran() throws SQLException, Exception { // This fixture will run twice, once with embedded, once with client, // and insert 2 rows in addition to the 5 rows inserted during setup. // The fixture tests a commit, so before running, try to remove row // 6 and 7 in case this is the second run of the fixture. Statement s = createStatement(); s.executeUpdate("delete from autocommitxastart where i = 6"); s.executeUpdate("delete from autocommitxastart where i = 7"); // TESTING READ_ONLY TRANSACTION FOLLOWED BY WRITABLE TRANSACTION // Test following sequence of steps // 1)start a read-only global transaction // 2)finish that read-only transaction // 3)start another global transaction XADataSource dsx = J2EEDataSource.getXADataSource(); XAConnection xac5 = dsx.getXAConnection(); Xid xid5a = new cdsXid(5, (byte) 119, (byte) 129); Connection conn5 = xac5.getConnection(); Statement sru5a = conn5.createStatement(); XAResource xar = xac5.getXAResource(); xar.start(xid5a, XAResource.TMNOFLAGS); conn5.setReadOnly(true); // Read-Only XA transaction; // holdability: (hold, or close cursors over commit) , // transaction isolation: read-committed, // auto-commit false, read-only true (with embedded) if (usingEmbedded()) { assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_COMMITTED, false, true, conn5); } // Note: the original test had no comments about this difference // between Embedded and DerbyNetClient, this has apparently // been accepted behavior. else if (usingDerbyNetClient()) { assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_COMMITTED, false, false, conn5); } ResultSet rs5 = sru5a.executeQuery( "select count(*) from autocommitxastart"); rs5.next(); assertEquals(5, rs5.getInt(1)); rs5.close(); xar.end(xid5a, XAResource.TMSUCCESS); xar.commit(xid5a, true); conn5.close(); //now start a new transaction conn5 = xac5.getConnection(); sru5a = conn5.createStatement(); xar.start(xid5a, XAResource.TMNOFLAGS); // Writeable XA transaction // holdability: (hold, or close cursors over commit) , // transaction isolation: read-committed, // auto-commit false, read-only false assertConnectionState( ResultSet.CLOSE_CURSORS_AT_COMMIT, Connection.TRANSACTION_READ_COMMITTED, false, false, conn5); sru5a.executeUpdate("insert into autocommitxastart values 6,7"); rs5 = sru5a.executeQuery("select count(*) from autocommitxastart"); rs5.next(); assertEquals(7, rs5.getInt(1)); xar.end(xid5a, XAResource.TMSUCCESS); xar.commit(xid5a, true); conn5.close(); xac5.close(); sru5a.close(); } // test jira-derby 95 - a NullPointerException was returned when passing // an incorrect database name, should now give error: // XCY00 - invalid valid for property ... // with DataSource public void testJira95ds() throws SQLException { try { DataSource ds = JDBCDataSource.getDataSource(); // non-existent database JDBCDataSource.setBeanProperty(ds, "databaseName", "jdbc:derby:wombat"); ds.getConnection(); fail ("expected an SQLException!"); } catch (SQLException sqle) { // DERBY-2498: with client, getting a NullPointerException. // Note also: the NPE does not occur with XADataSource - see // testJira95xads(). if (usingEmbedded()) assertSQLState("XCY00", sqle); } catch (Exception e) { // DERBY-2498, when fixed, remove 'if' if (usingEmbedded()) fail ("unexpected exception: " + e.toString()); } } // test jira-derby 95 - a NullPointerException was returned when passing // an incorrect database name, should now give error XCY00 // with ConnectionPoolDataSource public void testJira95pds() throws SQLException { try { ConnectionPoolDataSource pds = J2EEDataSource.getConnectionPoolDataSource(); JDBCDataSource.setBeanProperty(pds, "databaseName", "jdbc:derby:boo"); pds.getPooledConnection(); fail ("expected an SQLException!"); } catch (SQLException sqle) { // DERBY-2498 - when fixed, remove if if (usingEmbedded()) assertSQLState("XCY00", sqle); } catch (Exception e) { // DERBY-2498 - when fixed, remove if if (usingEmbedded()) fail ("unexpected exception: " + e.toString()); } } // test jira-derby 95 - a NullPointerException was returned when passing // an incorrect database name, should now give error XCY00 // with XADataSource public void testJira95xads() throws SQLException { try { XADataSource dxs = J2EEDataSource.getXADataSource(); JDBCDataSource.setBeanProperty(dxs, "databaseName", "jdbc:derby:boo"); dxs.getXAConnection().getConnection(); fail ("expected an SQLException!"); } catch (SQLException sqle) { assertSQLState("XCY00", sqle); } catch (Exception e) { fail ("unexpected exception: " + e.toString()); } } public void testBadConnectionAttributeSyntax() throws SQLException { // DataSource - bad connattr syntax DataSource ds = JDBCDataSource.getDataSource(); JDBCDataSource.setBeanProperty(ds, "ConnectionAttributes", "bad"); try { ds.getConnection(); fail ("should have seen an error"); } catch (SQLException e) { if (usingEmbedded()) assertSQLState("XJ028", e); else if (usingDerbyNetClient()) assertSQLState("XJ212", e); } // ConnectionPoolDataSource - bad connatr syntax ConnectionPoolDataSource cpds = J2EEDataSource.getConnectionPoolDataSource(); JDBCDataSource.setBeanProperty(cpds, "ConnectionAttributes", "bad"); try { cpds.getPooledConnection(); fail ("should have seen an error"); } catch (SQLException e) { assertSQLState("XJ028", e); } // XADataSource - bad connattr syntax"); XADataSource xads = J2EEDataSource.getXADataSource(); JDBCDataSource.setBeanProperty(xads, "ConnectionAttributes", "bad"); try { xads.getXAConnection(); fail ("should have seen an error"); } catch (SQLException e) { assertSQLState("XJ028", e); } } // End testBadConnectionAttributeSyntax /** * Check that database name set using setConnectionAttributes is not used * by ClientDataSource. This method tests DERBY-1130. * * @throws SQLException */ public void testClientDSConnectionAttributes() throws SQLException { if (usingEmbedded()) return; ClientDataSource ds = new ClientDataSource(); // DataSource - EMPTY; expect error 08001 in all cases // 08001: Required Derby DataSource property databaseName not set. dsConnectionRequests(new String[] {"08001","08001","08001","08001", "08001","08001","08001","08001","08001"}, ds); // DataSource - connectionAttributes=databaseName=<valid name> ds.setConnectionAttributes("databaseName=" + dbName); dsConnectionRequests(new String[] {"08001","08001","08001","08001", "08001","08001","08001","08001","08001"}, ds); ds.setConnectionAttributes(null); // Test that (invalid) database name specified in connection // attributes is not used // DataSource - databaseName=<valid db> and // connectionAttributes=databaseName=kangaroo ds.setConnectionAttributes("databaseName=kangaroo"); ds.setDatabaseName(dbName); dsConnectionRequests(new String[] {"OK","08001","OK","OK", "08001","08001","OK","OK","OK"}, ds); ds.setConnectionAttributes(null); ds.setDatabaseName(null); // now with ConnectionPoolDataSource ClientConnectionPoolDataSource cpds = new ClientConnectionPoolDataSource(); // ConnectionPoolDataSource - EMPTY dsConnectionRequests(new String[] {"08001","08001","08001","08001", "08001","08001","08001","08001","08001"}, (ConnectionPoolDataSource)cpds); // ConnectionPoolDataSource // - connectionAttributes=databaseName=<valid dbname> cpds.setConnectionAttributes("databaseName=" + dbName); dsConnectionRequests(new String[] {"08001","08001","08001","08001", "08001","08001","08001","08001","08001"}, (ConnectionPoolDataSource)cpds); cpds.setConnectionAttributes(null); // Test that database name specified in connection attributes is // not used // ConnectionPoolDataSource - databaseName=wombat and // connectionAttributes=databaseName=kangaroo cpds.setConnectionAttributes("databaseName=kangaroo"); cpds.setDatabaseName(dbName); dsConnectionRequests(new String[] {"OK","08001","OK","OK","08001","08001","OK","OK","OK"}, (ConnectionPoolDataSource)cpds); cpds.setConnectionAttributes(null); cpds.setDatabaseName(null); // now with XADataSource ClientXADataSource xads = new ClientXADataSource(); // XADataSource - EMPTY dsConnectionRequests(new String[] {"08001","08001","08001","08001", "08001","08001","08001","08001","08001"}, (XADataSource) xads); // XADataSource - connectionAttributes=databaseName=<valid dbname> xads.setConnectionAttributes("databaseName=wombat"); dsConnectionRequests(new String[] {"08001","08001","08001","08001", "08001","08001","08001","08001","08001"}, (XADataSource) xads); xads.setConnectionAttributes(null); // Test that database name specified in connection attributes is not used // XADataSource - databaseName=wombat and // connectionAttributes=databaseName=kangaroo xads.setConnectionAttributes("databaseName=kangaroo"); xads.setDatabaseName("wombat"); dsConnectionRequests(new String[] {"OK","08001","OK","OK","08001","08001","OK","OK","OK"}, (XADataSource) xads); xads.setConnectionAttributes(null); xads.setDatabaseName(null); } // End testClientDSConnectionAttributes // Following test is similar to testClientDSConnectionAttributes, but // for embedded datasources. // This subtest does not run for network server, it uses // setAttributesAsPassword, which isn't supported for client datasources. public void testDSRequestAuthentication() throws SQLException { if (usingDerbyNetClient()) return; EmbeddedDataSource ds = new EmbeddedDataSource(); // DataSource - EMPTY dsConnectionRequests(new String[] { "XJ004","XJ004","XJ004","XJ004", "XJ004","XJ004","XJ004","XJ004","XJ004"}, ds); // DataSource - connectionAttributes=databaseName=wombat"); ds.setConnectionAttributes("databaseName=" + dbName); dsConnectionRequests(new String[] { "XJ004","XJ004","XJ004","XJ004", "XJ004","XJ004","XJ004","XJ004","XJ004"}, ds); ds.setConnectionAttributes(null); // DataSource - attributesAsPassword=true"); ds.setAttributesAsPassword(true); dsConnectionRequests(new String[] { "XJ004","XJ004","XJ004","XJ028", "XJ028","XJ004","XJ004","XJ004","XJ004"}, ds); ds.setAttributesAsPassword(false); // DataSource - attributesAsPassword=true, // connectionAttributes=databaseName=kangaroo"); ds.setAttributesAsPassword(true); ds.setConnectionAttributes("databaseName=kangaroo"); dsConnectionRequests(new String[] { "XJ004","XJ004","XJ004","XJ028", "XJ028","XJ004","XJ004","XJ004","XJ004"}, ds); ds.setAttributesAsPassword(false); ds.setConnectionAttributes(null); // Enable Authentication; setDatabaseProperty("derby.user.fred", "wilma"); setDatabaseProperty("derby.user.APP", "APP"); setDatabaseProperty("derby.authentication.provider", "BUILTIN"); setDatabaseProperty("derby.connection.requireAuthentication", "true"); ds.setShutdownDatabase("shutdown"); try { ds.getConnection(); } catch (SQLException sqle) { assertSQLState("XJ015", sqle); } ds.setDatabaseName(null); ds.setShutdownDatabase(null); // "AUTHENTICATION NOW ENABLED"); // DataSource - attributesAsPassword=true ds.setAttributesAsPassword(true); dsConnectionRequests(new String[] { "XJ004","XJ004","XJ004","XJ028", "XJ028","XJ004","XJ004","XJ004","XJ004"}, ds); ds.setAttributesAsPassword(false); // ensure the DS property password is not treated as a set of // attributes. // DataSource - attributesAsPassword=true, user=fred, // password=databaseName=wombat;password=wilma ds.setAttributesAsPassword(true); ds.setUser("fred"); ds.setPassword("databaseName=" + dbName + ";password=wilma"); dsConnectionRequests(new String[] { "XJ004","XJ004","XJ004","XJ028", "XJ028","XJ004","XJ004","XJ004","XJ004"}, ds); ds.setAttributesAsPassword(false); ds.setUser(null); ds.setPassword(null); ds = null; // now with ConnectionPoolDataSource EmbeddedConnectionPoolDataSource cpds = new EmbeddedConnectionPoolDataSource(); // ConnectionPoolDataSource - EMPTY dsConnectionRequests(new String[] { "XJ004","XJ004","XJ004","XJ004", "XJ004","XJ004","XJ004","XJ004","XJ004"}, (ConnectionPoolDataSource)cpds); // ConnectionPoolDataSource - // connectionAttributes=databaseName=wombat cpds.setConnectionAttributes("databaseName=" + dbName); dsConnectionRequests(new String[] { "XJ004","XJ004","XJ004","XJ004", "XJ004","XJ004","XJ004","XJ004","XJ004"}, (ConnectionPoolDataSource)cpds); cpds.setConnectionAttributes(null); // ConnectionPoolDataSource - attributesAsPassword=true cpds.setAttributesAsPassword(true); dsConnectionRequests(new String[] { "XJ004","XJ004","XJ004","XJ028", "XJ028","XJ004","XJ004","XJ004","XJ004"}, (ConnectionPoolDataSource)cpds); cpds.setAttributesAsPassword(false); // ensure the DS property password is not treated as a set of // attributes. // ConnectionPoolDataSource - attributesAsPassword=true, // user=fred, password=databaseName=wombat;password=wilma"); cpds.setAttributesAsPassword(true); cpds.setUser("fred"); cpds.setPassword("databaseName=" + dbName + ";password=wilma"); dsConnectionRequests(new String[] { "XJ004","XJ004","XJ004","XJ028", "XJ028","XJ004","XJ004","XJ004","XJ004"}, (ConnectionPoolDataSource)cpds); cpds.setAttributesAsPassword(false); cpds.setUser(null); cpds.setPassword(null); cpds = null; // now with XADataSource EmbeddedXADataSource xads = new EmbeddedXADataSource(); // XADataSource - EMPTY dsConnectionRequests(new String[] { "08006","08006","08006","08006", "08006","08006","08006","08006","08006"}, (XADataSource) xads); // XADataSource - databaseName=wombat xads.setDatabaseName(dbName); dsConnectionRequests(new String[] { "08004","08004","08004","OK", "08004","08004","08004","08004","08004"}, (XADataSource) xads); xads.setDatabaseName(null); // XADataSource - connectionAttributes=databaseName=wombat"); xads.setConnectionAttributes("databaseName=" + dbName); dsConnectionRequests(new String[] { "08006","08006","08006","08006", "08006","08006","08006","08006","08006"}, (XADataSource) xads); xads.setConnectionAttributes(null); // XADataSource - attributesAsPassword=true xads.setAttributesAsPassword(true); dsConnectionRequests(new String[] { "08006","08006","08006","08006", "08006","08006","08006","08006","08006"}, (XADataSource) xads); xads.setAttributesAsPassword(false); // XADataSource - databaseName=wombat, attributesAsPassword=true xads.setDatabaseName(dbName); xads.setAttributesAsPassword(true); dsConnectionRequests(new String[] { "08004","08004","08004","XJ028", "XJ028","08004","08004","OK","08004"}, (XADataSource) xads); xads.setAttributesAsPassword(false); xads.setDatabaseName(null); setDatabaseProperty("derby.connection.requireAuthentication", "false"); TestConfiguration.getCurrent().shutdownDatabase(); } /** * Check that traceFile connection attribute functions correctly. * tracefile was tested in checkDriver, but not for DataSources. * tracefile= was used in datasourcepermissions_net, but that's * incorrect syntax. Note that we're not checking the contents of * the tracefile. * * Note also that this test cannot run against a remote server. * * @throws SQLException */ public void testClientTraceFileDSConnectionAttribute() throws SQLException { if (usingEmbedded()) return; String traceFile; // DataSource DataSource ds = JDBCDataSource.getDataSource(); // DataSource - setTransationAttributes traceFile = "trace1.out"; JDBCDataSource.setBeanProperty(ds, "connectionAttributes", "traceFile="+traceFile); // In this scenario, we *only* get a tracefile, if we first get a // successful connection, followed by an unsuccessful connection. // So, we cannot just use ds.getConnection() dsGetBadConnection(ds); JDBCDataSource.clearStringBeanProperty(ds, "connectionAttributes"); // DataSource - setTraceFile traceFile = "trace2.out"; JDBCDataSource.setBeanProperty(ds, "traceFile", traceFile); ds.getConnection(); ds = null; // now with ConnectionPoolDataSource ConnectionPoolDataSource cpds = J2EEDataSource.getConnectionPoolDataSource(); traceFile = "trace3.out"; JDBCDataSource.setBeanProperty(cpds, "connectionAttributes", "traceFile="+traceFile); // DERBY-2468 - trace3.out does not get created ((ClientConnectionPoolDataSource) cpds).getConnection(); JDBCDataSource.clearStringBeanProperty(cpds, "connectionAttributes"); traceFile = "trace4.out"; JDBCDataSource.setBeanProperty(cpds, "traceFile", traceFile); ((ClientConnectionPoolDataSource) cpds).getConnection(); cpds = null; // now with XADataSource XADataSource xads = J2EEDataSource.getXADataSource(); traceFile = "trace5.out"; JDBCDataSource.setBeanProperty(xads, "connectionAttributes", "traceFile="+traceFile); ((ClientXADataSource) xads).getConnection(); // DERBY-2468 - trace5.out does not get created JDBCDataSource.clearStringBeanProperty(xads, "connectionAttributes"); traceFile = "trace6.out"; JDBCDataSource.setBeanProperty(xads, "traceFile", traceFile); ((ClientXADataSource) xads).getConnection(); assertTraceFilesExist(); } /* -- Helper Methods for testClientTraceFileDSConnectionAttribute -- */ private static void dsGetBadConnection(DataSource ds) { // first get a good connection, or we don't get a // traceFile when using connectionattributes. // also, we do not get a tracefile that way unless // we see an error. // with setTraceFile, we *always* get a file, even // with just a successful connection. try { ds.getConnection(); ds.getConnection(null, null); fail("expected an sqlException"); } catch (SQLException sqle) { assertSQLState("08001", sqle); } } /** * Check that trace file exists in <framework> directory */ private static void assertTraceFilesExist() { AccessController.doPrivileged(new java.security.PrivilegedAction() { public Object run() { for (int i=0 ; i < 6 ; i++) { String traceFileName = "trace" + (i+1) + ".out"; File traceFile = new File(traceFileName); if (i == 2 || i == 4) continue; else { assertTrue(traceFile.exists()); } } return null; } }); } /** * Check that messageText connection attribute functions correctly. * retrievemessagetext was tested in checkdriver, and derbynet/testij, * but not tested for datasources, and in datasourcepermissions_net, * but as it has nothing to do with permissions/authentication, * this test seems a better place for it. * * @throws SQLException */ public void testClientMessageTextConnectionAttribute() throws SQLException { if (usingEmbedded()) return; String retrieveMessageTextProperty = "retrieveMessageText"; Connection conn; // DataSource // DataSource - retrieveMessageTextProperty ClientDataSource ds = new ClientDataSource(); ds.setDatabaseName(dbName); ds.setConnectionAttributes(retrieveMessageTextProperty + "=false"); conn = ds.getConnection(); assertMessageText(conn,"false"); conn.close(); // now try with retrieveMessageText = true ds.setConnectionAttributes(retrieveMessageTextProperty + "=true"); conn = ds.getConnection(); assertMessageText(conn,"true"); ds.setConnectionAttributes(null); conn.close(); // now with ConnectionPoolDataSource // ConnectionPoolDataSource - retrieveMessageTextProperty ClientConnectionPoolDataSource cpds = new ClientConnectionPoolDataSource(); cpds.setDatabaseName(dbName); cpds.setConnectionAttributes( retrieveMessageTextProperty + "=false"); conn = cpds.getConnection(); assertMessageText(conn,"false"); conn.close(); cpds.setConnectionAttributes( retrieveMessageTextProperty + "=true"); conn = cpds.getConnection(); assertMessageText(conn,"true"); cpds.setConnectionAttributes(null); conn.close(); // now with XADataSource ClientXADataSource xads = new ClientXADataSource(); //XADataSource - retrieveMessageTextProperty xads.setDatabaseName(dbName); xads.setConnectionAttributes( retrieveMessageTextProperty + "=false"); conn = xads.getConnection(); assertMessageText(conn,"false"); conn.close(); xads.setConnectionAttributes( retrieveMessageTextProperty + "=true"); conn = xads.getConnection(); assertMessageText(conn,"true"); conn.close(); xads.setConnectionAttributes(null); } /* -- Helper Method for testClientMessageTextDSConnectionAttribute -- */ private static void assertMessageText( Connection conn, String retrieveMessageTextValue) throws SQLException { try { conn.createStatement().executeQuery("SELECT * FROM APP.NOTTHERE"); } catch (SQLException e) { assertSQLState("42X05", e); if (retrieveMessageTextValue.equals("true") ) { assertTrue(e.getMessage().indexOf("does not exist") >= 0); } else { // retrieveMessageTextValue is false assertTrue(e.getMessage().indexOf("does not exist") == -1); } } } /** * Check that messageText connection attribute functions correctly. * retrievemessagetext was tested in checkdriver, and derbynet/testij * (but not tested for datasources), and in datasourcepermissions_net, * but as it has nothing to do with permissions/authentication, * this test seems a better place for it. * * @throws SQLException */ public void testDescriptionProperty() throws SQLException, Exception { // DataSource - setDescription subTestDataSourceDescription(JDBCDataSource.getDataSource()); // ConnectionPoolDataSource - setDescription subTestDataSourceDescription( (DataSource) J2EEDataSource.getConnectionPoolDataSource()); // XADataSource - setDescription subTestDataSourceDescription( (DataSource) J2EEDataSource.getXADataSource()); } /** * Utility method for testing setting and fetching the description * property on a data source. */ private void subTestDataSourceDescription(DataSource ds) throws Exception { String setDescription = "Everything you ever wanted to know about this datasource"; JDBCDataSource.setBeanProperty(ds, "description", setDescription); ds.getConnection(); assertEquals(setDescription, JDBCDataSource.getBeanProperty(ds, "description")); JDBCDataSource.clearStringBeanProperty(ds, "description"); assertNull(JDBCDataSource.getBeanProperty(ds, "description")); } /* ------------------ JDBC30 (and up) Fixtures ------------------ */ public void testXAHoldability() throws SQLException, XAException { // DERBY-2533 - // This test, when run with Network server / DerbyNetClient // leaves the database is a bad state which results in a // network protocol error if (usingDerbyNetClient()) return; // START XA HOLDABILITY TEST XADataSource dscsx = J2EEDataSource.getXADataSource(); XAConnection xac = dscsx.getXAConnection(); XAResource xr = xac.getXAResource(); Xid xid = new cdsXid(25, (byte) 21, (byte) 01); Connection conn1 = xac.getConnection(); // check that autocommit is true; default for a connection assertTrue(conn1.getAutoCommit()); // check that holdability is HOLD_CURSORS_OVER_COMMIT in a default // CONNECTION(not in xa transaction yet) assertEquals( ResultSet.HOLD_CURSORS_OVER_COMMIT, conn1.getHoldability()); // start a global transaction and default holdability and // autocommit will be switched to match Derby XA restrictions xr.start(xid, XAResource.TMNOFLAGS); // So, now autocommit should be false for connection because it is // part of the global transaction assertFalse(conn1.getAutoCommit()); // Connection's holdability is now CLOSE_CURSORS_AT_COMMIT because // it is part of the global transaction assertEquals( ResultSet.CLOSE_CURSORS_AT_COMMIT, conn1.getHoldability()); xr.end(xid, XAResource.TMSUCCESS); conn1.commit(); conn1.close(); xid = new cdsXid(27, (byte) 21, (byte) 01); xr.start(xid, XAResource.TMNOFLAGS); conn1 = xac.getConnection(); // CONNECTION(in xa transaction) HOLDABILITY: assertEquals( ResultSet.CLOSE_CURSORS_AT_COMMIT, conn1.getHoldability()); // Autocommit on Connection inside global transaction should be false assertFalse(conn1.getAutoCommit()); xr.end(xid, XAResource.TMSUCCESS); conn1.rollback(); Connection conn = xac.getConnection(); conn.setAutoCommit(false); conn.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT); // CONNECTION(non-xa transaction) HOLDABILITY: assertEquals( ResultSet.CLOSE_CURSORS_AT_COMMIT, conn.getHoldability()); Statement s = conn.createStatement(); // STATEMENT HOLDABILITY: assertEquals( ResultSet.CLOSE_CURSORS_AT_COMMIT, s.getResultSetHoldability()); s.executeUpdate("insert into hold_30 values " + "(1,'init2'), (2, 'init3'), (3,'init3')"); s.executeUpdate("insert into hold_30 values " + "(4,'init4'), (5, 'init5'), (6,'init6')"); s.executeUpdate("insert into hold_30 values " + "(7,'init7'), (8, 'init8'), (9,'init9')"); // STATEMENT HOLDABILITY : assertEquals( ResultSet.CLOSE_CURSORS_AT_COMMIT, s.getResultSetHoldability()); Statement sh = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT); PreparedStatement psh = conn.prepareStatement( "select id from hold_30 for update", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT); CallableStatement csh = conn.prepareCall( "select id from hold_30 for update", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT); // STATEMENT HOLDABILITY : assertEquals( ResultSet.HOLD_CURSORS_OVER_COMMIT, sh.getResultSetHoldability()); // PREPARED STATEMENT HOLDABILITY : assertEquals( ResultSet.HOLD_CURSORS_OVER_COMMIT, psh.getResultSetHoldability()); // CALLABLE STATEMENT HOLDABILITY : assertEquals( ResultSet.HOLD_CURSORS_OVER_COMMIT, csh.getResultSetHoldability()); ResultSet rsh = sh.executeQuery("select id from hold_30 for update"); rsh.next(); assertEquals(1, rsh.getInt(1)); // H@1 id rsh.next(); assertEquals(2, rsh.getInt(1)); // H@2 id conn.commit(); rsh.next(); assertEquals(3, rsh.getInt(1)); // H@3 id conn.commit(); xid = new cdsXid(23, (byte) 21, (byte) 01); xr.start(xid, XAResource.TMNOFLAGS); Statement stmtInsideGlobalTransaction = conn.createStatement(); PreparedStatement prepstmtInsideGlobalTransaction = conn.prepareStatement("select id from hold_30"); CallableStatement callablestmtInsideGlobalTransaction = conn.prepareCall("select id from hold_30"); // CONNECTION(xa) HOLDABILITY: assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, conn.getHoldability()); // STATEMENT(this one was created with holdability false, outside the // global transaction. Check its holdability inside global transaction assertEquals( ResultSet.CLOSE_CURSORS_AT_COMMIT, s.getResultSetHoldability()); // STATEMENT(this one was created with holdability true, // outside the global transaction. Check its holdability inside // global transaction: // DERBY-2531: network server / DerbyNetClient has a different value // than embedded. if (usingEmbedded()) assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, sh.getResultSetHoldability()); else if (usingDerbyNetClient()) assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, sh.getResultSetHoldability()); // STATEMENT(this one was created with default holdability inside this // global transaction. Check its holdability: assertEquals( ResultSet.CLOSE_CURSORS_AT_COMMIT, stmtInsideGlobalTransaction.getResultSetHoldability()); // PREPAREDSTATEMENT(this one was created with default holdability // inside this global transaction. Check its holdability: assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, prepstmtInsideGlobalTransaction.getResultSetHoldability()); // CALLABLESTATEMENT(this one was created with default holdability // inside this global transaction. Check its holdability: assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, callablestmtInsideGlobalTransaction.getResultSetHoldability()); ResultSet rsx = s.executeQuery("select id from hold_30 for update"); rsx.next(); assertEquals(1, rsx.getInt(1)); // X@1 id rsx.next(); assertEquals(2, rsx.getInt(1)); // X@2 id xr.end(xid, XAResource.TMSUCCESS); // result set should not be useable, since it is part of a detached // XAConnection try { rsx.next(); fail("rsx's connection not active id "); } catch (SQLException sqle) { assertSQLState("08003", sqle); } // result set should not be useable, it should have been closed by // the xa start. try { rsh.next(); fail("rsh's connection not active id "); } catch (SQLException sqle) { if (usingEmbedded()) assertSQLState("08003", sqle); else if (usingDerbyNetClient()) assertSQLState("XCL16", sqle); } // resume XA transaction and keep using rs"); xr.start(xid, XAResource.TMJOIN); Statement stmtAfterGlobalTransactionResume = conn.createStatement(); PreparedStatement prepstmtAfterGlobalTransactionResume = conn.prepareStatement("select id from hold_30"); CallableStatement callablestmtAfterGlobalTransactionResume = conn.prepareCall("select id from hold_30"); // Check holdability of various jdbc objects after resuming XA // transaction // CONNECTION(xa) HOLDABILITY: assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT,conn.getHoldability()); // STATEMENT(this one was created with holdability false, outside the // global transaction. Check its holdability inside global transaction assertEquals( ResultSet.CLOSE_CURSORS_AT_COMMIT, s.getResultSetHoldability()); // STATEMENT(this one was created with holdability true, outside the // global transaction. Check its holdability inside global transaction if (usingEmbedded()) assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, sh.getResultSetHoldability()); else if (usingDerbyNetClient()) assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, sh.getResultSetHoldability()); // STATEMENT(this one was created with default holdability inside the // global transaction when it was first started. Check its holdability assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, stmtInsideGlobalTransaction.getResultSetHoldability()); // PREPAREDSTATEMENT(this one was created with default holdability // inside the global transaction when it was first started. Check its // holdability) assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, prepstmtInsideGlobalTransaction.getResultSetHoldability()); // CALLABLESTATEMENT(this one was created with default holdability // inside the global transaction when it was first started. Check its // holdability) HOLDABILITY assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, callablestmtInsideGlobalTransaction.getResultSetHoldability()); // STATEMENT(this one was created with default holdability after the // global transaction was resumed. Check its holdability assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, stmtAfterGlobalTransactionResume.getResultSetHoldability()); // PREPAREDSTATEMENT(this one was created with default holdability // after the global transaction was resumed. Check its holdability assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, prepstmtAfterGlobalTransactionResume.getResultSetHoldability()); // CALLABLESTATEMENT(this one was created with default holdability // after the global transaction was resumed. Check its holdability assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, callablestmtAfterGlobalTransactionResume.getResultSetHoldability()); // DERBY-1370 if (usingEmbedded()) { // Network XA BUG gives result set closed rsx.next(); assertEquals(3, rsx.getInt(1)); // X@3 id } xr.end(xid, XAResource.TMSUCCESS); if (xr.prepare(xid) != XAResource.XA_RDONLY) xr.commit(xid, false); // try again once the xa transaction has been committed. try { rsx.next(); fail("rsx's connection not active id (B)"); } catch (SQLException sqle) { assertSQLState("XCL16", sqle); } try { rsh.next(); fail ("rsh's should be closed (B)"); } catch (SQLException sqle) { assertSQLState("XCL16", sqle); } // Set connection to hold conn.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); // CONNECTION(held) HOLDABILITY: assertEquals(ResultSet.HOLD_CURSORS_OVER_COMMIT, conn.getHoldability()); xid = new cdsXid(24, (byte) 21, (byte) 01); xr.start(xid, XAResource.TMNOFLAGS); // CONNECTION(xa) HOLDABILITY: assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, conn.getHoldability()); try { conn.setHoldability(ResultSet.HOLD_CURSORS_OVER_COMMIT); fail("allowed to set hold mode in xa transaction"); } catch (SQLException sqle) { assertSQLState("XJ05C", sqle); } // JDBC 4.0 (proposed final draft) section 16.1.3.1 allows Statements // to be created with a different holdability if the driver cannot // support it. In this case the driver does not support holdability in // a global transaction, so a valid statement is returned with close // cursors on commit. Statement shxa = conn.createStatement(ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT); // HOLDABLE Statement in global xact " assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, s.getResultSetHoldability()); assertEquals(10000, conn.getWarnings().getErrorCode()); shxa.close(); shxa = conn.prepareStatement("select id from hold_30", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT); // HOLDABLE PreparedStatement in global xact assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, s.getResultSetHoldability()); assertEquals(10000, conn.getWarnings().getErrorCode()); shxa.close(); shxa = conn.prepareCall("CALL SYSCS_UTIL.SYSCS_CHECKPOINT_DATABASE()", ResultSet.TYPE_FORWARD_ONLY, ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT); // HOLDABLE CallableStatement in global xact: assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, s.getResultSetHoldability()); assertEquals(10000, conn.getWarnings().getErrorCode()); shxa.close(); // check we can use a holdable statement set up in local mode. // holdability is downgraded, tested in XATest.java // DERBY-1370 if(usingEmbedded()) { // STATEMENT HOLDABILITY: assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, sh.getResultSetHoldability()); sh.executeQuery("select id from hold_30").close(); sh.execute("select id from hold_30"); sh.getResultSet().close(); // PREPARED STATEMENT HOLDABILITY: assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, psh.getResultSetHoldability()); psh.executeQuery().close(); psh.execute(); psh.getResultSet().close(); // CALLABLE STATEMENT HOLDABILITY: assertEquals(ResultSet.CLOSE_CURSORS_AT_COMMIT, csh.getResultSetHoldability()); csh.executeQuery().close(); csh.execute(); csh.getResultSet().close(); } // but an update works sh.executeUpdate("insert into hold_30 values(10, 'init10')"); xr.end(xid, XAResource.TMSUCCESS); // CONNECTION(held) HOLDABILITY: assertEquals( ResultSet.HOLD_CURSORS_OVER_COMMIT, conn.getHoldability()); s.close(); sh.close(); csh.close(); psh.close(); rsx.close(); stmtInsideGlobalTransaction.close(); prepstmtInsideGlobalTransaction.close(); callablestmtInsideGlobalTransaction.close(); stmtAfterGlobalTransactionResume.close(); prepstmtAfterGlobalTransactionResume.close(); callablestmtAfterGlobalTransactionResume.close(); conn.close(); xac.close(); TestConfiguration.getCurrent().shutdownDatabase(); // END XA HOLDABILITY TEST"); } /** * Tests for DERBY-1144 * * This test tests that holdability, autocomit, and transactionIsolation * are reset on getConnection for PooledConnections obtaind from * connectionPoolDataSources * * DERBY-1134 has been filed for more comprehensive testing of client * connection state. * * @throws SQLException */ public void timeoutTestDerby1144PooledDS() throws SQLException { PooledConnection pc1 = null; // Test holdability ConnectionPoolDataSource ds = J2EEDataSource.getConnectionPoolDataSource(); pc1 = ds.getPooledConnection(); assertPooledConnHoldability("PooledConnection", pc1); pc1.close(); // Test autocommit pc1 = ds.getPooledConnection(); assertPooledConnAutoCommit("PooledConnection", pc1); pc1.close(); // Test pooled connection isolation pc1 = ds.getPooledConnection(); assertPooledConnIso("PooledConnection" , pc1); pc1.close(); } public void timeoutTestDerby1144XADS() throws SQLException { XADataSource xds = J2EEDataSource.getXADataSource(); // Test xa connection isolation XAConnection xpc1 = xds.getXAConnection(); assertPooledConnIso("XAConnection", xpc1); xpc1.close(); } /* -------------- Helper Methods for testDerby1144 -------------- */ /** * Make sure autocommit gets reset on PooledConnection.getConnection() * @param desc description of connection * @param pc1 pooled connection to test * @throws SQLException */ private static void assertPooledConnAutoCommit( String desc, PooledConnection pc1) throws SQLException { // ** Verify autoCommit state Connection conn = pc1.getConnection(); conn.setAutoCommit(true); // reset the connection and see if the autocommit has changed conn = pc1.getConnection(); boolean autocommit = conn.getAutoCommit(); // autocommit should get reset on getConnection assertTrue(autocommit); conn.close(); } /** * Checks that Holdability gets reset on PooledConnection.getConnection() * @param desc * @param pc1 * @throws SQLException */ private static void assertPooledConnHoldability( String desc, PooledConnection pc1) throws SQLException { // **Test holdability state Connection conn = pc1.getConnection(); conn.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT); // reset the connection and see if the holdability gets reset // to HOLD_CURSORS_OVER_COMMIT conn = pc1.getConnection(); assertConnHoldability(conn, ResultSet.HOLD_CURSORS_OVER_COMMIT); conn.close(); } /** * Verify connection holdablity is expected holdability * @param conn * @param expectedHoldability * * @throws SQLException */ private static void assertConnHoldability( Connection conn, int expectedHoldability) throws SQLException { int holdability = conn.getHoldability(); assertEquals (expectedHoldability, holdability); } /** * Test that isolation is reset on PooledConnection.getConnection() * @param pooledConnType Descripiton of the type of pooled connection * @param pc PooledConnection or XAConnection * @throws SQLException */ private void assertPooledConnIso( String pooledConnType, PooledConnection pc) throws SQLException { Connection conn = pc.getConnection(); setupDerby1144Table(conn); // *** Test isolation level reset on conntype.getConnection() conn.setTransactionIsolation(Connection.TRANSACTION_READ_UNCOMMITTED); assertIsoLocks(conn, Connection.TRANSACTION_READ_UNCOMMITTED); conn.close(); //Get a new connection with pooledConnType.getConnection() // Isolation level should be reset to READ_COMMITTED Connection newconn = pc.getConnection(); assertIsoLocks(newconn, Connection.TRANSACTION_READ_COMMITTED); } /* * insert two rows into the simple table for DERBY-1144 tests * @param conn * @throws SQLException */ private static void setupDerby1144Table(Connection conn) throws SQLException { Statement stmt = conn.createStatement(); stmt.executeUpdate("INSERT INTO intTable VALUES(1)"); stmt.executeUpdate("INSERT INTO intTable VALUES(2)"); conn.commit (); } /* * Checks locks for designated isolation level on the connection. * Currently only supports TRANSACTION_READ_COMMITTED and * TRANSACTION_READ_UNCOMMITTED * @param conn Connection to test * @param isoLevel expected isolation level * */ private void assertIsoLocks(Connection conn, int expectedIsoLevel) throws SQLException { int conniso = conn.getTransactionIsolation(); assertEquals(expectedIsoLevel, conniso); boolean selectTimedOut = selectTimesoutDuringUpdate(conn); // expect a lock timeout for READ_COMMITTED switch (conniso) { case Connection.TRANSACTION_READ_UNCOMMITTED: assertFalse(selectTimedOut); break; case Connection.TRANSACTION_READ_COMMITTED: assertTrue(selectTimedOut); break; default: System.out.println("No test support for isolation level"); } } /* * Determine if a select on this connection during update will timeout. * Used to establish isolation level. If the connection isolation level * is <code> Connection.TRANSACTION_READ_UNCOMMITTED </code> it will not * timeout. Otherwise it should. * * @param conn Connection to test. * @return true if the select got a lock timeout, false otherwise. */ private boolean selectTimesoutDuringUpdate(Connection conn) throws SQLException { Connection updateConn=null; conn.setAutoCommit(false); try { // create another connection and do an update but don't commit updateConn = openDefaultConnection(); updateConn.setAutoCommit(false); // First update the rows on the update connection Statement upStmt = updateConn.createStatement(); upStmt.executeUpdate("update intTable set i = 3"); // now see if we can select them Statement stmt = conn.createStatement(); ResultSet rs = stmt.executeQuery("Select * from intTable"); while (rs.next()){}; rs.close(); } catch (SQLException e) { if (e.getSQLState().equals("40XL1")) { // If we got a lock timeout this is not read uncommitted return true; } } finally { try { conn.rollback(); updateConn.rollback(); }catch (SQLException se) { se.printStackTrace(); } } return false; } /* -------------------- Other Helper Methods -------------------- */ private void assertConnectionState( int expectedHoldability, int expectedIsolation, boolean expectedCommitSetting, boolean expectedReadOnly, Connection conn) throws SQLException { assertEquals(expectedHoldability, conn.getHoldability()); assertEquals(expectedIsolation, conn.getTransactionIsolation()); assertEquals(expectedCommitSetting, conn.getAutoCommit()); assertEquals(expectedReadOnly, conn.isReadOnly()); } private static void setDatabaseProperty(String property, String value) throws SQLException { DataSource ds = JDBCDataSource.getDataSource(); Connection cadmin = ds.getConnection(); CallableStatement cs = cadmin.prepareCall( "CALL SYSCS_UTIL.SYSCS_SET_DATABASE_PROPERTY(?, ?)"); cs.setString(1, property); cs.setString(2, value); cs.execute(); cs.close(); cadmin.close(); } private void setHoldability(Connection conn, boolean hold) throws SQLException { conn.setHoldability(hold ? ResultSet.HOLD_CURSORS_OVER_COMMIT : ResultSet.CLOSE_CURSORS_AT_COMMIT); } private static void dsConnectionRequests( String[] expectedValues, DataSource ds) { // checks currently only implemented for embedded if (usingEmbedded()) { SecurityCheck.assertSourceSecurity(ds, "javax.sql.DataSource"); } try { ds.getConnection(); if (!expectedValues[0].equals("OK")) fail (" expected connection to fail, but was OK"); } catch (SQLException sqle) { assertSQLState(expectedValues[0], sqle); } dsConnectionRequest(expectedValues[1], ds, null, null); dsConnectionRequest(expectedValues[2], ds, "fred", null); dsConnectionRequest(expectedValues[3], ds, "fred", "wilma"); dsConnectionRequest(expectedValues[4], ds, null, "wilma"); dsConnectionRequest( expectedValues[5], ds, null, "databaseName=wombat"); dsConnectionRequest( expectedValues[6], ds, "fred", "databaseName=wombat"); dsConnectionRequest(expectedValues[7], ds, "fred", "databaseName=wombat;password=wilma"); dsConnectionRequest(expectedValues[8], ds, "fred", "databaseName=wombat;password=betty"); } private static void dsConnectionRequest( String expectedValue, DataSource ds, String user, String ConnAttr) { try { ds.getConnection(user, ConnAttr); if (!expectedValue.equals("OK")) fail (" expected connection to fail, but was OK"); } catch (SQLException sqle) { assertSQLState(expectedValue, sqle); } } private static void dsConnectionRequests( String[] expectedValues, ConnectionPoolDataSource ds) { try { ds.getPooledConnection(); if (!expectedValues[0].equals("OK")) fail (" expected connection to fail, but was OK"); } catch (SQLException sqle) { assertSQLState(expectedValues[0], sqle); } dsConnectionRequest(expectedValues[1], ds, null, null); dsConnectionRequest(expectedValues[2], ds, "fred", null); dsConnectionRequest(expectedValues[3], ds, "fred", "wilma"); dsConnectionRequest(expectedValues[4], ds, null, "wilma"); dsConnectionRequest( expectedValues[5], ds, null, "databaseName=wombat"); dsConnectionRequest( expectedValues[6], ds, "fred", "databaseName=wombat"); dsConnectionRequest(expectedValues[7], ds, "fred", "databaseName=wombat;password=wilma"); dsConnectionRequest(expectedValues[8], ds, "fred", "databaseName=wombat;password=betty"); } private static void dsConnectionRequest(String expectedValue, ConnectionPoolDataSource ds, String user, String ConnAttr) { try { ds.getPooledConnection(user, ConnAttr); if (!expectedValue.equals("OK")) fail (" expected connection to fail, but was OK"); } catch (SQLException sqle) { assertSQLState(expectedValue, sqle); } } private static void dsConnectionRequests( String[] expectedValues, XADataSource ds) { try { ds.getXAConnection(); if (!expectedValues[0].equals("OK")) fail (" expected connection to fail, but was OK"); } catch (SQLException sqle) { assertSQLState(expectedValues[0], sqle); } dsConnectionRequest(expectedValues[1], ds, null, null); dsConnectionRequest(expectedValues[2], ds, "fred", null); dsConnectionRequest(expectedValues[3], ds, "fred", "wilma"); dsConnectionRequest(expectedValues[4], ds, null, "wilma"); dsConnectionRequest( expectedValues[5], ds, null, "databaseName=" + dbName); dsConnectionRequest( expectedValues[6], ds, "fred", "databaseName=" + dbName); dsConnectionRequest(expectedValues[7], ds, "fred", "databaseName=" + dbName + ";password=wilma"); dsConnectionRequest(expectedValues[8], ds, "fred", "databaseName=" + dbName + ";password=betty"); } private static void dsConnectionRequest(String expectedValue, XADataSource ds, String user, String ConnAttr) { try { ds.getXAConnection(user, ConnAttr); if (!expectedValue.equals("OK")) fail (" expected connection to fail, but was OK"); } catch (SQLException sqle) { assertSQLState(expectedValue, sqle); } } protected void assertXAException(String tag, XAException xae) { // for all our cases, we expect some kind of closed con error // but the message is different for embedded vs. network server if (usingEmbedded()) assertEquals("No current connection.", xae.getMessage()); else if (usingDerbyNetClient()) assertEquals( "XAER_RMFAIL : No current connection.", xae.getMessage()); Throwable t = xae.getCause(); if (t instanceof SQLException) assertSQLState("08003", (SQLException)t); } private static void queryOnStatement(String expectedCursorName, int[] expectedValues, Connection conn, Statement s) throws SQLException { try { // DERBY-2531 // network server gives mismatched connections. See also // comment in testAllDataSources() if (usingEmbedded()) assertEquals(conn, s.getConnection()); resultSetQuery(expectedCursorName, expectedValues, s.executeQuery("select * from intTable")); } catch (SQLException sqle) { fail (" did not expect sql exception"); } } private static void resultSetQuery(String expectedCursorName, int[] expectedValues, ResultSet rs) throws SQLException { // checks currently only implemented for embedded if (usingEmbedded()) { SecurityCheck.assertSourceSecurity(rs, "java.sql.ResultSet"); } assertEquals(expectedCursorName, rs.getCursorName()); int index=0; while (rs.next()) { assertEquals(expectedValues[index], rs.getInt(1)); index++; } assertEquals(expectedValues.length, index++); rs.close(); } private static void assertLocks(int[] expectedValues, Connection conn) throws SQLException { Statement s = conn.createStatement(); ResultSet rs = s.executeQuery( "SELECT XID, sum(cast (LOCKCOUNT AS INT)) " + "FROM SYSCS_DIAG.LOCK_TABLE AS L GROUP BY XID"); // Don't output actual XID's as they tend for every catalog change // to the system. int xact_index = 0; while (rs.next()) { if (expectedValues != null) assertEquals(expectedValues[xact_index], rs.getInt(2)); else fail("expected no locks"); xact_index++; } if (expectedValues != null) assertEquals(expectedValues.length, xact_index); rs.close(); s.close(); } private void assertStatementState(int[] parameterExpectedValues, int[] expectedValues, Statement s) throws SQLException { assertEquals(expectedValues[0], s.getResultSetType()); assertEquals( expectedValues[1], s.getResultSetConcurrency()); assertEquals( expectedValues[2], s.getFetchDirection()); assertEquals(expectedValues[3], s.getFetchSize()); assertEquals(expectedValues[4], s.getMaxFieldSize()); assertEquals(expectedValues[5], s.getMaxRows()); assertEquals(expectedValues[6], s.getResultSetHoldability()); if (s instanceof PreparedStatement) { PreparedStatement ps = (PreparedStatement) s; ParameterMetaData psmd = ps.getParameterMetaData(); // Parameter count: assertEquals(parameterExpectedValues[0], psmd.getParameterCount()); for (int i = 1; i <= psmd.getParameterCount(); i++) { assertEquals(parameterExpectedValues[i], psmd.getParameterType(i)); } } } /** Create a statement with modified State. */ private Statement createFloatStatementForStateChecking( int[] StatementExpectedValues, Connection conn) throws SQLException { Statement s = internalCreateFloatStatementForStateChecking(conn); s.setCursorName("StokeNewington"); s.setFetchDirection(ResultSet.FETCH_REVERSE); s.setFetchSize(444); s.setMaxFieldSize(713); s.setMaxRows(19); // Create assertStatementState(null, StatementExpectedValues, s); return s; } private Statement internalCreateFloatStatementForStateChecking( Connection conn) throws SQLException { return conn.createStatement( ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT); } private PreparedStatement createFloatStatementForStateChecking( int[] parameterExpectedValues, int[] PreparedStatementExpectedValues, Connection conn, String sql) throws SQLException { PreparedStatement s = internalCreateFloatStatementForStateChecking(conn, sql); s.setCursorName("StokeNewington"); s.setFetchDirection(ResultSet.FETCH_REVERSE); s.setFetchSize(888); s.setMaxFieldSize(317); s.setMaxRows(91); // PreparedStatement Create assertStatementState( parameterExpectedValues, PreparedStatementExpectedValues, s); return s; } private PreparedStatement internalCreateFloatStatementForStateChecking( Connection conn, String sql) throws SQLException { return conn.prepareStatement(sql, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT); } private CallableStatement createFloatCallForStateChecking( int[] parameterExpectedValues, int[] CallableStatementExpectedValues, Connection conn, String sql) throws SQLException { CallableStatement s = internalCreateFloatCallForStateChecking(conn, sql); s.setCursorName("StokeNewington"); s.setFetchDirection(ResultSet.FETCH_REVERSE); s.setFetchSize(999); s.setMaxFieldSize(137); s.setMaxRows(85); // Callable Statement Create assertStatementState( parameterExpectedValues, CallableStatementExpectedValues, s); return s; } private CallableStatement internalCreateFloatCallForStateChecking( Connection conn, String sql) throws SQLException { return conn.prepareCall(sql, ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY, ResultSet.HOLD_CURSORS_OVER_COMMIT); } private void assertConnectionOK( Object[] expectedValues, String dsName, Connection conn) throws SQLException { assertEquals( ((Integer)expectedValues[0]).intValue(), conn.getHoldability()); // check it's a 3.0 connection object by checking if // set & release Savepoint is ok. try { conn.releaseSavepoint(conn.setSavepoint()); if (conn.getAutoCommit()) fail("expected a SQLExpection (savepoint with autocommit on"); if (!((String)expectedValues[1]).equals("OK")) fail("expected a SQLExpection (savepoint with autocommit on"); } catch (SQLException sqle) { // we expect savepoints exceptions because either // it's a global transaction, or it's in auto commit mode. if (conn.getAutoCommit()) assertSQLState("XJ010", sqle); else if (((String)expectedValues[1]).equals("OK")) fail ("unexpected JDBC 3.0 savepoint SQL Exception"); else assertSQLState((String)expectedValues[1], sqle); } // Running connection checks // connection checks currently only implemented for Embedded if (usingEmbedded()) { SecurityCheck.assertSourceSecurity(conn, "java.sql.Connection"); SecurityCheck.assertSourceSecurity( conn.getMetaData(), "java.sql.DatabaseMetaData"); } assertEquals(((Integer)expectedValues[2]).intValue(), conn.getTransactionIsolation()); assertEquals(((Boolean)expectedValues[3]).booleanValue(), conn.getAutoCommit()); assertEquals(((Boolean)expectedValues[4]).booleanValue(), conn.isReadOnly()); if (dsName.endsWith("DataSource")) assertNull(conn.getWarnings()); Statement s1 = conn.createStatement(); assertStatementOK(dsName, conn, s1); assertStatementOK(dsName, conn, conn.createStatement (ResultSet.TYPE_SCROLL_INSENSITIVE, ResultSet.CONCUR_READ_ONLY)); Connection c1 = conn.getMetaData().getConnection(); // c1 and conn should be the same connection object. if (!usingDerbyNetClient() && dsName.indexOf("DataSource")>=0) assertEquals(c1, conn); // Derby-33 - setTypeMap on connection try { conn.setTypeMap(java.util.Collections.EMPTY_MAP); if (!((String)expectedValues[5]).equals("OK")) fail (" expected an sqlexception on setTypeMap(EMPTY_MAP)"); } catch (SQLException sqle) { if (((String)expectedValues[5]).equals("OK")) fail ("setTypeMap(EMPTY_MAP) failed "); else assertSQLState((String)expectedValues[5], sqle); } try { // expect 0A000 - not implemented for client, // XJ081 - invalid null value passed as map for embedded conn.setTypeMap(null); fail ("setTypeMap(null) should throw exception"); } catch (SQLException sqle) { assertSQLState((String)expectedValues[6], sqle); } try { // a populated map, not implemented java.util.Map map = new java.util.HashMap(); map.put("name", "class"); conn.setTypeMap(map); if (!((String)expectedValues[7]).equals("OK")) fail (" expected an sqlexception on setTypeMap(map)"); } catch (SQLException sqle) { if (((String)expectedValues[7]).equals("OK")) fail ("setTypeMap(valid value) failed "); else assertSQLState((String)expectedValues[7], sqle); } assertConnectionPreClose(dsName, conn); conn.close(); // method calls on a closed connection try { conn.close(); // expect no error } catch (SQLException sqle) { fail(" unexpected exception on <closedconn>.close() "); } try { conn.createStatement(); fail (dsName + " <closedconn>.createStatement(), " + "expected 08003 - No current connection"); } catch (SQLException sqle) { assertSQLState("08003", sqle); } try { s1.execute("values 1"); fail(dsName + " <closedstmt>.execute(), " + "expected 08003 - No current connection"); } catch (SQLException sqle) { assertSQLState("08003", sqle); } } private void assertConnectionPreClose(String dsName, Connection conn) throws SQLException { // before closing the connection, attempt to change holdability // and readOnly conn.setHoldability(ResultSet.CLOSE_CURSORS_AT_COMMIT); if (!dsName.equals("Nested2")) { try { conn.setReadOnly(true); } catch (SQLException sqle) { // cannot set read-only in an active transaction, & sometimes // connections are active at this point. assertSQLState("25501", sqle); } } } private void assertStatementOK(String dsName, Connection conn, Statement s) throws SQLException { // checks currently only implemented for embedded if (usingEmbedded()) { SecurityCheck.assertSourceSecurity(s, "java.sql.Statement"); } Connection c1 = s.getConnection(); if (c1 != conn) { // with DerbyNetClient and any kind of DataSource, this goes wrong if (!usingDerbyNetClient() && (dsName.indexOf("DataSource") >= 0)) fail ("incorrect connection object returned for Statement.getConnection()"); } s.addBatch("insert into intTable values 1"); s.addBatch("insert into intTable values 2,3"); int[] states = s.executeBatch(); if (states[0] != 1) fail ("invalid update count for first batch statement"); if (states[1] != 2) fail ("invalid update count for second batch statement"); ResultSet rs = s.executeQuery("VALUES 1"); if (rs.getStatement() != s) fail ("incorrect Statement object returned for ResultSet.getStatement for " + dsName); rs.close(); s.close(); } /** When a connection is being pooled, the underlying JDBC embedded connection object is re-used. As each application gets a new Connection object, that is really a wrapper around the old connection it should reset any connection spoecific state on the embedded connection object. */ private static void PoolReset(String type, PooledConnection pc) throws SQLException { PoolResetWork("1", "C", pc.getConnection()); PoolResetWork("2", "", pc.getConnection()); PoolResetWork("3", "D", pc.getConnection()); pc.close(); } private static void PoolResetWork( String expectedID, String tableAction, Connection conn) throws SQLException { Statement s = conn.createStatement(); if (tableAction.equals("C")) { s.execute("CREATE TABLE PoolResetWork (id int generated always as identity, name varchar(25))"); } ResultSet rs = s.executeQuery("VALUES IDENTITY_VAL_LOCAL()"); rs.next(); String val = rs.getString(1); if (!rs.wasNull() || (val != null)) fail ("initial call to IDENTITY_VAL_LOCAL is not NULL!" + val); rs.close(); s.executeUpdate("INSERT INTO PoolResetWork(name) values ('derby-222')"); rs = s.executeQuery("VALUES IDENTITY_VAL_LOCAL()"); rs.next(); val = rs.getString(1); assertEquals(expectedID, val); rs.close(); if (tableAction.equals("D")) { s.execute("DROP TABLE PoolResetWork"); } s.close(); conn.close(); } /** * Make sure this connection's string is unique (DERBY-243) */ private static void assertToString(Connection conn) throws Exception { assertStringFormat(conn); String str = conn.toString(); if ( conns.containsKey(str)) { throw new Exception("ERROR: Connection toString() is not unique: " + str); } conns.put(str, conn); } /** * Check the format of a pooled connection **/ private static void assertStringFormat(PooledConnection pc) throws Exception { String prefix = assertStringPrefix(pc); String connstr = pc.toString(); String format = prefix + " \\(ID = [0-9]+\\), Physical Connection = " + "<none>|" + CONNSTRING_FORMAT; assertTrue(connstr.matches(format)); } /** * Check the format of the connection string. This is the default test * to run if this is not a BrokeredConnection class */ private static void assertStringFormat(Connection conn) //throws Exception { assertStringPrefix(conn); String str = conn.toString(); assertTrue("\nexpected format:\n " + CONNSTRING_FORMAT + "\nactual value:\n " + str, str.matches(CONNSTRING_FORMAT)); } /** * Make sure the connection string starts with the right prefix, which * is the classname@hashcode. * * @return the expected prefix string, this is used in further string * format checking */ private static String assertStringPrefix(Object conn) //throws Exception { String connstr = conn.toString(); String prefix = conn.getClass().getName() + "@" + conn.hashCode(); // Connection class and has code for connection string should // match prefix assertTrue(connstr.startsWith(prefix)); return prefix; } /** * Check uniqueness of connection strings coming from a * DataSouce */ private static void assertToString(DataSource ds) throws Exception { clearConnections(); int numConnections = 10; for ( int i = 0 ; i < numConnections ; i++ ) { Connection conn = ds.getConnection(); assertToString(conn); } clearConnections(); } /** * Clear out and close connections in the connections * hashtable. */ private static void clearConnections() throws SQLException { java.util.Iterator it = conns.values().iterator(); while ( it.hasNext() ) { Connection conn = (Connection)it.next(); conn.close(); } conns.clear(); } /** * Get connections using getConnection() and make sure * they're unique */ private void assertTenConnectionsUnique() throws Exception { clearConnections(); // Open ten connections rather than just two to // try and catch any odd uniqueness bugs. Still // no guarantee but is better than just two. int numConnections = 10; for ( int i = 0 ; i < numConnections ; i++ ) { Connection conn = openDefaultConnection(); assertToString(conn); } // Now close the connections clearConnections(); } /** * Check uniqueness of strings for an XA data source */ private static void assertToString(XADataSource xds) throws Exception { int numConnections = 10; // First get a bunch of pooled connections // and make sure they're all unique Hashtable xaConns = new Hashtable(); for ( int i = 0 ; i < numConnections ; i++ ) { XAConnection xc = xds.getXAConnection(); assertStringFormat(xc); String str = xc.toString(); // XA connection toString should be unique assertNull(xaConns.get(str)); xaConns.put(str, xc); } // Now check that connections from each of these // pooled connections have different string values Iterator it = xaConns.values().iterator(); clearConnections(); while ( it.hasNext() ) { XAConnection xc = (XAConnection)it.next(); Connection conn = xc.getConnection(); assertToString(conn); } clearConnections(); // Now clear out the pooled connections it = xaConns.values().iterator(); while ( it.hasNext() ) { XAConnection xc = (XAConnection)it.next(); xc.close(); } xaConns.clear(); } /** * Check uniqueness of strings with a pooled data source. * We want to check the PooledConnection as well as the * underlying physical connection. */ private static void assertToString(ConnectionPoolDataSource pds) throws Exception { int numConnections = 10; // First get a bunch of pooled connections // and make sure they're all unique Hashtable pooledConns = new Hashtable(); for ( int i = 0 ; i < numConnections ; i++ ) { PooledConnection pc = pds.getPooledConnection(); assertStringFormat(pc); String str = pc.toString(); // Pooled connection toString should be unique assertNull( pooledConns.get(str)); pooledConns.put(str, pc); } // Now check that connections from each of these // pooled connections have different string values Iterator it = pooledConns.values().iterator(); clearConnections(); while ( it.hasNext() ) { PooledConnection pc = (PooledConnection)it.next(); Connection conn = pc.getConnection(); assertToString(conn); } clearConnections(); // Now clear out the pooled connections it = pooledConns.values().iterator(); while ( it.hasNext() ) { PooledConnection pc = (PooledConnection)it.next(); pc.close(); } pooledConns.clear(); } /** * Return the Java class and method for the procedure * for the nested connection test. */ private static String getNestedMethodName() { return "checkNesConn"; } // calling checkConnection // - for use in a procedure to get a nested connection. public static void checkNesConn (String dsName) throws SQLException { Connection conn = DriverManager.getConnection("jdbc:default:connection"); String EmptyMapValue=null; // Note: currently, not supported String NullMapValue=null; String MapMapValue=null; if (usingEmbedded()) { EmptyMapValue="OK"; NullMapValue="XJ081"; MapMapValue="0A000"; } else if (usingDerbyNetClient()) { EmptyMapValue="0A000"; NullMapValue="0A000"; MapMapValue="0A000"; } Object[] expectedValues = { new Integer(ResultSet.HOLD_CURSORS_OVER_COMMIT), "OK", new Integer(2), new Boolean(false), new Boolean(false), EmptyMapValue, NullMapValue, MapMapValue}; new DataSourceTest("DataSourceTest").assertConnectionOK( expectedValues, dsName, conn); } } class cdsXid implements Xid, Serializable { private static final long serialVersionUID = 64467338100036L; private final int format_id; private byte[] global_id; private byte[] branch_id; cdsXid(int xid, byte b1, byte b2) { format_id = xid; global_id = new byte[Xid.MAXGTRIDSIZE]; branch_id = new byte[Xid.MAXBQUALSIZE]; for (int i = 0; i < global_id.length; i++) { global_id[i] = b1; } for (int i = 0; i < branch_id.length; i++) { branch_id[i] = b2; } } /** * Obtain the format id part of the Xid. * <p> * * @return Format identifier. O means the OSI CCR format. **/ public int getFormatId() { return(format_id); } /** * Obtain the global transaction identifier part of XID as an array of * bytes. * <p> * * @return A byte array containing the global transaction identifier. **/ public byte[] getGlobalTransactionId() { return(global_id); } /** * Obtain the transaction branch qualifier part of the Xid in a byte array. * <p> * * @return A byte array containing the branch qualifier of the transaction. **/ public byte[] getBranchQualifier() { return(branch_id); } }
Add commented out code that exposes the bug described by DERBY-3401. Removing a listener from a pooled connection during logical connection close processing causes other listeners to be ignored. git-svn-id: 2c06e9c5008124d912b69f0b82df29d4867c0ce2@619991 13f79535-47bb-0310-9956-ffa450edef68
java/testing/org/apache/derbyTesting/functionTests/tests/jdbcapi/DataSourceTest.java
Add commented out code that exposes the bug described by DERBY-3401. Removing a listener from a pooled connection during logical connection close processing causes other listeners to be ignored.
Java
apache-2.0
44c4730c9ae30c67e4363dbfbb32865f40260b95
0
zimmermatt/flink,mbode/flink,aljoscha/flink,zohar-mizrahi/flink,bowenli86/flink,aljoscha/flink,clarkyzl/flink,PangZhi/flink,xccui/flink,haohui/flink,kl0u/flink,zjureel/flink,WangTaoTheTonic/flink,StephanEwen/incubator-flink,kl0u/flink,hongyuhong/flink,WangTaoTheTonic/flink,haohui/flink,fanzhidongyzby/flink,xccui/flink,gustavoanatoly/flink,apache/flink,mtunique/flink,fhueske/flink,kaibozhou/flink,kaibozhou/flink,fanzhidongyzby/flink,shaoxuan-wang/flink,kaibozhou/flink,tillrohrmann/flink,GJL/flink,zohar-mizrahi/flink,rmetzger/flink,fhueske/flink,zentol/flink,gustavoanatoly/flink,clarkyzl/flink,tony810430/flink,godfreyhe/flink,tillrohrmann/flink,GJL/flink,kl0u/flink,kaibozhou/flink,aljoscha/flink,tony810430/flink,tzulitai/flink,hequn8128/flink,zjureel/flink,jinglining/flink,rmetzger/flink,twalthr/flink,zohar-mizrahi/flink,ueshin/apache-flink,haohui/flink,yew1eb/flink,xccui/flink,jinglining/flink,zohar-mizrahi/flink,fhueske/flink,fanzhidongyzby/flink,gustavoanatoly/flink,fanyon/flink,twalthr/flink,gyfora/flink,ueshin/apache-flink,hongyuhong/flink,lincoln-lil/flink,fhueske/flink,zhangminglei/flink,mylog00/flink,gyfora/flink,rmetzger/flink,hequn8128/flink,zjureel/flink,DieBauer/flink,gustavoanatoly/flink,jinglining/flink,wwjiang007/flink,kaibozhou/flink,ueshin/apache-flink,fanzhidongyzby/flink,zjureel/flink,hequn8128/flink,clarkyzl/flink,zentol/flink,fanyon/flink,zentol/flink,tzulitai/flink,hongyuhong/flink,xccui/flink,clarkyzl/flink,DieBauer/flink,zimmermatt/flink,wwjiang007/flink,jinglining/flink,godfreyhe/flink,twalthr/flink,wwjiang007/flink,fanyon/flink,zentol/flink,bowenli86/flink,hwstreaming/flink,apache/flink,lincoln-lil/flink,rmetzger/flink,zimmermatt/flink,zentol/flink,Xpray/flink,zhangminglei/flink,zhangminglei/flink,kaibozhou/flink,rmetzger/flink,gustavoanatoly/flink,tony810430/flink,xccui/flink,xccui/flink,ueshin/apache-flink,tillrohrmann/flink,shaoxuan-wang/flink,hequn8128/flink,lincoln-lil/flink,WangTaoTheTonic/flink,StephanEwen/incubator-flink,hwstreaming/flink,shaoxuan-wang/flink,apache/flink,PangZhi/flink,tzulitai/flink,tillrohrmann/flink,aljoscha/flink,hongyuhong/flink,bowenli86/flink,zohar-mizrahi/flink,zentol/flink,aljoscha/flink,fhueske/flink,fanyon/flink,gyfora/flink,jinglining/flink,sunjincheng121/flink,gyfora/flink,hequn8128/flink,twalthr/flink,yew1eb/flink,hwstreaming/flink,greghogan/flink,yew1eb/flink,tony810430/flink,mbode/flink,rmetzger/flink,darionyaphet/flink,GJL/flink,hongyuhong/flink,xccui/flink,Xpray/flink,yew1eb/flink,Xpray/flink,darionyaphet/flink,bowenli86/flink,sunjincheng121/flink,godfreyhe/flink,haohui/flink,yew1eb/flink,lincoln-lil/flink,mylog00/flink,godfreyhe/flink,jinglining/flink,sunjincheng121/flink,zimmermatt/flink,tony810430/flink,StephanEwen/incubator-flink,twalthr/flink,lincoln-lil/flink,hwstreaming/flink,rmetzger/flink,mtunique/flink,GJL/flink,hwstreaming/flink,mtunique/flink,tillrohrmann/flink,mbode/flink,tony810430/flink,DieBauer/flink,fanyon/flink,StephanEwen/incubator-flink,gyfora/flink,darionyaphet/flink,WangTaoTheTonic/flink,tillrohrmann/flink,DieBauer/flink,bowenli86/flink,mtunique/flink,lincoln-lil/flink,zjureel/flink,zentol/flink,lincoln-lil/flink,wwjiang007/flink,ueshin/apache-flink,mylog00/flink,kl0u/flink,GJL/flink,tzulitai/flink,godfreyhe/flink,kl0u/flink,tzulitai/flink,darionyaphet/flink,gyfora/flink,Xpray/flink,wwjiang007/flink,kl0u/flink,mbode/flink,apache/flink,zimmermatt/flink,darionyaphet/flink,shaoxuan-wang/flink,godfreyhe/flink,haohui/flink,greghogan/flink,sunjincheng121/flink,greghogan/flink,sunjincheng121/flink,tzulitai/flink,tony810430/flink,mylog00/flink,WangTaoTheTonic/flink,greghogan/flink,mtunique/flink,aljoscha/flink,zjureel/flink,gyfora/flink,tillrohrmann/flink,twalthr/flink,twalthr/flink,StephanEwen/incubator-flink,PangZhi/flink,DieBauer/flink,zjureel/flink,apache/flink,Xpray/flink,apache/flink,godfreyhe/flink,wwjiang007/flink,hequn8128/flink,bowenli86/flink,zhangminglei/flink,sunjincheng121/flink,wwjiang007/flink,shaoxuan-wang/flink,mylog00/flink,PangZhi/flink,greghogan/flink,fhueske/flink,GJL/flink,fanzhidongyzby/flink,PangZhi/flink,shaoxuan-wang/flink,apache/flink,zhangminglei/flink,greghogan/flink,clarkyzl/flink,mbode/flink,StephanEwen/incubator-flink
/*********************************************************************************************************************** * * Copyright (C) 2010-2012 by the Stratosphere project (http://stratosphere.eu) * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * **********************************************************************************************************************/ package eu.stratosphere.nephele.fs.s3; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.URI; import org.junit.Before; import org.junit.Test; import eu.stratosphere.nephele.configuration.Configuration; import eu.stratosphere.nephele.configuration.GlobalConfiguration; import eu.stratosphere.nephele.fs.BlockLocation; import eu.stratosphere.nephele.fs.FSDataInputStream; import eu.stratosphere.nephele.fs.FSDataOutputStream; import eu.stratosphere.nephele.fs.FileStatus; import eu.stratosphere.nephele.fs.FileSystem; import eu.stratosphere.nephele.fs.Path; /** * This test checks the S3 implementation of the {@link FileSystem} interface. * * @author warneke */ public class S3FileSystemTest { /** * The length of the bucket/object names used in this test. */ private static final int NAME_LENGTH = 32; /** * The alphabet to generate the random bucket/object names from. */ private static final char[] ALPHABET = { 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9' }; /** * The size of the byte buffer used during the tests in bytes. */ private static final int TEST_BUFFER_SIZE = 128; /** * The size of the small test file in bytes. */ private static final int SMALL_FILE_SIZE = 512; /** * The size of the large test file in bytes. */ private static final int LARGE_FILE_SIZE = 1024 * 1024 * 12; // 12 MB /** * The modulus to be used when generating the test data. Must not be larger than 128. */ private static final int MODULUS = 128; private static final String S3_BASE_URI = "s3:///"; /** * Tries to read the AWS access key and the AWS secret key from the environments variables. If accessing these keys * fails, all tests will be skipped and marked as successful. */ @Before public void initKeys() { final String accessKey = System.getenv("AK"); final String secretKey = System.getenv("SK"); final Configuration conf = new Configuration(); if (accessKey != null) { conf.setString(S3FileSystem.S3_ACCESS_KEY_KEY, accessKey); } if (secretKey != null) { conf.setString(S3FileSystem.S3_SECRET_KEY_KEY, secretKey); } GlobalConfiguration.includeConfiguration(conf); } /** * This test creates and deletes a bucket inside S3 and checks it is correctly displayed inside the directory * listing. */ @Test public void createAndDeleteBucketTest() { if (!testActivated()) { return; } final String bucketName = getRandomName(); final Path bucketPath = new Path(S3_BASE_URI + bucketName + Path.SEPARATOR); try { final FileSystem fs = bucketPath.getFileSystem(); // Create directory fs.mkdirs(bucketPath); // Check if directory is correctly displayed in file system hierarchy final FileStatus[] content = fs.listStatus(new Path(S3_BASE_URI)); boolean entryFound = false; for (final FileStatus entry : content) { if (bucketPath.equals(entry.getPath())) { entryFound = true; break; } } if (!entryFound) { fail("Cannot find entry " + bucketName + " in directory " + S3_BASE_URI); } // Check the concrete directory file status try { final FileStatus directoryFileStatus = fs.getFileStatus(bucketPath); assertTrue(directoryFileStatus.isDir()); assertEquals(0L, directoryFileStatus.getAccessTime()); assertTrue(directoryFileStatus.getModificationTime() > 0L); } catch (FileNotFoundException e) { fail(e.getMessage()); } // Delete the bucket fs.delete(bucketPath, true); // Make sure the bucket no longer exists try { fs.getFileStatus(bucketPath); fail("Expected FileNotFoundException for " + bucketPath.toUri()); } catch (FileNotFoundException e) { // This is an expected exception } } catch (IOException ioe) { fail(ioe.getMessage()); } } /** * Creates and reads the a larger test file in S3. The test file is generated according to a specific pattern. * During the read phase the incoming data stream is also checked against this pattern. */ @Test public void createAndReadLargeFileTest() { try { createAndReadFileTest(LARGE_FILE_SIZE); } catch (IOException ioe) { fail(ioe.getMessage()); } } /** * Creates and reads the a small test file in S3. The test file is generated according to a specific pattern. * During the read phase the incoming data stream is also checked against this pattern. */ @Test public void createAndReadSmallFileTest() { try { createAndReadFileTest(SMALL_FILE_SIZE); } catch (IOException ioe) { fail(ioe.getMessage()); } } /** * The tests checks the mapping of the file system directory structure to the underlying bucket/object model of * Amazon S3. */ @Test public void multiLevelDirectoryTest() { if (!testActivated()) { return; } final String dirName = getRandomName(); final String subdirName = getRandomName(); final String subsubdirName = getRandomName(); final String fileName = getRandomName(); final Path dir = new Path(S3_BASE_URI + dirName + Path.SEPARATOR); final Path subdir = new Path(S3_BASE_URI + dirName + Path.SEPARATOR + subdirName + Path.SEPARATOR); final Path subsubdir = new Path(S3_BASE_URI + dirName + Path.SEPARATOR + subdirName + Path.SEPARATOR + subsubdirName + Path.SEPARATOR); final Path file = new Path(S3_BASE_URI + dirName + Path.SEPARATOR + subdirName + Path.SEPARATOR + fileName); try { final FileSystem fs = dir.getFileSystem(); fs.mkdirs(subsubdir); final OutputStream os = fs.create(file, true); generateTestData(os, SMALL_FILE_SIZE); os.close(); // On this directory levels there should only be one subdirectory FileStatus[] list = fs.listStatus(dir); int numberOfDirs = 0; int numberOfFiles = 0; for (final FileStatus entry : list) { if (entry.isDir()) { ++numberOfDirs; assertEquals(subdir, entry.getPath()); } else { fail(entry.getPath() + " is a file which must not appear on this directory level"); } } assertEquals(1, numberOfDirs); assertEquals(0, numberOfFiles); list = fs.listStatus(subdir); numberOfDirs = 0; for (final FileStatus entry : list) { if (entry.isDir()) { assertEquals(subsubdir, entry.getPath()); ++numberOfDirs; } else { assertEquals(file, entry.getPath()); ++numberOfFiles; } } assertEquals(1, numberOfDirs); assertEquals(1, numberOfFiles); fs.delete(dir, true); } catch (IOException ioe) { fail(ioe.getMessage()); } } /** * This test checks the S3 implementation of the file system method to retrieve the block locations of a file. */ @Test public void blockLocationTest() { if (!testActivated()) { return; } final String dirName = getRandomName(); final String fileName = getRandomName(); final Path dir = new Path(S3_BASE_URI + dirName + Path.SEPARATOR); final Path file = new Path(S3_BASE_URI + dirName + Path.SEPARATOR + fileName); try { final FileSystem fs = dir.getFileSystem(); fs.mkdirs(dir); final OutputStream os = fs.create(file, true); generateTestData(os, SMALL_FILE_SIZE); os.close(); final FileStatus fileStatus = fs.getFileStatus(file); assertNotNull(fileStatus); BlockLocation[] blockLocations = fs.getFileBlockLocations(fileStatus, 0, SMALL_FILE_SIZE + 1); assertNull(blockLocations); blockLocations = fs.getFileBlockLocations(fileStatus, 0, SMALL_FILE_SIZE); assertEquals(1, blockLocations.length); final BlockLocation bl = blockLocations[0]; assertNotNull(bl.getHosts()); assertEquals(1, bl.getHosts().length); assertEquals(SMALL_FILE_SIZE, bl.getLength()); assertEquals(0, bl.getOffset()); final URI s3Uri = fs.getUri(); assertNotNull(s3Uri); assertEquals(s3Uri.getHost(), bl.getHosts()[0]); fs.delete(dir, true); } catch (IOException ioe) { fail(ioe.getMessage()); } } /** * Creates and reads a file with the given size in S3. The test file is generated according to a specific pattern. * During the read phase the incoming data stream is also checked against this pattern. * * @param fileSize * the size of the file to be generated in bytes * @throws IOException * thrown if an I/O error occurs while writing or reading the test file */ private void createAndReadFileTest(final int fileSize) throws IOException { if (!testActivated()) { return; } final String bucketName = getRandomName(); final String objectName = getRandomName(); final Path bucketPath = new Path(S3_BASE_URI + bucketName + Path.SEPARATOR); final Path objectPath = new Path(S3_BASE_URI + bucketName + Path.SEPARATOR + objectName); FileSystem fs = bucketPath.getFileSystem(); // Create test bucket fs.mkdirs(bucketPath); // Write test file to S3 final FSDataOutputStream outputStream = fs.create(objectPath, false); generateTestData(outputStream, fileSize); outputStream.close(); // Now read the same file back from S3 final FSDataInputStream inputStream = fs.open(objectPath); testReceivedData(inputStream, fileSize); inputStream.close(); // Delete test bucket fs.delete(bucketPath, true); } /** * Receives test data from the given input stream and checks the size of the data as well as the pattern inside the * received data. * * @param inputStream * the input stream to read the test data from * @param expectedSize * the expected size of the data to be read from the input stream in bytes * @throws IOException * thrown if an error occurs while reading the data */ private void testReceivedData(final InputStream inputStream, final int expectedSize) throws IOException { final byte[] testBuffer = new byte[TEST_BUFFER_SIZE]; int totalBytesRead = 0; int nextExpectedNumber = 0; while (true) { final int bytesRead = inputStream.read(testBuffer); if (bytesRead < 0) { break; } totalBytesRead += bytesRead; for (int i = 0; i < bytesRead; ++i) { if (testBuffer[i] != nextExpectedNumber) { throw new IOException("Read number " + testBuffer[i] + " but expected " + nextExpectedNumber); } ++nextExpectedNumber; if (nextExpectedNumber == MODULUS) { nextExpectedNumber = 0; } } } if (totalBytesRead != expectedSize) { throw new IOException("Expected to read " + expectedSize + " bytes but only received " + totalBytesRead); } } /** * Generates test data of the given size according to some specific pattern and writes it to the provided output * stream. * * @param outputStream * the output stream to write the data to * @param size * the size of the test data to be generated in bytes * @throws IOException * thrown if an error occurs while writing the data */ private void generateTestData(final OutputStream outputStream, final int size) throws IOException { final byte[] testBuffer = new byte[TEST_BUFFER_SIZE]; for (int i = 0; i < testBuffer.length; ++i) { testBuffer[i] = (byte) (i % MODULUS); } int bytesWritten = 0; while (bytesWritten < size) { final int diff = size - bytesWritten; if (diff < testBuffer.length) { outputStream.write(testBuffer, 0, diff); bytesWritten += diff; } else { outputStream.write(testBuffer); bytesWritten += testBuffer.length; } } } /** * Generates a random name. * * @return a random name */ private String getRandomName() { final StringBuilder stringBuilder = new StringBuilder(); for (int i = 0; i < NAME_LENGTH; ++i) { final char c = ALPHABET[(int) (Math.random() * (double) ALPHABET.length)]; stringBuilder.append(c); } return stringBuilder.toString(); } /** * Checks whether the AWS access key and the AWS secret keys have been successfully loaded from the configuration * and whether the S3 tests shall be performed. * * @return <code>true</code> if the tests shall be performed, <code>false</code> if the tests shall be skipped * because at least one AWS key is missing */ private boolean testActivated() { final String accessKey = GlobalConfiguration.getString(S3FileSystem.S3_ACCESS_KEY_KEY, null); final String secretKey = GlobalConfiguration.getString(S3FileSystem.S3_SECRET_KEY_KEY, null); if (accessKey != null && secretKey != null) { return true; } return false; } }
nephele/nephele-s3/src/test/java/eu/stratosphere/nephele/fs/s3/S3FileSystemTest.java
/*********************************************************************************************************************** * * Copyright (C) 2010-2012 by the Stratosphere project (http://stratosphere.eu) * * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the * specific language governing permissions and limitations under the License. * **********************************************************************************************************************/ package eu.stratosphere.nephele.fs.s3; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.net.URI; import org.junit.Before; import org.junit.Test; import eu.stratosphere.nephele.configuration.Configuration; import eu.stratosphere.nephele.configuration.GlobalConfiguration; import eu.stratosphere.nephele.fs.BlockLocation; import eu.stratosphere.nephele.fs.FSDataInputStream; import eu.stratosphere.nephele.fs.FSDataOutputStream; import eu.stratosphere.nephele.fs.FileStatus; import eu.stratosphere.nephele.fs.FileSystem; import eu.stratosphere.nephele.fs.Path; /** * This test checks the S3 implementation of the {@link FileSystem} interface. * * @author warneke */ public class S3FileSystemTest { /** * The length of the bucket/object names used in this test. */ private static final int NAME_LENGTH = 32; /** * The alphabet to generate the random bucket/object names from. */ private static final char[] ALPHABET = { 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9' }; /** * The size of the byte buffer used during the tests in bytes. */ private static final int TEST_BUFFER_SIZE = 128; /** * The size of the small test file in bytes. */ private static final int SMALL_FILE_SIZE = 512; /** * The size of the large test file in bytes. */ private static final int LARGE_FILE_SIZE = 1024 * 1024 * 12; // 12 MB /** * The modulus to be used when generating the test data. Must not be larger than 128. */ private static final int MODULUS = 128; private static final String S3_BASE_URI = "s3:///"; /** * Tries to read the AWS access key and the AWS secret key from the environments variables. If accessing these keys * fails, all tests will be skipped and marked as successful. */ @Before public void initKeys() { final String accessKey = System.getenv("AK"); final String secretKey = System.getenv("SK"); final Configuration conf = new Configuration(); conf.setString(S3FileSystem.S3_ACCESS_KEY_KEY, accessKey); conf.setString(S3FileSystem.S3_SECRET_KEY_KEY, secretKey); GlobalConfiguration.includeConfiguration(conf); } /** * This test creates and deletes a bucket inside S3 and checks it is correctly displayed inside the directory * listing. */ @Test public void createAndDeleteBucketTest() { if (!testActivated()) { return; } final String bucketName = getRandomName(); final Path bucketPath = new Path(S3_BASE_URI + bucketName + Path.SEPARATOR); try { final FileSystem fs = bucketPath.getFileSystem(); // Create directory fs.mkdirs(bucketPath); // Check if directory is correctly displayed in file system hierarchy final FileStatus[] content = fs.listStatus(new Path(S3_BASE_URI)); boolean entryFound = false; for (final FileStatus entry : content) { if (bucketPath.equals(entry.getPath())) { entryFound = true; break; } } if (!entryFound) { fail("Cannot find entry " + bucketName + " in directory " + S3_BASE_URI); } // Check the concrete directory file status try { final FileStatus directoryFileStatus = fs.getFileStatus(bucketPath); assertTrue(directoryFileStatus.isDir()); assertEquals(0L, directoryFileStatus.getAccessTime()); assertTrue(directoryFileStatus.getModificationTime() > 0L); } catch (FileNotFoundException e) { fail(e.getMessage()); } // Delete the bucket fs.delete(bucketPath, true); // Make sure the bucket no longer exists try { fs.getFileStatus(bucketPath); fail("Expected FileNotFoundException for " + bucketPath.toUri()); } catch (FileNotFoundException e) { // This is an expected exception } } catch (IOException ioe) { fail(ioe.getMessage()); } } /** * Creates and reads the a larger test file in S3. The test file is generated according to a specific pattern. * During the read phase the incoming data stream is also checked against this pattern. */ @Test public void createAndReadLargeFileTest() { try { createAndReadFileTest(LARGE_FILE_SIZE); } catch (IOException ioe) { fail(ioe.getMessage()); } } /** * Creates and reads the a small test file in S3. The test file is generated according to a specific pattern. * During the read phase the incoming data stream is also checked against this pattern. */ @Test public void createAndReadSmallFileTest() { try { createAndReadFileTest(SMALL_FILE_SIZE); } catch (IOException ioe) { fail(ioe.getMessage()); } } /** * The tests checks the mapping of the file system directory structure to the underlying bucket/object model of * Amazon S3. */ @Test public void multiLevelDirectoryTest() { if (!testActivated()) { return; } final String dirName = getRandomName(); final String subdirName = getRandomName(); final String subsubdirName = getRandomName(); final String fileName = getRandomName(); final Path dir = new Path(S3_BASE_URI + dirName + Path.SEPARATOR); final Path subdir = new Path(S3_BASE_URI + dirName + Path.SEPARATOR + subdirName + Path.SEPARATOR); final Path subsubdir = new Path(S3_BASE_URI + dirName + Path.SEPARATOR + subdirName + Path.SEPARATOR + subsubdirName + Path.SEPARATOR); final Path file = new Path(S3_BASE_URI + dirName + Path.SEPARATOR + subdirName + Path.SEPARATOR + fileName); try { final FileSystem fs = dir.getFileSystem(); fs.mkdirs(subsubdir); final OutputStream os = fs.create(file, true); generateTestData(os, SMALL_FILE_SIZE); os.close(); // On this directory levels there should only be one subdirectory FileStatus[] list = fs.listStatus(dir); int numberOfDirs = 0; int numberOfFiles = 0; for (final FileStatus entry : list) { if (entry.isDir()) { ++numberOfDirs; assertEquals(subdir, entry.getPath()); } else { fail(entry.getPath() + " is a file which must not appear on this directory level"); } } assertEquals(1, numberOfDirs); assertEquals(0, numberOfFiles); list = fs.listStatus(subdir); numberOfDirs = 0; for (final FileStatus entry : list) { if (entry.isDir()) { assertEquals(subsubdir, entry.getPath()); ++numberOfDirs; } else { assertEquals(file, entry.getPath()); ++numberOfFiles; } } assertEquals(1, numberOfDirs); assertEquals(1, numberOfFiles); fs.delete(dir, true); } catch (IOException ioe) { fail(ioe.getMessage()); } } /** * This test checks the S3 implementation of the file system method to retrieve the block locations of a file. */ @Test public void blockLocationTest() { if (!testActivated()) { return; } final String dirName = getRandomName(); final String fileName = getRandomName(); final Path dir = new Path(S3_BASE_URI + dirName + Path.SEPARATOR); final Path file = new Path(S3_BASE_URI + dirName + Path.SEPARATOR + fileName); try { final FileSystem fs = dir.getFileSystem(); fs.mkdirs(dir); final OutputStream os = fs.create(file, true); generateTestData(os, SMALL_FILE_SIZE); os.close(); final FileStatus fileStatus = fs.getFileStatus(file); assertNotNull(fileStatus); BlockLocation[] blockLocations = fs.getFileBlockLocations(fileStatus, 0, SMALL_FILE_SIZE + 1); assertNull(blockLocations); blockLocations = fs.getFileBlockLocations(fileStatus, 0, SMALL_FILE_SIZE); assertEquals(1, blockLocations.length); final BlockLocation bl = blockLocations[0]; assertNotNull(bl.getHosts()); assertEquals(1, bl.getHosts().length); assertEquals(SMALL_FILE_SIZE, bl.getLength()); assertEquals(0, bl.getOffset()); final URI s3Uri = fs.getUri(); assertNotNull(s3Uri); assertEquals(s3Uri.getHost(), bl.getHosts()[0]); fs.delete(dir, true); } catch (IOException ioe) { fail(ioe.getMessage()); } } /** * Creates and reads a file with the given size in S3. The test file is generated according to a specific pattern. * During the read phase the incoming data stream is also checked against this pattern. * * @param fileSize * the size of the file to be generated in bytes * @throws IOException * thrown if an I/O error occurs while writing or reading the test file */ private void createAndReadFileTest(final int fileSize) throws IOException { if (!testActivated()) { return; } final String bucketName = getRandomName(); final String objectName = getRandomName(); final Path bucketPath = new Path(S3_BASE_URI + bucketName + Path.SEPARATOR); final Path objectPath = new Path(S3_BASE_URI + bucketName + Path.SEPARATOR + objectName); FileSystem fs = bucketPath.getFileSystem(); // Create test bucket fs.mkdirs(bucketPath); // Write test file to S3 final FSDataOutputStream outputStream = fs.create(objectPath, false); generateTestData(outputStream, fileSize); outputStream.close(); // Now read the same file back from S3 final FSDataInputStream inputStream = fs.open(objectPath); testReceivedData(inputStream, fileSize); inputStream.close(); // Delete test bucket fs.delete(bucketPath, true); } /** * Receives test data from the given input stream and checks the size of the data as well as the pattern inside the * received data. * * @param inputStream * the input stream to read the test data from * @param expectedSize * the expected size of the data to be read from the input stream in bytes * @throws IOException * thrown if an error occurs while reading the data */ private void testReceivedData(final InputStream inputStream, final int expectedSize) throws IOException { final byte[] testBuffer = new byte[TEST_BUFFER_SIZE]; int totalBytesRead = 0; int nextExpectedNumber = 0; while (true) { final int bytesRead = inputStream.read(testBuffer); if (bytesRead < 0) { break; } totalBytesRead += bytesRead; for (int i = 0; i < bytesRead; ++i) { if (testBuffer[i] != nextExpectedNumber) { throw new IOException("Read number " + testBuffer[i] + " but expected " + nextExpectedNumber); } ++nextExpectedNumber; if (nextExpectedNumber == MODULUS) { nextExpectedNumber = 0; } } } if (totalBytesRead != expectedSize) { throw new IOException("Expected to read " + expectedSize + " bytes but only received " + totalBytesRead); } } /** * Generates test data of the given size according to some specific pattern and writes it to the provided output * stream. * * @param outputStream * the output stream to write the data to * @param size * the size of the test data to be generated in bytes * @throws IOException * thrown if an error occurs while writing the data */ private void generateTestData(final OutputStream outputStream, final int size) throws IOException { final byte[] testBuffer = new byte[TEST_BUFFER_SIZE]; for (int i = 0; i < testBuffer.length; ++i) { testBuffer[i] = (byte) (i % MODULUS); } int bytesWritten = 0; while (bytesWritten < size) { final int diff = size - bytesWritten; if (diff < testBuffer.length) { outputStream.write(testBuffer, 0, diff); bytesWritten += diff; } else { outputStream.write(testBuffer); bytesWritten += testBuffer.length; } } } /** * Generates a random name. * * @return a random name */ private String getRandomName() { final StringBuilder stringBuilder = new StringBuilder(); for (int i = 0; i < NAME_LENGTH; ++i) { final char c = ALPHABET[(int) (Math.random() * (double) ALPHABET.length)]; stringBuilder.append(c); } return stringBuilder.toString(); } /** * Checks whether the AWS access key and the AWS secret keys have been successfully loaded from the configuration * and whether the S3 tests shall be performed. * * @return <code>true</code> if the tests shall be performed, <code>false</code> if the tests shall be skipped * because at least one AWS key is missing */ private boolean testActivated() { final String accessKey = GlobalConfiguration.getString(S3FileSystem.S3_ACCESS_KEY_KEY, null); final String secretKey = GlobalConfiguration.getString(S3FileSystem.S3_SECRET_KEY_KEY, null); if (accessKey != null && secretKey != null) { return true; } return false; } }
Fixed problem with S3 file system test
nephele/nephele-s3/src/test/java/eu/stratosphere/nephele/fs/s3/S3FileSystemTest.java
Fixed problem with S3 file system test
Java
apache-2.0
cf77c45d4b6a60129b5c3b6ebe9f07aa189c92f7
0
ajordens/orca,ajordens/orca,robfletcher/orca,spinnaker/orca,ajordens/orca,spinnaker/orca,ajordens/orca,spinnaker/orca,robfletcher/orca,robfletcher/orca,robfletcher/orca
/* * Copyright 2020 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.spinnaker.orca.front50.model; import java.util.List; import javax.annotation.Nonnull; import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; import lombok.NoArgsConstructor; @Data @Builder @NoArgsConstructor @AllArgsConstructor public class PluginInfo { @Nonnull private String id; private String description; private String provider; @Nonnull private List<Release> releases; private String homepage; private Repository repository; @Data public static class Release { private String version; private String date; private String requires; private String url; private String sha512sum; private boolean preferred; private String lastModifiedBy; } @Data public static class Repository { private String type; private String url; } }
orca-front50/src/main/groovy/com/netflix/spinnaker/orca/front50/model/PluginInfo.java
/* * Copyright 2020 Netflix, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.netflix.spinnaker.orca.front50.model; import java.util.List; import javax.annotation.Nonnull; import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; import lombok.NoArgsConstructor; @Data @Builder @NoArgsConstructor @AllArgsConstructor public class PluginInfo { @Nonnull private String id; private String description; private String provider; @Nonnull private List<Release> releases; @Data public static class Release { private String version; private String date; private String requires; private String url; private String sha512sum; private boolean preferred; private String lastModifiedBy; } }
feat(plugins): New plugin info fields (#3794) Co-authored-by: mergify[bot] <b09a6ee808b67e98a221404e7aaa52e0398a4954@users.noreply.github.com>
orca-front50/src/main/groovy/com/netflix/spinnaker/orca/front50/model/PluginInfo.java
feat(plugins): New plugin info fields (#3794)
Java
apache-2.0
e1fc7eb04297b4962ccb97309caedfa518ad61c5
0
apache/uima-sandbox,apache/uima-sandbox,apache/uima-sandbox
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.uima.ee.test; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.util.HashMap; import java.util.Map; import java.util.concurrent.CountDownLatch; import javax.jms.Connection; import javax.jms.Message; import javax.jms.MessageListener; import javax.jms.Session; import org.apache.activemq.ActiveMQMessageConsumer; import org.apache.activemq.command.ActiveMQDestination; import org.apache.uima.UIMAFramework; import org.apache.uima.UIMA_IllegalStateException; import org.apache.uima.aae.UimaClassFactory; import org.apache.uima.aae.client.UimaASProcessStatus; import org.apache.uima.aae.client.UimaASStatusCallbackListener; import org.apache.uima.aae.client.UimaAsynchronousEngine; import org.apache.uima.aae.controller.Endpoint; import org.apache.uima.aae.error.ServiceShutdownException; import org.apache.uima.adapter.jms.JmsConstants; import org.apache.uima.adapter.jms.activemq.JmsOutputChannel; import org.apache.uima.adapter.jms.client.BaseUIMAAsynchronousEngine_impl; import org.apache.uima.adapter.jms.message.JmsMessageContext; import org.apache.uima.analysis_engine.AnalysisEngineDescription; import org.apache.uima.cas.CAS; import org.apache.uima.collection.CollectionReader; import org.apache.uima.collection.CollectionReaderDescription; import org.apache.uima.collection.EntityProcessStatus; import org.apache.uima.ee.test.utils.BaseTestSupport; import org.apache.uima.internal.util.XMLUtils; import org.apache.uima.resource.ResourceInitializationException; import org.apache.uima.resource.ResourceSpecifier; import org.apache.uima.resource.metadata.ProcessingResourceMetaData; import org.apache.uima.util.Level; import org.apache.uima.util.XMLInputSource; public class TestUimaASExtended extends BaseTestSupport { private static final int DEFAULT_HTTP_PORT = 8888; private CountDownLatch getMetaCountLatch = null; private static final int MaxGetMetaRetryCount = 2; private static final String primitiveServiceQueue1 = "NoOpAnnotatorQueue"; private static final String PrimitiveDescriptor1 = "resources/descriptors/analysis_engine/NoOpAnnotator.xml"; private int getMetaRequestCount = 0; /** * Tests Broker startup and shutdown */ public void testBrokerLifecycle() { System.out.println("-------------- testBrokerLifecycle -------------"); System.out.println("UIMA_HOME="+System.getenv("UIMA_HOME")+System.getProperty("file.separator")+"bin"+System.getProperty("file.separator")+"dd2spring.xsl"); } /** * Tests handling of multiple calls to initialize(). A subsequent call to * initialize should result in ResourceInitializationException. * * @throws Exception */ public void testInvalidInitializeCall() throws Exception { System.out.println("-------------- testInvalidInitializeCall -------------"); // Instantiate Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); super.setExpectingServiceShutdown(); deployService(eeUimaEngine, relativePath+"/Deploy_PersonTitleAnnotator.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()),"PersonTitleAnnotatorQueue" ); try { initialize(eeUimaEngine, appCtx); waitUntilInitialized(); System.out.println("First Initialize Call Completed"); eeUimaEngine.initialize(appCtx); fail("Subsequent call to initialize() did not return expected exception:"+ UIMA_IllegalStateException.class+" Subsequent call to initialize succeeded with no error"); } catch( ResourceInitializationException e) { if ( e.getCause() != null && !(e.getCause() instanceof UIMA_IllegalStateException ) ) { fail("Invalid Exception Thrown. Expected:"+ UIMA_IllegalStateException.class+" Received:"+ e.getClass()); } else { System.out.println("Received Expected Exception:"+ UIMA_IllegalStateException.class); } } catch( ServiceShutdownException e) { // expected } finally { eeUimaEngine.stop(); } } /** * Tests deployment of a primitive Uima EE Service (PersontTitleAnnotator). Deploys the primitive * in the same jvm using Uima EE Client API and blocks on a monitor until the Uima Client calls initializationComplete() * method. Once the primitive service starts it is expected to send its metadata to the Uima client * which in turn notifies this object with a call to initializationComplete() where the monitor * is signaled to unblock the thread. This code will block if the Uima Client does not call * initializationComplete() * * @throws Exception */ public void testDeployPrimitiveService() throws Exception { System.out.println("-------------- testDeployPrimitiveService -------------"); // Instantiate Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy Uima EE Primitive Service deployService(eeUimaEngine, relativePath+"/Deploy_PersonTitleAnnotator.xml"); super.setExpectingServiceShutdown(); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"PersonTitleAnnotatorQueue", 0, EXCEPTION_LATCH); } /** * Tests a simple Aggregate with one remote Delegate and collocated Cas Multiplier * * @throws Exception */ public void testDeployAggregateService() throws Exception { System.out.println("-------------- testDeployAggregateService -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); System.setProperty(JmsConstants.SessionTimeoutOverride, "2500000"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotator.xml"); super.setExpectingServiceShutdown(); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 0, PROCESS_LATCH); } /** * Tests a simple Aggregate with one remote Delegate and collocated Cas Multiplier * * @throws Exception */ public void testDeployAggregateServiceWithTempReplyQueue() throws Exception { System.out.println("-------------- testDeployAggregateServiceWithTempReplyQueue -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateUsingRemoteTempQueue.xml"); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } /** * Tests a simple Aggregate with one remote Delegate and collocated Cas Multiplier * * @throws Exception */ public void testProcessAggregateServiceWith1000Docs() throws Exception { System.out.println("-------------- testProcessAggregateServiceWith1000Docs -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotatorWithInternalCM1000Docs.xml"); // deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotatorWith1MillionDocs.xml"); super.setExpectingServiceShutdown(); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } public void testProcessAggregateWithInnerAggregateCM() throws Exception { System.out.println("-------------- testProcessAggregateWithInnerAggregateCM() -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); System.setProperty(JmsConstants.SessionTimeoutOverride, "2500000"); deployService(eeUimaEngine, relativePath+"/Deploy_ComplexAggregateWithInnerAggregateCM.xml"); super.setExpectingServiceShutdown(); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } /** * Tests exception thrown in the Uima EE Client when the Collection Reader is added after * the uima ee client is initialized * * @throws Exception */ /* public void testCollectionReader() throws Exception { System.out.println("-------------- testCollectionReader -------------"); // Instantiate Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_PersonTitleAnnotator.xml"); super.setExpectingServiceShutdown(); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()),"PersonTitleAnnotatorQueue" ); // reduce the cas pool size and reply window appCtx.remove(UimaAsynchronousEngine.CasPoolSize); appCtx.put(UimaAsynchronousEngine.CasPoolSize, Integer.valueOf(2)); appCtx.remove(UimaAsynchronousEngine.ReplyWindow); appCtx.put(UimaAsynchronousEngine.ReplyWindow, 1); // set the collection reader File collectionReaderDescriptor = new File("resources/descriptors/collection_reader/FileSystemCollectionReader.xml"); CollectionReaderDescription collectionReaderDescription = UIMAFramework.getXMLParser() .parseCollectionReaderDescription(new XMLInputSource(collectionReaderDescriptor)); CollectionReader collectionReader = UIMAFramework .produceCollectionReader(collectionReaderDescription); eeUimaEngine.setCollectionReader(collectionReader); initialize(eeUimaEngine, appCtx); waitUntilInitialized(); runCrTest(eeUimaEngine, 7); eeUimaEngine.stop(); } */ /** * Tests exception thrown in the Uima EE Client when the Collection Reader is added after * the uima ee client is initialized * * @throws Exception */ public void testExceptionOnPostInitializeCollectionReaderInjection() throws Exception { System.out.println("-------------- testExceptionOnPostInitializeCollectionReaderInjection -------------"); // Instantiate Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_PersonTitleAnnotator.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()),"PersonTitleAnnotatorQueue" ); super.setExpectingServiceShutdown(); initialize(eeUimaEngine, appCtx); waitUntilInitialized(); try { // Simulate plugging in a Collection Reader. This should throw // ResourceInitializationException since the client code has // been already initialized. eeUimaEngine.setCollectionReader(null); } catch( ResourceInitializationException e) { System.out.println("Received Expected Exception:"+ResourceInitializationException.class); // Expected return; } catch( Exception e) { fail("Invalid Exception Thrown. Expected:"+ ResourceInitializationException.class+" Received:"+ e.getClass()); } finally { eeUimaEngine.stop(); } fail("Expected" + ResourceInitializationException.class); } /** * Tests the shutdown due to a failure in the Flow Controller while diabling a delegate * * @throws Exception */ public void testTerminateOnFlowControllerExceptionOnDisable() throws Exception { System.out.println("-------------- testTerminateOnFlowControllerExceptionOnNextStep -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithException.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithFlowControllerExceptionOnDisable.xml"); super.setExpectingServiceShutdown(); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, EXCEPTION_LATCH); //PC_LATCH); } /** * Tests the shutdown due to a failure in the Flow Controller when initializing * * @throws Exception */ public void testTerminateOnFlowControllerExceptionOnInitialization() throws Exception { System.out.println("-------------- testTerminateOnFlowControllerExceptionOnInitialization -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); String[] containerIds = new String[2]; try { containerIds[0] = deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); containerIds[1] = deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithFlowControllerExceptionOnInitialization.xml"); fail("Expected ResourceInitializationException. Instead, the Aggregate Deployed Successfully"); } catch (ResourceInitializationException e) { Exception cause = getCause(e); System.out.println("\nExpected Initialization Exception was received:"+cause); } catch (Exception e) { fail("Expected ResourceInitializationException. Instead Got:" + e.getClass()); } finally { eeUimaEngine.undeploy(containerIds[0]); eeUimaEngine.undeploy(containerIds[1]); } } /** * Tests the shutdown due to a failure in the Flow Controller when initializing AND have delegates to disable * (Jira issue UIMA-1171) * * @throws Exception */ public void testTerminateOnFlowControllerExceptionOnInitializationWithDisabledDelegates() throws Exception { System.out.println("-------------- testTerminateOnFlowControllerExceptionOnInitializationWithDisabledDelegates -----"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); String containerId = null; try { containerId = deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithFlowControllerExceptionOnInitialization.xml"); fail("Expected ResourceInitializationException. Instead, the Aggregate Deployed Successfully"); } catch (ResourceInitializationException e) { Exception cause = getCause(e); System.out.println("\nExpected Initialization Exception was received - cause: "+cause); } catch (Exception e) { fail("Expected ResourceInitializationException. Instead Got:" + e.getClass()); } finally { eeUimaEngine.undeploy(containerId); } } /** * Deploys a Primitive Uima EE service and sends 5 CASes to it. * * @throws Exception */ public void testPrimitiveServiceProcess() throws Exception { System.out.println("-------------- testPrimitiveServiceProcess -------------"); // Instantiate Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy Uima EE Primitive Service deployService(eeUimaEngine, relativePath+"/Deploy_PersonTitleAnnotator.xml"); super.setExpectingServiceShutdown(); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"PersonTitleAnnotatorQueue", 5, PROCESS_LATCH); } /** * Deploys a Primitive Uima EE service and sends 5 CASes to it. * * @throws Exception */ public void testSyncAggregateProcess() throws Exception { System.out.println("-------------- testSyncAggregateProcess -------------"); // Instantiate Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy Uima EE Primitive Service deployService(eeUimaEngine, relativePath+"/Deploy_MeetingDetectorAggregate.xml"); super.setExpectingServiceShutdown(); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"MeetingDetectorQueue", 5, PROCESS_LATCH); } /** * Deploys a Primitive Uima EE service and sends 5 CASes to it. * * @throws Exception */ public void testPrimitiveServiceProcessPingFailure() throws Exception { System.out.println("-------------- testPrimitiveServiceProcessPingFailure -------------"); // Instantiate Uima EE Client final BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy Uima EE Primitive Service final String containerID = deployService(eeUimaEngine, relativePath+"/Deploy_PersonTitleAnnotator.xml"); super.setExpectingServiceShutdown(); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()), "PersonTitleAnnotatorQueue" ); // Set an explicit getMeta (Ping)timeout appCtx.put(UimaAsynchronousEngine.GetMetaTimeout, 2000 ); // Set an explicit process timeout so to test the ping on timeout appCtx.put(UimaAsynchronousEngine.Timeout, 1000 ); // Spin a thread and wait for awhile before killing the remote service. // This will cause the client to timeout waiting for a CAS reply and // to send a Ping message to test service availability. The Ping times // out and causes the client API to stop. new Thread() { public void run() { Object mux = new Object(); synchronized( mux ) { try { mux.wait(500); // Undeploy service container eeUimaEngine.undeploy(containerID); } catch (Exception e) {} } } }.start(); try { // RuntimeException is expected due to failure runTest(appCtx,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"PersonTitleAnnotatorQueue", 500, EXCEPTION_LATCH); } catch( RuntimeException e) {} } /** * Tests error handling on delegate timeout. The Delegate is started as remote, * the aggregate initializes and the client starts sending CASes. After a short * while the client kills the remote delegate. The aggregate receives a CAS * timeout and disables the delegate. A timed out CAS is sent to the next * delegate in the pipeline. ALL 1000 CASes are returned to the client. * * @throws Exception */ public void testDelegateTimeoutAndDisable() throws Exception { System.out.println("-------------- testDelegateTimeoutAndDisable -------------"); // Instantiate Uima EE Client final BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy Uima EE Primitive Service final String containerID = deployService(eeUimaEngine, relativePath+"/Deploy_RoomNumberAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_MeetingDetectorTAE_RemoteRoomNumberDisable.xml"); super.setExpectingServiceShutdown(); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()), "MeetingDetectorTaeQueue" ); // Set an explicit getMeta (Ping)timeout appCtx.put(UimaAsynchronousEngine.GetMetaTimeout, 2000 ); // Set an explicit process timeout so to test the ping on timeout appCtx.put(UimaAsynchronousEngine.Timeout, 1000 ); // Spin a thread and wait for awhile before killing the remote service. // This will cause the client to timeout waiting for a CAS reply and // to send a Ping message to test service availability. The Ping times // out and causes the client API to stop. new Thread() { public void run() { Object mux = new Object(); synchronized( mux ) { try { mux.wait(500); // Undeploy service container eeUimaEngine.undeploy(containerID); } catch (Exception e) {} } } }.start(); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"MeetingDetectorTaeQueue", 1000, PROCESS_LATCH); } /** * This test kills a remote Delegate while in the middle of processing 1000 CASes. * The CAS timeout error handling disables the delegate and forces ALL CASes * from the Pending Reply List to go through Error Handler. The Flow Controller * is configured to continueOnError and CASes that timed out are allowed to * continue to the next delegate. ALL 1000 CASes are accounted for in the * NoOp Annotator that is last in the flow. * * @throws Exception */ public void testDisableDelegateOnTimeoutWithCM() throws Exception { System.out.println("-------------- testDisableDelegateOnTimeoutWithCM -------------"); // Instantiate Uima EE Client final BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy Uima EE Primitive Service final String containerID = deployService(eeUimaEngine, relativePath+"/Deploy_RoomNumberAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_MeetingDetectorTAEWithCM_RemoteRoomNumberDisable.xml"); super.setExpectingServiceShutdown(); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()), "MeetingDetectorTaeQueue" ); // Set an explicit getMeta (Ping)timeout appCtx.put(UimaAsynchronousEngine.GetMetaTimeout, 2000 ); // Set an explicit process timeout so to test the ping on timeout appCtx.put(UimaAsynchronousEngine.Timeout, 1000 ); // Spin a thread and wait for awhile before killing the remote service. // This will cause the client to timeout waiting for a CAS reply and // to send a Ping message to test service availability. The Ping times // out and causes the client API to stop. new Thread() { public void run() { Object mux = new Object(); synchronized( mux ) { try { mux.wait(300); // Undeploy service container eeUimaEngine.undeploy(containerID); } catch (Exception e) {} } } }.start(); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"MeetingDetectorTaeQueue", 1, PROCESS_LATCH); } /** * Tests Uima EE client ability to test sendAndReceive in multiple/concurrent threads * It spawns 4 thread each sending 100 CASes to a Primitive Uima EE service * @throws Exception */ public void testSynchCallProcessWithMultipleThreads() throws Exception { System.out.println("-------------- testSynchCallProcessWithMultipleThreads -------------"); int howManyCASesPerRunningThread = 100; int howManyRunningThreads = 4; super.setExpectingServiceShutdown(); runTestWithMultipleThreads(relativePath+"/Deploy_PersonTitleAnnotator.xml", "PersonTitleAnnotatorQueue", howManyCASesPerRunningThread, howManyRunningThreads, 0, 0 ); } /** * * @throws Exception */ public void testPrimitiveProcessCallWithLongDelay() throws Exception { System.out.println("-------------- testPrimitiveProcessCallWithLongDelay -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy Uima EE Primitive Service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithLongDelay.xml"); super.setExpectingServiceShutdown(); // We expect 18000ms to be spent in process method super.setExpectedProcessTime(6000); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()),"NoOpAnnotatorQueueLongDelay" ); appCtx.remove(UimaAsynchronousEngine.ReplyWindow); appCtx.put(UimaAsynchronousEngine.ReplyWindow, 1); runTest(appCtx,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"NoOpAnnotatorQueueLongDelay", 4, PROCESS_LATCH, true); } /** * Tests time spent in process CAS. The CAS is sent to three remote delegates each * with a delay of 6000ms in the process method. The aggregate is expected to sum * up the time spent in each annotator process method. The final sum is returned * to the client (the test) and compared against expected 18000ms. The test actually * allows for 20ms margin to account for any overhead (garbage collecting, slow cpu, etc) * * @throws Exception */ public void testAggregateProcessCallWithLongDelay() throws Exception { System.out.println("-------------- testAggregateProcessCallWithLongDelay -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy Uima EE Primitive Services each with 6000ms delay in process() deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorAWithLongDelay.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorBWithLongDelay.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorCWithLongDelay.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotatorWithLongDelay.xml"); super.setExpectingServiceShutdown(); // We expect 18000ms to be spent in process method super.setExpectedProcessTime(18000); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue" ); appCtx.remove(UimaAsynchronousEngine.ReplyWindow); // make sure we only send 1 CAS at a time appCtx.put(UimaAsynchronousEngine.ReplyWindow, 1); runTest(appCtx,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH, true); } /** * Tests Aggregate configuration where the Cas Multiplier delegate is the * last delegate in the Aggregate's pipeline * * @throws Exception */ public void testAggregateProcessCallWithLastCM() throws Exception { System.out.println("-------------- testAggregateProcessCallWithLastCM -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy Uima EE Primitive Services each with 6000ms delay in process() deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithLastCM.xml"); super.setExpectingServiceShutdown(); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH, true); } /** * Tests shutdown while running with multiple/concurrent threads * The Annotator throws an exception and the Aggregate error handling is setup to terminate * on the first error. * * @throws Exception */ public void testTimeoutInSynchCallProcessWithMultipleThreads() throws Exception { System.out.println("-------------- testTimeoutInSynchCallProcessWithMultipleThreads -------------"); int howManyCASesPerRunningThread = 2; int howManyRunningThreads = 4; super.setExpectingServiceShutdown(); int processTimeout = 2000; int getMetaTimeout = 500; runTestWithMultipleThreads(relativePath+"/Deploy_NoOpAnnotatorWithLongDelay.xml", "NoOpAnnotatorQueueLongDelay", howManyCASesPerRunningThread, howManyRunningThreads, processTimeout, getMetaTimeout ); } /** * Tests shutdown while running with multiple/concurrent threads * The Annotator throws an exception and the Aggregate error handling is setup to terminate * on the first error. * * @throws Exception */ public void testTimeoutFailureInSynchCallProcessWithMultipleThreads() throws Exception { System.out.println("-------------- testTimeoutFailureInSynchCallProcessWithMultipleThreads -------------"); int howManyCASesPerRunningThread = 1000; int howManyRunningThreads = 4; super.setExpectingServiceShutdown(); int processTimeout = 2000; int getMetaTimeout = 500; runTestWithMultipleThreads(relativePath+"/Deploy_NoOpAnnotator.xml", "NoOpAnnotatorQueue", howManyCASesPerRunningThread, howManyRunningThreads, 2000, 1000, true ); } /** * Tests a parallel flow in the Uima EE aggregate. * * @throws Exception */ public void testProcessWithParallelFlow() throws Exception { System.out.println("-------------- testProcessWithParallelFlow -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator2.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithParallelFlow.xml"); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } /** * Tests ability to disable one delegate in parallel flow and continue * * @throws Exception */ public void testDisableDelegateInParallelFlow() throws Exception { System.out.println("-------------- testDisableDelegateInParallelFlow -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator2.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithParallelFlow.xml"); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } /** * * @throws Exception */ public void testTimeoutDelegateInParallelFlows() throws Exception { System.out.println("-------------- testTimeoutDelegateInParallelFlows -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithDelay.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithLongDelay.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator2.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator3.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithParallelFlows.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()), "TopLevelTaeQueue" ); // Set an explicit process timeout so one of the 1st parallels is disabled but 2nd parallel flow continues. appCtx.put(UimaAsynchronousEngine.Timeout, 20000 ); addExceptionToignore(org.apache.uima.aae.error.UimaEEServiceException.class); addExceptionToignore(org.apache.uima.aae.error.UimaASProcessCasTimeout.class); runTest(appCtx, eeUimaEngine, null, null, 1, PROCESS_LATCH); } /** * Tests Timeout logic * @throws Exception */ public void testRemoteDelegateTimeout() throws Exception { System.out.println("-------------- testRemoteDelegateTimeout -------------"); System.out.println("The Aggregate sends 2 CASes to the NoOp Annotator which"); System.out.println("delays each CAS for 6000ms. The timeout is set to 4000ms"); System.out.println("Two CAS retries are expected"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithLongDelay.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotatorWithLongDelayDelegate.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()), "TopLevelTaeQueue" ); // The Remote NoOp delays each CAS for 6000ms. The Aggregate sends two CASes so adjust // client timeout to be just over 12000ms. appCtx.put(UimaAsynchronousEngine.Timeout, 13000 ); addExceptionToignore(org.apache.uima.aae.error.UimaEEServiceException.class); addExceptionToignore(org.apache.uima.aae.error.UimaASProcessCasTimeout.class); runTest(appCtx, eeUimaEngine, null, null, 1, PROCESS_LATCH); } /** * Tests Timeout logic * @throws Exception */ public void testDisableOnRemoteDelegatePingTimeout() throws Exception { System.out.println("-------------- testDisableOnRemoteDelegatePingTimeout -------------"); System.out.println("The Aggregate sends 2 CASes to the NoOp Annotator which"); System.out.println("delays each CAS for 6000ms. The timeout is set to 4000ms"); System.out.println("Two CAS retries are expected"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); String delegateContainerId = deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithLongDelay.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotatorWithLongDelayDelegate.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()), "TopLevelTaeQueue" ); // The Remote NoOp delays each CAS for 6000ms. The Aggregate sends two CASes so adjust // client timeout to be just over 12000ms. appCtx.put(UimaAsynchronousEngine.Timeout, 13000 ); addExceptionToignore(org.apache.uima.aae.error.UimaEEServiceException.class); addExceptionToignore(org.apache.uima.aae.error.UimaASProcessCasTimeout.class); // Remove container with the remote NoOp delegate so that we can test // the CAS Process and Ping timeout. eeUimaEngine.undeploy(delegateContainerId); // Send the CAS and handle exception runTest(appCtx, eeUimaEngine, null, null, 1, EXCEPTION_LATCH); } public void testDeployAggregateWithCollocatedAggregateService() throws Exception { System.out.println("-------------- testDeployAggregateWithCollocatedAggregateService -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_ComplexAggregate.xml"); super.setExpectingServiceShutdown(); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 10, PROCESS_LATCH); } public void testProcessWithAggregateUsingCollocatedMultiplier() throws Exception { System.out.println("-------------- testProcessWithAggregateUsingCollocatedMultiplier -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotator.xml"); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } public void testProcessWithAggregateUsingRemoteMultiplier() throws Exception { System.out.println("-------------- testProcessWithAggregateUsingRemoteMultiplier -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_RemoteCasMultiplier.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithRemoteMultiplier.xml"); runTest(null, eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } /** * First CM feeds 100 CASes to a "merger" CM that generates one output CAS for every 5 input. * Second CM creates unique document text that is checked by the last component. * The default FC should let 4 childless CASes through, replacing every 5th by its child. * * @throws Exception */ public void testProcessWithAggregateUsingCollocatedMerger() throws Exception { System.out.println("-------------- testProcessWithAggregateUsingRemoteMerger -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithCollocatedMerger.xml"); runTest(null, eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } public void testProcessWithAggregateUsingRemoteMerger() throws Exception { System.out.println("-------------- testProcessWithAggregateUsingRemoteMerger -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_RemoteCasMerger.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithRemoteMerger.xml"); runTest(null, eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } public void testClientWithAggregateMultiplier() throws Exception { System.out.println("-------------- testClientWithAggregateMultiplier -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_RemoteCasMultiplier.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateMultiplier.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue" ); // reduce the cas pool size and reply window appCtx.remove(UimaAsynchronousEngine.ShadowCasPoolSize); appCtx.put(UimaAsynchronousEngine.ShadowCasPoolSize, Integer.valueOf(2)); runTest(appCtx, eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } public void testClientProcessWithRemoteMultiplier() throws Exception { System.out.println("-------------- testClientProcessWithRemoteMultiplier -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_RemoteCasMultiplier.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()),"TestMultiplierQueue" ); appCtx.remove(UimaAsynchronousEngine.ShadowCasPoolSize); appCtx.put(UimaAsynchronousEngine.ShadowCasPoolSize, Integer.valueOf(1)); runTest(appCtx,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TestMultiplierQueue", 1, PROCESS_LATCH); } public void testClientProcessWithComplexAggregateRemoteMultiplier() throws Exception { System.out.println("-------------- testClientProcessWithComplexAggregateRemoteMultiplier -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_RemoteCasMultiplierWith10Docs_1.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_CasMultiplierAggregateWithRemoteCasMultiplier.xml"); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } public void testProcessWithAggregateUsing2RemoteMultipliers() throws Exception { System.out.println("-------------- testProcessWithAggregateUsing2RemoteMultipliers -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_RemoteCasMultiplierWith10Docs_1.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_RemoteCasMultiplierWith10Docs_2.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWith2RemoteMultipliers.xml"); runTest(null, eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } public void testProcessWithAggregateUsing2CollocatedMultipliers() throws Exception { System.out.println("-------------- testProcessWithAggregateUsing2CollocatedMultipliers -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWith2Multipliers.xml"); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } public void testProcessAggregateWithInnerCMAggregate() throws Exception { System.out.println("-------------- testProcessAggregateWithInnerCMAggregate -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_TopAggregateWithInnerAggregateCM.xml"); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } public void testBlueJDeployment() throws Exception { System.out.println("-------------- testBlueJDeployment -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy replicated services for the inner remote aggregate CM deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); // Deploy an instance of a remote aggregate CM containing a collocated Cas Multiplier // CM --> Replicated Remote Primitive --> NoOp CC deployService(eeUimaEngine, relativePath+"/Deploy_CMAggregateWithCollocatedCM.xml"); // Deploy top level Aggregate Cas Multiplier with 2 collocated Cas Multipliers // CM1 --> CM2 --> Remote AggregateCM --> Candidate Answer --> CC deployService(eeUimaEngine, relativePath+"/Deploy_TopLevelBlueJAggregateCM.xml"); super.setExpectingServiceShutdown(); runTest2(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 10, PROCESS_LATCH); } public void testTypesystemMergeWithMultiplier() throws Exception { System.out.println("-------------- testTypesystemMergeWithMultiplier -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithMergedTypes.xml"); runTest(null, eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } public void testStopAggregateWithRemoteMultiplier() throws Exception { System.out.println("-------------- testStopAggregateWithRemoteMultiplier -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_RemoteCasMultiplier.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithExceptionOn5thCAS.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithRemoteMultiplier.xml"); super.setExpectingServiceShutdown(); runTest(null, eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, EXCEPTION_LATCH); } public void testCancelProcessAggregateWithCollocatedMultiplier() throws Exception { System.out.println("-------------- testCancelProcessAggregateWithCollocatedMultiplier -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_ComplexAggregateWith1MillionDocs.xml"); super.setExpectingServiceShutdown(); // Spin a thread to cancel Process after 20 seconds spinShutdownThread( eeUimaEngine, 20000 ); runTest(null, eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1,PROCESS_LATCH); } public void testCancelProcessAggregateWithRemoteMultiplier() throws Exception { System.out.println("-------------- testStopAggregateWithRemoteMultiplier -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_RemoteCasMultiplierWith1MillionDocs.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithRemoteMultiplier.xml"); super.setExpectingServiceShutdown(); // Spin a thread to cancel Process after 20 seconds spinShutdownThread( eeUimaEngine, 20000 ); runTest(null, eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1,PROCESS_LATCH);//EXCEPTION_LATCH); } /** * Test correct reply from the service when its process method fails. Deploys the Primitive * Service ( NoOp Annotator) that is configured to throw an exception on every CAS. The expected * behavior is for the Primitive Service to return a reply with an Exception. This code blocks * on a Count Down Latch, until the exception is returned from the service. When the exception is * received the latch is opened indicating success. * * @throws Exception */ public void testPrimitiveServiceResponseOnException() throws Exception { System.out.println("-------------- testPrimitiveServiceResponseOnException -------------"); // Instantiate Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy remote service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithException.xml"); super.setExpectingServiceShutdown(); // Deploy Uima EE Primitive Service // Initialize and run the Test. Wait for a completion and cleanup resources. runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"NoOpAnnotatorQueue", 1, EXCEPTION_LATCH); } public void testProcessParallelFlowWithDelegateFailure() throws Exception { System.out.println("-------------- testProcessParallelFlowWithDelegateFailure -------------"); // Create Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); UIMAFramework.getLogger().setLevel(Level.FINE); // Deploy remote service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithException.xml"); // Deploy remote service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator2.xml"); // Deploy top level aggregate service deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithParallelFlowTerminateOnDelegateFailure.xml"); super.setExpectingServiceShutdown(); // Initialize and run the Test. Wait for a completion and cleanup resources. runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, EXCEPTION_LATCH); //PC_LATCH); } /** Tests that the thresholdAction is taken when thresholdCount errors occur in the last thresholdWindow CASes. * Aggregate has two annotators, first fails with increasing frequency (on CASes 10 19 27 34 40 45 49 52 54) * and is disabled after 3 errors in a window of 7 (49,52,54) * Second annotator counts the CASes that reach it and verifies that it sees all but the 9 failures. * It throws an exception if the first is disabled after too many or too few errors. * * @throws Exception */ public void testErrorThresholdWindow() throws Exception { System.out.println("-------------- testErrorThresholdWindow -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); UIMAFramework.getLogger().setLevel(Level.FINE); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotatorWithThresholdWindow.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()), "TopLevelTaeQueue" ); // Set an explicit CPC timeout as exceptions thrown in the 2nd annotator's CPC don't reach the client. appCtx.put(UimaAsynchronousEngine.CpcTimeout, 20000 ); addExceptionToignore(org.apache.uima.aae.error.UimaEEServiceException.class); runTest(appCtx,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); //PC_LATCH); } public void testProcessParallelFlowWithDelegateDisable() throws Exception { System.out.println("-------------- testProcessParallelFlowWithDelegateDisable -------------"); // Create Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithException.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator2.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithParallelFlowDisableOnDelegateFailure.xml"); addExceptionToignore(org.apache.uima.aae.error.UimaEEServiceException.class); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); //PC_LATCH); } public void testPrimitiveShutdownOnTooManyErrors() throws Exception { System.out.println("-------------- testPrimitiveShutdownOnTooManyErrors -------------"); // Create Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy remote service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); // Deploy top level aggregate service deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotator.xml"); super.setExpectingServiceShutdown(); // Initialize and run the Test. Wait for a completion and cleanup resources. runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, CPC_LATCH ); } public void testClientHttpTunnelling() throws Exception { System.out.println("-------------- testClientHttpTunnelling -------------"); // Add HTTP Connector to the broker. The connector will use port 8888. If this port is not available the test fails String httpURI = addHttpConnector(DEFAULT_HTTP_PORT); // Create Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy remote service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); super.setExpectingServiceShutdown(); // Initialize and run the Test. Wait for a completion and cleanup resources. runTest(null,eeUimaEngine,httpURI,"NoOpAnnotatorQueue", 1, CPC_LATCH ); } public void testClientHttpTunnellingToAggregate() throws Exception { System.out.println("-------------- testClientHttpTunnellingToAggregate -------------"); // Add HTTP Connector to the broker. The connector will use port 8888. If this port is not available the test fails String httpURI = addHttpConnector(DEFAULT_HTTP_PORT); // Create Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy remote service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotator.xml"); super.setExpectingServiceShutdown(); // Initialize and run the Test. Wait for a completion and cleanup resources. runTest(null,eeUimaEngine,httpURI,"TopLevelTaeQueue", 1, CPC_LATCH ); } public void testClientHttpTunnellingWithDoubleByteText() throws Exception { System.out.println("-------------- testClientHttpTunnellingWithDoubleByteText -------------"); try { File file = new File(relativeDataPath+"/DoubleByteText.txt"); System.out.println("Checking for existence of File:"+file.getAbsolutePath()); // Process only if the file exists if ( file.exists()) { System.out.println(" *** DoubleByteText.txt exists and will be sent through http connector."); System.out.println(" *** If the vanilla activemq release is being used,"); System.out.println(" *** and DoubleByteText.txt is bigger than 64KB or so, this test case will hang."); System.out.println(" *** To fix, override the classpath with the jar files in and under the"); System.out.println(" *** apache-uima-as/uima-as-distr/src/main/apache-activemq-X.y.z directory"); System.out.println(" *** in the apache-uima-as source distribution."); // Add HTTP Connector to the broker. The connector will use port 8888. If this port is not available the test fails String httpURI = addHttpConnector(DEFAULT_HTTP_PORT); // Create Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy remote service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); InputStream fis = new FileInputStream(file); Reader rd = new InputStreamReader(fis, "UTF-8"); BufferedReader in = new BufferedReader(rd); // Set the double-byte text. This is what will be sent to the service String line = in.readLine(); super.setDoubleByteText(line); int err = XMLUtils.checkForNonXmlCharacters(line); if (err >= 0) { fail("Illegal XML char at offset " + err); } super.setExpectingServiceShutdown(); // Initialize and run the Test. Wait for a completion and cleanup resources. runTest(null,eeUimaEngine,httpURI,"NoOpAnnotatorQueue", 1, CPC_LATCH ); } } catch( Exception e) { // Double-Byte Text file not present. Continue on with the next test e.printStackTrace(); fail("Could not complete test"); } } public void testAggregateHttpTunnelling() throws Exception { System.out.println("-------------- testAggregateHttpTunnelling -------------"); // Add HTTP Connector to the broker. The connector will use port 8888. If this port is not available the test fails addHttpConnector(DEFAULT_HTTP_PORT); // Create Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy remote service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); // Deploy top level aggregate that communicates with the remote via Http Tunnelling deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotatorWithHttpDelegate.xml"); super.setExpectingServiceShutdown(); // Initialize and run the Test. Wait for a completion and cleanup resources. runTest(null,eeUimaEngine, String.valueOf(broker.getMasterConnectorURI()), "TopLevelTaeQueue", 10, CPC_LATCH ); } /** * Tests exception thrown in the Uima EE Client when the Collection Reader is added after * the uima ee client is initialized * * @throws Exception */ public void testCollectionReader() throws Exception { System.out.println("-------------- testCollectionReader -------------"); // Instantiate Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_PersonTitleAnnotator.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()),"PersonTitleAnnotatorQueue" ); // reduce the cas pool size and reply window appCtx.remove(UimaAsynchronousEngine.CasPoolSize); appCtx.put(UimaAsynchronousEngine.CasPoolSize, Integer.valueOf(2)); appCtx.remove(UimaAsynchronousEngine.ReplyWindow); appCtx.put(UimaAsynchronousEngine.ReplyWindow, 1); super.setExpectingServiceShutdown(); // set the collection reader String filename = super.getFilepathFromClassloader("descriptors/collection_reader/FileSystemCollectionReader.xml"); if ( filename == null ) { fail("Unable to find file:"+"descriptors/collection_reader/FileSystemCollectionReader.xml"+ "in classloader"); } File collectionReaderDescriptor = new File(filename); CollectionReaderDescription collectionReaderDescription = UIMAFramework.getXMLParser() .parseCollectionReaderDescription(new XMLInputSource(collectionReaderDescriptor)); CollectionReader collectionReader = UIMAFramework .produceCollectionReader(collectionReaderDescription); eeUimaEngine.setCollectionReader(collectionReader); initialize(eeUimaEngine, appCtx); waitUntilInitialized(); runCrTest(eeUimaEngine, 7); synchronized(this) { wait(50); } eeUimaEngine.stop(); } public void testAsynchronousTerminate() throws Exception { System.out.println("-------------- testAsynchronousTerminate -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithDelay.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator2.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithParallelFlow.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()), "TopLevelTaeQueue" ); initialize(eeUimaEngine, appCtx); // Wait until the top level service returns its metadata waitUntilInitialized(); super.setExpectingServiceShutdown(); CAS cas = eeUimaEngine.getCAS(); System.out.println(" Sending CAS to kick off aggregate w/colocated CasMultiplier"); eeUimaEngine.sendCAS(cas); System.out.println(" Waiting 1 seconds"); Thread.sleep(1000); System.out.println(" Trying to stop service"); eeUimaEngine.stop(); System.out.println(" stop() returned!"); } public void testCallbackListenerOnFailure() throws Exception { System.out.println("-------------- testCallbackListenerOnFailure -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithException.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()), "NoOpAnnotatorQueue" ); initialize(eeUimaEngine, appCtx); // Wait until the top level service returns its metadata waitUntilInitialized(); super.setExpectingServiceShutdown(); CAS cas = eeUimaEngine.getCAS(); // Register special callback listener. This listener will receive // an exception with the Cas Reference id. TestListener listener = new TestListener(this); eeUimaEngine.addStatusCallbackListener(listener); // Send request out and save Cas Reference id String casReferenceId = eeUimaEngine.sendCAS(cas); // Spin a callback listener thread Thread t = new Thread(listener); t.start(); // Wait for reply CAS. This method blocks String cRefId = listener.getCasReferenceId(); try { // Test if received Cas Reference Id matches the id of the CAS sent out if ( !cRefId.equals(casReferenceId)) { fail( "Received Invalid Cas Reference Id. Expected:"+casReferenceId+" Received: "+cRefId); } else { System.out.println("Received Expected Cas Identifier:"+casReferenceId); } } finally { // Stop callback listener thread listener.doStop(); eeUimaEngine.stop(); } } public void testTerminateOnInitializationFailure() throws Exception { System.out.println("-------------- testTerminateOnInitializationFailure -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); try { deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithParallelFlow.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()), "TopLevelTaeQueue" ); exceptionCountLatch = new CountDownLatch(1); initialize(eeUimaEngine, appCtx); fail("Expected ResourceInitializationException. Instead, the Aggregate Reports Successfull Initialization"); } catch( ResourceInitializationException e) { Exception cause = getCause(e); System.out.println("Expected Initialization Exception was received:"+cause); eeUimaEngine.stop(); } catch( Exception e) { fail("Expected ResourceInitializationException. Instead Got:"+e.getClass()); } } /** * Tests shutdown due to delegate broker missing. The Aggregate is configured to * retry getMeta 3 times and continue. The client times out after 20 seconds and forces the * shutdown. NOTE: The Spring listener tries to recover JMS connection on failure. In this * test a Listener to remote delegate cannot be established due to a missing broker. The * Listener is setup to retry every 60 seconds. After failure, the listener goes to sleep * for 60 seconds and tries again. This results in a 60 second delay at the end of this test. * * @throws Exception */ public void testTerminateOnInitializationFailureWithDelegateBrokerMissing() throws Exception { System.out.println("-------------- testTerminateOnInitializationFailureWithDelegateBrokerMissing -------------"); System.out.println("---------------------- The Uima Client Times Out After 20 seconds --------------------------"); System.out.println("-- The test requires 1 minute to complete due to 60 second delay in Spring Listener ----"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); try { // Deploy remote service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); // Deploy top level aggregate that communicates with the remote via Http Tunnelling deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotatorTerminateOnDelegateBadBrokerURL.xml"); // Initialize and run the Test. Wait for a completion and cleanup resources. Map<String, Object> appCtx = new HashMap(); appCtx.put(UimaAsynchronousEngine.ServerUri, String.valueOf(broker.getMasterConnectorURI())); appCtx.put(UimaAsynchronousEngine.Endpoint, "TopLevelTaeQueue"); appCtx.put(UimaAsynchronousEngine.GetMetaTimeout, 20000); runTest(appCtx,eeUimaEngine, String.valueOf(broker.getMasterConnectorURI()), "TopLevelTaeQueue", 1, EXCEPTION_LATCH ); fail("Expected ResourceInitializationException. Instead, the Aggregate Reports Successfull Initialization"); } catch( ResourceInitializationException e) { Exception cause = getCause(e); System.out.println("Expected Initialization Exception was received:"+cause); } catch( Exception e) { fail("Expected ResourceInitializationException. Instead Got:"+e.getClass()); } eeUimaEngine.stop(); } /** * Tests shutdown due to delegate broker missing. The Aggregate is configured to * retry getMeta 3 times and continue. The client times out after 20 seconds and forces the * shutdown. NOTE: The Spring listener tries to recover JMS connection on failure. In this * test a Listener to remote delegate cannot be established due to a missing broker. The * Listener is setup to retry every 60 seconds. After failure, the listener goes to sleep * for 60 seconds and tries again. This results in a 60 second delay at the end of this test. * * @throws Exception */ public void testDisableOnInitializationFailureWithDelegateBrokerMissing() throws Exception { System.out.println("-------------- testDisableOnInitializationFailureWithDelegateBrokerMissing() -------------"); System.out.println("---------------------- The Uima Client Times Out After 20 seconds --------------------------"); System.out.println("-- The test requires 1 minute to complete due to 60 second delay in Spring Listener ----"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); try { // Deploy remote service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); // Deploy top level aggregate that communicates with the remote via Http Tunnelling deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotatorWithHttpDelegate.xml"); // Initialize and run the Test. Wait for a completion and cleanup resources. Map<String, Object> appCtx = new HashMap(); appCtx.put(UimaAsynchronousEngine.ServerUri, String.valueOf(broker.getMasterConnectorURI())); appCtx.put(UimaAsynchronousEngine.Endpoint, "TopLevelTaeQueue"); appCtx.put(UimaAsynchronousEngine.GetMetaTimeout, 20000); runTest(appCtx,eeUimaEngine, String.valueOf(broker.getMasterConnectorURI()), "TopLevelTaeQueue", 1, PROCESS_LATCH ); } catch( Exception e) { fail("Expected Success. Instead Received Exception:"+e.getClass()); } eeUimaEngine.stop(); } /** * Tests shutdown due to delegate broker missing. The Aggregate is configured to * terminate on getMeta timeout. * * @throws Exception */ public void testTerminateOnInitializationFailureWithAggregateForcedShutdown() throws Exception { System.out.println("-------------- testTerminateOnInitializationFailureWithAggregateForcedShutdown -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Initialize and run the Test. Wait for a completion and cleanup resources. try { // Deploy remote service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); // Deploy top level aggregate that communicates with the remote via Http Tunnelling deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotatorWithHttpDelegateNoRetries.xml"); super.setExpectingServiceShutdown(); runTest(null,eeUimaEngine, String.valueOf(broker.getMasterConnectorURI()), "TopLevelTaeQueue", 10, EXCEPTION_LATCH ); fail("Expected ResourceInitializationException. Instead, the Aggregate Reports Successfull Initialization"); } catch( ResourceInitializationException e) { Exception cause = getCause(e); System.out.println("Expected Initialization Exception was received:"+cause); } catch( Exception e) { fail("Expected ResourceInitializationException. Instead Got:"+e.getClass()); } eeUimaEngine.stop(); } /** * This tests some of the error handling. Each annotator writes a file and throws an exception. * After the CAS is processed the presence/absence of certain files indicates success or failure. * The first annotator fails and lets the CAS proceed, so should write only one file. * The second annotator fails and is retried 2 times, and doesn't let the CAS proceed, so should write 3 files. * The third annotator should not see the CAS, so should not write any files * * @throws Exception */ public void testContinueOnRetryFailure() throws Exception { System.out.println("-------------- testContinueOnRetryFailure -------------"); File tempDir = new File("temp"); deleteAllFiles(tempDir); tempDir.mkdir(); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_WriterAnnotatorA.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_WriterAnnotatorB.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithContinueOnRetryFailures.xml"); runTest(null, eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); if ( !(new File(tempDir, "WriterAnnotatorB.3")).exists() || (new File(tempDir, "WriterAnnotatorB.4")).exists()) { fail("Second annotator should have run 3 times"); } if ((new File(tempDir, "WriterAnnotatorC.1")).exists()) { fail("Third annotator should not have seen CAS"); } } /** * Test use of a JMS Service Adapter. * Invoke from a synchronous aggregate to emulate usage from RunAE or RunCPE. * * @throws Exception */ public void testJmsServiceAdapter() throws Exception { System.out.println("-------------- testJmsServiceAdapter -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_SyncAggregateWithJmsService.xml"); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } public void testJmsServiceAdapterWithException() throws Exception { System.out.println("-------------- testJmsServiceAdapterWithException -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithException.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_SyncAggregateWithJmsService.xml"); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, EXCEPTION_LATCH); } public void testJmsServiceAdapterWithProcessTimeout() throws Exception { System.out.println("-------------- testJmsServiceAdapterWithProcessTimeout -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithLongDelay.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_SyncAggregateWithJmsServiceLongDelay.xml"); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, EXCEPTION_LATCH); } public void testJmsServiceAdapterWithGetmetaTimeout() throws Exception { System.out.println("-------------- testJmsServiceAdapterWithGetmetaTimeout -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); addExceptionToignore(ResourceInitializationException.class); deployService(eeUimaEngine, relativePath+"/Deploy_SyncAggregateWithJmsService.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()), "TopLevelTaeQueue" ); appCtx.put(UimaAsynchronousEngine.GetMetaTimeout, 1500 ); runTest(appCtx,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, EXCEPTION_LATCH); } public void testDeployAgainAndAgain() throws Exception { System.out.println("-------------- testDeployAgainAndAgain -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // here or in the loop, no change. for (int num=1; num<=50; num++) { System.out.println("\nRunning iteration " + num ); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator2.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithParallelFlow.xml"); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } } private Exception getCause( Throwable e) { Exception cause = (Exception)e; while ( cause.getCause() != null ) { cause = (Exception)cause.getCause(); } return cause; } /** * This tests GetMeta retries. It deploys a simple Aggregate service that contains a collocated * Primitive service and a Primitive remote. The Primitive remote is simulated in this code. The * code starts a listener where the Aggregate sends GetMeta requests. The listener responds to * the Aggregate with its metadata only when an expected number of GetMeta retries is met. If * the Aggregate fails to send expected number of GetMeta requests, the listener will not adjust * its CountDownLatch and will cause this test to hang. * * @throws Exception */ public void GetMetaRetry() throws Exception { getMetaCountLatch = new CountDownLatch(MaxGetMetaRetryCount); Connection connection = getConnection(); connection.start(); Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE); ActiveMQDestination destination = (ActiveMQDestination)session.createQueue(primitiveServiceQueue1); ActiveMQMessageConsumer consumer = (ActiveMQMessageConsumer)session.createConsumer(destination); consumer.setMessageListener(new MessageListener() { public void onMessage(Message aMessage) { try { if ( isMetaRequest(aMessage)) { // Reply with metadata when retry count reaches defined threshold if ( getMetaRequestCount > 0 && getMetaRequestCount % MaxGetMetaRetryCount == 0 ) { JmsMessageContext msgContext = new JmsMessageContext(aMessage, primitiveServiceQueue1); JmsOutputChannel outputChannel = new JmsOutputChannel(); outputChannel.setServiceInputEndpoint(primitiveServiceQueue1); outputChannel.setServerURI(getBrokerUri()); Endpoint endpoint = msgContext.getEndpoint(); outputChannel.sendReply(getPrimitiveMetadata1(PrimitiveDescriptor1),endpoint, true); } getMetaRequestCount++; getMetaCountLatch.countDown(); // Count down to unblock the thread } } catch( Exception e) { e.printStackTrace(); } } }); consumer.start(); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); String containerId = deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotator.xml"); Map<String,Object> appCtx = new HashMap(); appCtx.put(UimaAsynchronousEngine.ServerUri, String.valueOf(broker.getMasterConnectorURI())); appCtx.put(UimaAsynchronousEngine.Endpoint, "TopLevelTaeQueue"); appCtx.put(UimaAsynchronousEngine.CasPoolSize, Integer.valueOf(4) ); appCtx.put(UimaAsynchronousEngine.ReplyWindow, 15 ); appCtx.put(UimaAsynchronousEngine.Timeout, 0 ); initialize(eeUimaEngine, appCtx); System.out.println("TestBroker.testGetMetaRetry()-Blocking On GetMeta Latch. Awaiting GetMeta Requests"); /*********************************************************************************/ /**** This Code Will Block Until Expected Number Of GetMeta Requests Arrive ******/ getMetaCountLatch.await(); /*********************************************************************************/ consumer.stop(); connection.stop(); eeUimaEngine.undeploy(containerId); eeUimaEngine.stop(); } public ProcessingResourceMetaData getPrimitiveMetadata1(String aDescriptor) throws Exception { ResourceSpecifier resourceSpecifier = UimaClassFactory.produceResourceSpecifier(aDescriptor); return ((AnalysisEngineDescription) resourceSpecifier).getAnalysisEngineMetaData(); } private static boolean deleteAllFiles(File directory) { if (directory.isDirectory()) { String[] files = directory.list(); for (int i=0; i<files.length; i++) { deleteAllFiles(new File(directory, files[i])); } } // Now have an empty directory or simple file return directory.delete(); } private class TestListener implements UimaASStatusCallbackListener, Runnable { private String casReferenceId = null; private TestUimaASExtended tester; private boolean running = false; private Object monitor = new Object(); public TestListener(TestUimaASExtended aTester) { tester = aTester; } public void collectionProcessComplete(EntityProcessStatus arg0) { // TODO Auto-generated method stub } public void entityProcessComplete(CAS aCAS, EntityProcessStatus aProcessStatus) { if (aProcessStatus.isException()) { if ( aProcessStatus instanceof UimaASProcessStatus ) { casReferenceId = ((UimaASProcessStatus)aProcessStatus).getCasReferenceId(); if ( casReferenceId != null ) { synchronized(monitor) { monitor.notifyAll(); } } } } } public void initializationComplete(EntityProcessStatus arg0) { // TODO Auto-generated method stub } public String getCasReferenceId() { synchronized( monitor) { while( casReferenceId == null ) { try { monitor.wait(); }catch( InterruptedException e) {} } } return casReferenceId; } public void doStop() { running = false; } public void run() { System.out.println("Stopping Callback Listener Thread"); } } }
uima-as/uimaj-as-activemq/src/test/java/org/apache/uima/ee/test/TestUimaASExtended.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.uima.ee.test; import java.io.BufferedReader; import java.io.File; import java.io.FileInputStream; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.util.HashMap; import java.util.Map; import java.util.concurrent.CountDownLatch; import javax.jms.Connection; import javax.jms.Message; import javax.jms.MessageListener; import javax.jms.Session; import org.apache.activemq.ActiveMQMessageConsumer; import org.apache.activemq.command.ActiveMQDestination; import org.apache.uima.UIMAFramework; import org.apache.uima.UIMA_IllegalStateException; import org.apache.uima.aae.UimaClassFactory; import org.apache.uima.aae.client.UimaASProcessStatus; import org.apache.uima.aae.client.UimaASStatusCallbackListener; import org.apache.uima.aae.client.UimaAsynchronousEngine; import org.apache.uima.aae.controller.Endpoint; import org.apache.uima.aae.error.ServiceShutdownException; import org.apache.uima.adapter.jms.JmsConstants; import org.apache.uima.adapter.jms.activemq.JmsOutputChannel; import org.apache.uima.adapter.jms.client.BaseUIMAAsynchronousEngine_impl; import org.apache.uima.adapter.jms.message.JmsMessageContext; import org.apache.uima.analysis_engine.AnalysisEngineDescription; import org.apache.uima.cas.CAS; import org.apache.uima.collection.CollectionReader; import org.apache.uima.collection.CollectionReaderDescription; import org.apache.uima.collection.EntityProcessStatus; import org.apache.uima.ee.test.utils.BaseTestSupport; import org.apache.uima.internal.util.XMLUtils; import org.apache.uima.resource.ResourceInitializationException; import org.apache.uima.resource.ResourceSpecifier; import org.apache.uima.resource.metadata.ProcessingResourceMetaData; import org.apache.uima.util.Level; import org.apache.uima.util.XMLInputSource; public class TestUimaASExtended extends BaseTestSupport { private static final int DEFAULT_HTTP_PORT = 8888; private CountDownLatch getMetaCountLatch = null; private static final int MaxGetMetaRetryCount = 2; private static final String primitiveServiceQueue1 = "NoOpAnnotatorQueue"; private static final String PrimitiveDescriptor1 = "resources/descriptors/analysis_engine/NoOpAnnotator.xml"; private int getMetaRequestCount = 0; /** * Tests Broker startup and shutdown */ public void testBrokerLifecycle() { System.out.println("-------------- testBrokerLifecycle -------------"); System.out.println("UIMA_HOME="+System.getenv("UIMA_HOME")+System.getProperty("file.separator")+"bin"+System.getProperty("file.separator")+"dd2spring.xsl"); } /** * Tests handling of multiple calls to initialize(). A subsequent call to * initialize should result in ResourceInitializationException. * * @throws Exception */ public void testInvalidInitializeCall() throws Exception { System.out.println("-------------- testInvalidInitializeCall -------------"); // Instantiate Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); super.setExpectingServiceShutdown(); deployService(eeUimaEngine, relativePath+"/Deploy_PersonTitleAnnotator.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()),"PersonTitleAnnotatorQueue" ); try { initialize(eeUimaEngine, appCtx); waitUntilInitialized(); System.out.println("First Initialize Call Completed"); eeUimaEngine.initialize(appCtx); fail("Subsequent call to initialize() did not return expected exception:"+ UIMA_IllegalStateException.class+" Subsequent call to initialize succeeded with no error"); } catch( ResourceInitializationException e) { if ( e.getCause() != null && !(e.getCause() instanceof UIMA_IllegalStateException ) ) { fail("Invalid Exception Thrown. Expected:"+ UIMA_IllegalStateException.class+" Received:"+ e.getClass()); } else { System.out.println("Received Expected Exception:"+ UIMA_IllegalStateException.class); } } catch( ServiceShutdownException e) { // expected } finally { eeUimaEngine.stop(); } } /** * Tests deployment of a primitive Uima EE Service (PersontTitleAnnotator). Deploys the primitive * in the same jvm using Uima EE Client API and blocks on a monitor until the Uima Client calls initializationComplete() * method. Once the primitive service starts it is expected to send its metadata to the Uima client * which in turn notifies this object with a call to initializationComplete() where the monitor * is signaled to unblock the thread. This code will block if the Uima Client does not call * initializationComplete() * * @throws Exception */ public void testDeployPrimitiveService() throws Exception { System.out.println("-------------- testDeployPrimitiveService -------------"); // Instantiate Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy Uima EE Primitive Service deployService(eeUimaEngine, relativePath+"/Deploy_PersonTitleAnnotator.xml"); super.setExpectingServiceShutdown(); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"PersonTitleAnnotatorQueue", 0, EXCEPTION_LATCH); } /** * Tests a simple Aggregate with one remote Delegate and collocated Cas Multiplier * * @throws Exception */ public void testDeployAggregateService() throws Exception { System.out.println("-------------- testDeployAggregateService -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); System.setProperty(JmsConstants.SessionTimeoutOverride, "2500000"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotator.xml"); super.setExpectingServiceShutdown(); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 0, PROCESS_LATCH); } /** * Tests a simple Aggregate with one remote Delegate and collocated Cas Multiplier * * @throws Exception */ public void testDeployAggregateServiceWithTempReplyQueue() throws Exception { System.out.println("-------------- testDeployAggregateServiceWithTempReplyQueue -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateUsingRemoteTempQueue.xml"); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } /** * Tests a simple Aggregate with one remote Delegate and collocated Cas Multiplier * * @throws Exception */ public void testProcessAggregateServiceWith1000Docs() throws Exception { System.out.println("-------------- testProcessAggregateServiceWith1000Docs -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotatorWithInternalCM1000Docs.xml"); // deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotatorWith1MillionDocs.xml"); super.setExpectingServiceShutdown(); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } public void testProcessAggregateWithInnerAggregateCM() throws Exception { System.out.println("-------------- testProcessAggregateWithInnerAggregateCM() -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); System.setProperty(JmsConstants.SessionTimeoutOverride, "2500000"); deployService(eeUimaEngine, relativePath+"/Deploy_ComplexAggregateWithInnerAggregateCM.xml"); super.setExpectingServiceShutdown(); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } /** * Tests exception thrown in the Uima EE Client when the Collection Reader is added after * the uima ee client is initialized * * @throws Exception */ /* public void testCollectionReader() throws Exception { System.out.println("-------------- testCollectionReader -------------"); // Instantiate Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_PersonTitleAnnotator.xml"); super.setExpectingServiceShutdown(); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()),"PersonTitleAnnotatorQueue" ); // reduce the cas pool size and reply window appCtx.remove(UimaAsynchronousEngine.CasPoolSize); appCtx.put(UimaAsynchronousEngine.CasPoolSize, Integer.valueOf(2)); appCtx.remove(UimaAsynchronousEngine.ReplyWindow); appCtx.put(UimaAsynchronousEngine.ReplyWindow, 1); // set the collection reader File collectionReaderDescriptor = new File("resources/descriptors/collection_reader/FileSystemCollectionReader.xml"); CollectionReaderDescription collectionReaderDescription = UIMAFramework.getXMLParser() .parseCollectionReaderDescription(new XMLInputSource(collectionReaderDescriptor)); CollectionReader collectionReader = UIMAFramework .produceCollectionReader(collectionReaderDescription); eeUimaEngine.setCollectionReader(collectionReader); initialize(eeUimaEngine, appCtx); waitUntilInitialized(); runCrTest(eeUimaEngine, 7); eeUimaEngine.stop(); } */ /** * Tests exception thrown in the Uima EE Client when the Collection Reader is added after * the uima ee client is initialized * * @throws Exception */ public void testExceptionOnPostInitializeCollectionReaderInjection() throws Exception { System.out.println("-------------- testExceptionOnPostInitializeCollectionReaderInjection -------------"); // Instantiate Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_PersonTitleAnnotator.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()),"PersonTitleAnnotatorQueue" ); super.setExpectingServiceShutdown(); initialize(eeUimaEngine, appCtx); waitUntilInitialized(); try { // Simulate plugging in a Collection Reader. This should throw // ResourceInitializationException since the client code has // been already initialized. eeUimaEngine.setCollectionReader(null); } catch( ResourceInitializationException e) { System.out.println("Received Expected Exception:"+ResourceInitializationException.class); // Expected return; } catch( Exception e) { fail("Invalid Exception Thrown. Expected:"+ ResourceInitializationException.class+" Received:"+ e.getClass()); } finally { eeUimaEngine.stop(); } fail("Expected" + ResourceInitializationException.class); } /** * Tests the shutdown due to a failure in the Flow Controller while diabling a delegate * * @throws Exception */ public void testTerminateOnFlowControllerExceptionOnDisable() throws Exception { System.out.println("-------------- testTerminateOnFlowControllerExceptionOnNextStep -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithException.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithFlowControllerExceptionOnDisable.xml"); super.setExpectingServiceShutdown(); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, EXCEPTION_LATCH); //PC_LATCH); } /** * Tests the shutdown due to a failure in the Flow Controller when initializing * * @throws Exception */ public void testTerminateOnFlowControllerExceptionOnInitialization() throws Exception { System.out.println("-------------- testTerminateOnFlowControllerExceptionOnInitialization -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); String[] containerIds = new String[2]; try { containerIds[0] = deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); containerIds[1] = deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithFlowControllerExceptionOnInitialization.xml"); fail("Expected ResourceInitializationException. Instead, the Aggregate Deployed Successfully"); } catch (ResourceInitializationException e) { Exception cause = getCause(e); System.out.println("\nExpected Initialization Exception was received:"+cause); } catch (Exception e) { fail("Expected ResourceInitializationException. Instead Got:" + e.getClass()); } finally { eeUimaEngine.undeploy(containerIds[0]); eeUimaEngine.undeploy(containerIds[1]); } } /** * Tests the shutdown due to a failure in the Flow Controller when initializing AND have delegates to disable * (Jira issue UIMA-1171) * * @throws Exception */ public void testTerminateOnFlowControllerExceptionOnInitializationWithDisabledDelegates() throws Exception { System.out.println("-------------- testTerminateOnFlowControllerExceptionOnInitializationWithDisabledDelegates -----"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); String containerId = null; try { containerId = deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithFlowControllerExceptionOnInitialization.xml"); fail("Expected ResourceInitializationException. Instead, the Aggregate Deployed Successfully"); } catch (ResourceInitializationException e) { Exception cause = getCause(e); System.out.println("\nExpected Initialization Exception was received - cause: "+cause); } catch (Exception e) { fail("Expected ResourceInitializationException. Instead Got:" + e.getClass()); } finally { eeUimaEngine.undeploy(containerId); } } /** * Deploys a Primitive Uima EE service and sends 5 CASes to it. * * @throws Exception */ public void testPrimitiveServiceProcess() throws Exception { System.out.println("-------------- testPrimitiveServiceProcess -------------"); // Instantiate Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy Uima EE Primitive Service deployService(eeUimaEngine, relativePath+"/Deploy_PersonTitleAnnotator.xml"); super.setExpectingServiceShutdown(); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"PersonTitleAnnotatorQueue", 5, PROCESS_LATCH); } /** * Deploys a Primitive Uima EE service and sends 5 CASes to it. * * @throws Exception */ public void testSyncAggregateProcess() throws Exception { System.out.println("-------------- testSyncAggregateProcess -------------"); // Instantiate Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy Uima EE Primitive Service deployService(eeUimaEngine, relativePath+"/Deploy_MeetingDetectorAggregate.xml"); super.setExpectingServiceShutdown(); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"MeetingDetectorQueue", 5, PROCESS_LATCH); } /** * Deploys a Primitive Uima EE service and sends 5 CASes to it. * * @throws Exception */ public void testPrimitiveServiceProcessPingFailure() throws Exception { System.out.println("-------------- testPrimitiveServiceProcessPingFailure -------------"); // Instantiate Uima EE Client final BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy Uima EE Primitive Service final String containerID = deployService(eeUimaEngine, relativePath+"/Deploy_PersonTitleAnnotator.xml"); super.setExpectingServiceShutdown(); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()), "PersonTitleAnnotatorQueue" ); // Set an explicit getMeta (Ping)timeout appCtx.put(UimaAsynchronousEngine.GetMetaTimeout, 2000 ); // Set an explicit process timeout so to test the ping on timeout appCtx.put(UimaAsynchronousEngine.Timeout, 1000 ); // Spin a thread and wait for awhile before killing the remote service. // This will cause the client to timeout waiting for a CAS reply and // to send a Ping message to test service availability. The Ping times // out and causes the client API to stop. new Thread() { public void run() { Object mux = new Object(); synchronized( mux ) { try { mux.wait(500); // Undeploy service container eeUimaEngine.undeploy(containerID); } catch (Exception e) {} } } }.start(); try { // RuntimeException is expected due to failure runTest(appCtx,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"PersonTitleAnnotatorQueue", 500, EXCEPTION_LATCH); } catch( RuntimeException e) {} } /** * Tests error handling on delegate timeout. The Delegate is started as remote, * the aggregate initializes and the client starts sending CASes. After a short * while the client kills the remote delegate. The aggregate receives a CAS * timeout and disables the delegate. A timed out CAS is sent to the next * delegate in the pipeline. ALL 1000 CASes are returned to the client. * * @throws Exception */ public void testDelegateTimeoutAndDisable() throws Exception { System.out.println("-------------- testDelegateTimeoutAndDisable -------------"); // Instantiate Uima EE Client final BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy Uima EE Primitive Service final String containerID = deployService(eeUimaEngine, relativePath+"/Deploy_RoomNumberAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_MeetingDetectorTAE_RemoteRoomNumberDisable.xml"); super.setExpectingServiceShutdown(); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()), "MeetingDetectorTaeQueue" ); // Set an explicit getMeta (Ping)timeout appCtx.put(UimaAsynchronousEngine.GetMetaTimeout, 2000 ); // Set an explicit process timeout so to test the ping on timeout appCtx.put(UimaAsynchronousEngine.Timeout, 1000 ); // Spin a thread and wait for awhile before killing the remote service. // This will cause the client to timeout waiting for a CAS reply and // to send a Ping message to test service availability. The Ping times // out and causes the client API to stop. new Thread() { public void run() { Object mux = new Object(); synchronized( mux ) { try { mux.wait(500); // Undeploy service container eeUimaEngine.undeploy(containerID); } catch (Exception e) {} } } }.start(); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"MeetingDetectorTaeQueue", 1000, PROCESS_LATCH); } /** * This test kills a remote Delegate while in the middle of processing 1000 CASes. * The CAS timeout error handling disables the delegate and forces ALL CASes * from the Pending Reply List to go through Error Handler. The Flow Controller * is configured to continueOnError and CASes that timed out are allowed to * continue to the next delegate. ALL 1000 CASes are accounted for in the * NoOp Annotator that is last in the flow. * * @throws Exception */ public void testDisableDelegateOnTimeoutWithCM() throws Exception { System.out.println("-------------- testDisableDelegateOnTimeoutWithCM -------------"); // Instantiate Uima EE Client final BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy Uima EE Primitive Service final String containerID = deployService(eeUimaEngine, relativePath+"/Deploy_RoomNumberAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_MeetingDetectorTAEWithCM_RemoteRoomNumberDisable.xml"); super.setExpectingServiceShutdown(); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()), "MeetingDetectorTaeQueue" ); // Set an explicit getMeta (Ping)timeout appCtx.put(UimaAsynchronousEngine.GetMetaTimeout, 2000 ); // Set an explicit process timeout so to test the ping on timeout appCtx.put(UimaAsynchronousEngine.Timeout, 1000 ); // Spin a thread and wait for awhile before killing the remote service. // This will cause the client to timeout waiting for a CAS reply and // to send a Ping message to test service availability. The Ping times // out and causes the client API to stop. new Thread() { public void run() { Object mux = new Object(); synchronized( mux ) { try { mux.wait(300); // Undeploy service container eeUimaEngine.undeploy(containerID); } catch (Exception e) {} } } }.start(); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"MeetingDetectorTaeQueue", 1, PROCESS_LATCH); } /** * Tests Uima EE client ability to test sendAndReceive in multiple/concurrent threads * It spawns 4 thread each sending 100 CASes to a Primitive Uima EE service * @throws Exception */ public void testSynchCallProcessWithMultipleThreads() throws Exception { System.out.println("-------------- testSynchCallProcessWithMultipleThreads -------------"); int howManyCASesPerRunningThread = 100; int howManyRunningThreads = 4; super.setExpectingServiceShutdown(); runTestWithMultipleThreads(relativePath+"/Deploy_PersonTitleAnnotator.xml", "PersonTitleAnnotatorQueue", howManyCASesPerRunningThread, howManyRunningThreads, 0, 0 ); } /** * * @throws Exception */ public void testPrimitiveProcessCallWithLongDelay() throws Exception { System.out.println("-------------- testPrimitiveProcessCallWithLongDelay -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy Uima EE Primitive Service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithLongDelay.xml"); super.setExpectingServiceShutdown(); // We expect 18000ms to be spent in process method super.setExpectedProcessTime(6000); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()),"NoOpAnnotatorQueueLongDelay" ); appCtx.remove(UimaAsynchronousEngine.ReplyWindow); appCtx.put(UimaAsynchronousEngine.ReplyWindow, 1); runTest(appCtx,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"NoOpAnnotatorQueueLongDelay", 4, PROCESS_LATCH, true); } /** * Tests time spent in process CAS. The CAS is sent to three remote delegates each * with a delay of 6000ms in the process method. The aggregate is expected to sum * up the time spent in each annotator process method. The final sum is returned * to the client (the test) and compared against expected 18000ms. The test actually * allows for 20ms margin to account for any overhead (garbage collecting, slow cpu, etc) * * @throws Exception */ public void testAggregateProcessCallWithLongDelay() throws Exception { System.out.println("-------------- testAggregateProcessCallWithLongDelay -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy Uima EE Primitive Services each with 6000ms delay in process() deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorAWithLongDelay.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorBWithLongDelay.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorCWithLongDelay.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotatorWithLongDelay.xml"); super.setExpectingServiceShutdown(); // We expect 18000ms to be spent in process method super.setExpectedProcessTime(18000); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue" ); appCtx.remove(UimaAsynchronousEngine.ReplyWindow); // make sure we only send 1 CAS at a time appCtx.put(UimaAsynchronousEngine.ReplyWindow, 1); runTest(appCtx,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH, true); } /** * Tests Aggregate configuration where the Cas Multiplier delegate is the * last delegate in the Aggregate's pipeline * * @throws Exception */ public void testAggregateProcessCallWithLastCM() throws Exception { System.out.println("-------------- testAggregateProcessCallWithLastCM -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy Uima EE Primitive Services each with 6000ms delay in process() deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithLastCM.xml"); super.setExpectingServiceShutdown(); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH, true); } /** * Tests shutdown while running with multiple/concurrent threads * The Annotator throws an exception and the Aggregate error handling is setup to terminate * on the first error. * * @throws Exception */ public void testTimeoutInSynchCallProcessWithMultipleThreads() throws Exception { System.out.println("-------------- testTimeoutInSynchCallProcessWithMultipleThreads -------------"); int howManyCASesPerRunningThread = 2; int howManyRunningThreads = 4; super.setExpectingServiceShutdown(); int processTimeout = 2000; int getMetaTimeout = 500; runTestWithMultipleThreads(relativePath+"/Deploy_NoOpAnnotatorWithLongDelay.xml", "NoOpAnnotatorQueueLongDelay", howManyCASesPerRunningThread, howManyRunningThreads, processTimeout, getMetaTimeout ); } /** * Tests shutdown while running with multiple/concurrent threads * The Annotator throws an exception and the Aggregate error handling is setup to terminate * on the first error. * * @throws Exception */ public void testTimeoutFailureInSynchCallProcessWithMultipleThreads() throws Exception { System.out.println("-------------- testTimeoutFailureInSynchCallProcessWithMultipleThreads -------------"); int howManyCASesPerRunningThread = 1000; int howManyRunningThreads = 4; super.setExpectingServiceShutdown(); int processTimeout = 2000; int getMetaTimeout = 500; runTestWithMultipleThreads(relativePath+"/Deploy_NoOpAnnotator.xml", "NoOpAnnotatorQueue", howManyCASesPerRunningThread, howManyRunningThreads, 2000, 1000, true ); } /** * Tests a parallel flow in the Uima EE aggregate. * * @throws Exception */ public void testProcessWithParallelFlow() throws Exception { System.out.println("-------------- testProcessWithParallelFlow -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator2.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithParallelFlow.xml"); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } /** * Tests ability to disable one delegate in parallel flow and continue * * @throws Exception */ public void testDisableDelegateInParallelFlow() throws Exception { System.out.println("-------------- testDisableDelegateInParallelFlow -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator2.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithParallelFlow.xml"); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } /** * * @throws Exception */ public void testTimeoutDelegateInParallelFlows() throws Exception { System.out.println("-------------- testTimeoutDelegateInParallelFlows -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithDelay.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithLongDelay.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator2.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator3.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithParallelFlows.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()), "TopLevelTaeQueue" ); // Set an explicit process timeout so one of the 1st parallels is disabled but 2nd parallel flow continues. appCtx.put(UimaAsynchronousEngine.Timeout, 20000 ); addExceptionToignore(org.apache.uima.aae.error.UimaEEServiceException.class); addExceptionToignore(org.apache.uima.aae.error.UimaASProcessCasTimeout.class); runTest(appCtx, eeUimaEngine, null, null, 1, PROCESS_LATCH); } /** * Tests Timeout logic * @throws Exception */ public void testRemoteDelegateTimeout() throws Exception { System.out.println("-------------- testRemoteDelegateTimeout -------------"); System.out.println("The Aggregate sends 2 CASes to the NoOp Annotator which"); System.out.println("delays each CAS for 6000ms. The timeout is set to 4000ms"); System.out.println("Two CAS retries are expected"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithLongDelay.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotatorWithLongDelayDelegate.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()), "TopLevelTaeQueue" ); // The Remote NoOp delays each CAS for 6000ms. The Aggregate sends two CASes so adjust // client timeout to be just over 12000ms. appCtx.put(UimaAsynchronousEngine.Timeout, 13000 ); addExceptionToignore(org.apache.uima.aae.error.UimaEEServiceException.class); addExceptionToignore(org.apache.uima.aae.error.UimaASProcessCasTimeout.class); runTest(appCtx, eeUimaEngine, null, null, 1, PROCESS_LATCH); } /** * Tests Timeout logic * @throws Exception */ public void testDisableOnRemoteDelegatePingTimeout() throws Exception { System.out.println("-------------- testDisableOnRemoteDelegatePingTimeout -------------"); System.out.println("The Aggregate sends 2 CASes to the NoOp Annotator which"); System.out.println("delays each CAS for 6000ms. The timeout is set to 4000ms"); System.out.println("Two CAS retries are expected"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); String delegateContainerId = deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithLongDelay.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotatorWithLongDelayDelegate.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()), "TopLevelTaeQueue" ); // The Remote NoOp delays each CAS for 6000ms. The Aggregate sends two CASes so adjust // client timeout to be just over 12000ms. appCtx.put(UimaAsynchronousEngine.Timeout, 13000 ); addExceptionToignore(org.apache.uima.aae.error.UimaEEServiceException.class); addExceptionToignore(org.apache.uima.aae.error.UimaASProcessCasTimeout.class); // Remove container with the remote NoOp delegate so that we can test // the CAS Process and Ping timeout. eeUimaEngine.undeploy(delegateContainerId); // Send the CAS and handle exception runTest(appCtx, eeUimaEngine, null, null, 1, EXCEPTION_LATCH); } public void testDeployAggregateWithCollocatedAggregateService() throws Exception { System.out.println("-------------- testDeployAggregateWithCollocatedAggregateService -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_ComplexAggregate.xml"); super.setExpectingServiceShutdown(); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 10, PROCESS_LATCH); } public void testProcessWithAggregateUsingCollocatedMultiplier() throws Exception { System.out.println("-------------- testProcessWithAggregateUsingCollocatedMultiplier -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotator.xml"); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } public void testProcessWithAggregateUsingRemoteMultiplier() throws Exception { System.out.println("-------------- testProcessWithAggregateUsingRemoteMultiplier -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_RemoteCasMultiplier.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithRemoteMultiplier.xml"); runTest(null, eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } public void testClientWithAggregateMultiplier() throws Exception { System.out.println("-------------- testClientWithAggregateMultiplier -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_RemoteCasMultiplier.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateMultiplier.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue" ); // reduce the cas pool size and reply window appCtx.remove(UimaAsynchronousEngine.ShadowCasPoolSize); appCtx.put(UimaAsynchronousEngine.ShadowCasPoolSize, Integer.valueOf(2)); runTest(appCtx, eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } public void testClientProcessWithRemoteMultiplier() throws Exception { System.out.println("-------------- testClientProcessWithRemoteMultiplier -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_RemoteCasMultiplier.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()),"TestMultiplierQueue" ); appCtx.remove(UimaAsynchronousEngine.ShadowCasPoolSize); appCtx.put(UimaAsynchronousEngine.ShadowCasPoolSize, Integer.valueOf(1)); runTest(appCtx,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TestMultiplierQueue", 1, PROCESS_LATCH); } public void testClientProcessWithComplexAggregateRemoteMultiplier() throws Exception { System.out.println("-------------- testClientProcessWithComplexAggregateRemoteMultiplier -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_RemoteCasMultiplierWith10Docs_1.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_CasMultiplierAggregateWithRemoteCasMultiplier.xml"); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } public void testProcessWithAggregateUsing2RemoteMultipliers() throws Exception { System.out.println("-------------- testProcessWithAggregateUsing2RemoteMultipliers -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_RemoteCasMultiplierWith10Docs_1.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_RemoteCasMultiplierWith10Docs_2.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWith2RemoteMultipliers.xml"); runTest(null, eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } public void testProcessWithAggregateUsing2CollocatedMultipliers() throws Exception { System.out.println("-------------- testProcessWithAggregateUsing2CollocatedMultipliers -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWith2Multipliers.xml"); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } public void testProcessAggregateWithInnerCMAggregate() throws Exception { System.out.println("-------------- testProcessAggregateWithInnerCMAggregate -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_TopAggregateWithInnerAggregateCM.xml"); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } public void testBlueJDeployment() throws Exception { System.out.println("-------------- testBlueJDeployment -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy replicated services for the inner remote aggregate CM deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); // Deploy an instance of a remote aggregate CM containing a collocated Cas Multiplier // CM --> Replicated Remote Primitive --> NoOp CC deployService(eeUimaEngine, relativePath+"/Deploy_CMAggregateWithCollocatedCM.xml"); // Deploy top level Aggregate Cas Multiplier with 2 collocated Cas Multipliers // CM1 --> CM2 --> Remote AggregateCM --> Candidate Answer --> CC deployService(eeUimaEngine, relativePath+"/Deploy_TopLevelBlueJAggregateCM.xml"); super.setExpectingServiceShutdown(); runTest2(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 10, PROCESS_LATCH); } public void testTypesystemMergeWithMultiplier() throws Exception { System.out.println("-------------- testTypesystemMergeWithMultiplier -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithMergedTypes.xml"); runTest(null, eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } public void testStopAggregateWithRemoteMultiplier() throws Exception { System.out.println("-------------- testStopAggregateWithRemoteMultiplier -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_RemoteCasMultiplier.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithExceptionOn5thCAS.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithRemoteMultiplier.xml"); super.setExpectingServiceShutdown(); runTest(null, eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, EXCEPTION_LATCH); } public void testCancelProcessAggregateWithCollocatedMultiplier() throws Exception { System.out.println("-------------- testCancelProcessAggregateWithCollocatedMultiplier -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_ComplexAggregateWith1MillionDocs.xml"); super.setExpectingServiceShutdown(); // Spin a thread to cancel Process after 20 seconds spinShutdownThread( eeUimaEngine, 20000 ); runTest(null, eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1,PROCESS_LATCH); } public void testCancelProcessAggregateWithRemoteMultiplier() throws Exception { System.out.println("-------------- testStopAggregateWithRemoteMultiplier -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_RemoteCasMultiplierWith1MillionDocs.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithRemoteMultiplier.xml"); super.setExpectingServiceShutdown(); // Spin a thread to cancel Process after 20 seconds spinShutdownThread( eeUimaEngine, 20000 ); runTest(null, eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1,PROCESS_LATCH);//EXCEPTION_LATCH); } /** * Test correct reply from the service when its process method fails. Deploys the Primitive * Service ( NoOp Annotator) that is configured to throw an exception on every CAS. The expected * behavior is for the Primitive Service to return a reply with an Exception. This code blocks * on a Count Down Latch, until the exception is returned from the service. When the exception is * received the latch is opened indicating success. * * @throws Exception */ public void testPrimitiveServiceResponseOnException() throws Exception { System.out.println("-------------- testPrimitiveServiceResponseOnException -------------"); // Instantiate Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy remote service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithException.xml"); super.setExpectingServiceShutdown(); // Deploy Uima EE Primitive Service // Initialize and run the Test. Wait for a completion and cleanup resources. runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"NoOpAnnotatorQueue", 1, EXCEPTION_LATCH); } public void testProcessParallelFlowWithDelegateFailure() throws Exception { System.out.println("-------------- testProcessParallelFlowWithDelegateFailure -------------"); // Create Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); UIMAFramework.getLogger().setLevel(Level.FINE); // Deploy remote service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithException.xml"); // Deploy remote service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator2.xml"); // Deploy top level aggregate service deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithParallelFlowTerminateOnDelegateFailure.xml"); super.setExpectingServiceShutdown(); // Initialize and run the Test. Wait for a completion and cleanup resources. runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, EXCEPTION_LATCH); //PC_LATCH); } /** Tests that the thresholdAction is taken when thresholdCount errors occur in the last thresholdWindow CASes. * Aggregate has two annotators, first fails with increasing frequency (on CASes 10 19 27 34 40 45 49 52 54) * and is disabled after 3 errors in a window of 7 (49,52,54) * Second annotator counts the CASes that reach it and verifies that it sees all but the 9 failures. * It throws an exception if the first is disabled after too many or too few errors. * * @throws Exception */ public void testErrorThresholdWindow() throws Exception { System.out.println("-------------- testErrorThresholdWindow -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); UIMAFramework.getLogger().setLevel(Level.FINE); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotatorWithThresholdWindow.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()), "TopLevelTaeQueue" ); // Set an explicit CPC timeout as exceptions thrown in the 2nd annotator's CPC don't reach the client. appCtx.put(UimaAsynchronousEngine.CpcTimeout, 20000 ); addExceptionToignore(org.apache.uima.aae.error.UimaEEServiceException.class); runTest(appCtx,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); //PC_LATCH); } public void testProcessParallelFlowWithDelegateDisable() throws Exception { System.out.println("-------------- testProcessParallelFlowWithDelegateDisable -------------"); // Create Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithException.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator2.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithParallelFlowDisableOnDelegateFailure.xml"); addExceptionToignore(org.apache.uima.aae.error.UimaEEServiceException.class); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); //PC_LATCH); } public void testPrimitiveShutdownOnTooManyErrors() throws Exception { System.out.println("-------------- testPrimitiveShutdownOnTooManyErrors -------------"); // Create Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy remote service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); // Deploy top level aggregate service deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotator.xml"); super.setExpectingServiceShutdown(); // Initialize and run the Test. Wait for a completion and cleanup resources. runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, CPC_LATCH ); } public void testClientHttpTunnelling() throws Exception { System.out.println("-------------- testClientHttpTunnelling -------------"); // Add HTTP Connector to the broker. The connector will use port 8888. If this port is not available the test fails String httpURI = addHttpConnector(DEFAULT_HTTP_PORT); // Create Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy remote service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); super.setExpectingServiceShutdown(); // Initialize and run the Test. Wait for a completion and cleanup resources. runTest(null,eeUimaEngine,httpURI,"NoOpAnnotatorQueue", 1, CPC_LATCH ); } public void testClientHttpTunnellingToAggregate() throws Exception { System.out.println("-------------- testClientHttpTunnellingToAggregate -------------"); // Add HTTP Connector to the broker. The connector will use port 8888. If this port is not available the test fails String httpURI = addHttpConnector(DEFAULT_HTTP_PORT); // Create Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy remote service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotator.xml"); super.setExpectingServiceShutdown(); // Initialize and run the Test. Wait for a completion and cleanup resources. runTest(null,eeUimaEngine,httpURI,"TopLevelTaeQueue", 1, CPC_LATCH ); } public void testClientHttpTunnellingWithDoubleByteText() throws Exception { System.out.println("-------------- testClientHttpTunnellingWithDoubleByteText -------------"); try { File file = new File(relativeDataPath+"/DoubleByteText.txt"); System.out.println("Checking for existence of File:"+file.getAbsolutePath()); // Process only if the file exists if ( file.exists()) { System.out.println(" *** DoubleByteText.txt exists and will be sent through http connector."); System.out.println(" *** If the vanilla activemq release is being used,"); System.out.println(" *** and DoubleByteText.txt is bigger than 64KB or so, this test case will hang."); System.out.println(" *** To fix, override the classpath with the jar files in and under the"); System.out.println(" *** apache-uima-as/uima-as-distr/src/main/apache-activemq-X.y.z directory"); System.out.println(" *** in the apache-uima-as source distribution."); // Add HTTP Connector to the broker. The connector will use port 8888. If this port is not available the test fails String httpURI = addHttpConnector(DEFAULT_HTTP_PORT); // Create Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy remote service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); InputStream fis = new FileInputStream(file); Reader rd = new InputStreamReader(fis, "UTF-8"); BufferedReader in = new BufferedReader(rd); // Set the double-byte text. This is what will be sent to the service String line = in.readLine(); super.setDoubleByteText(line); int err = XMLUtils.checkForNonXmlCharacters(line); if (err >= 0) { fail("Illegal XML char at offset " + err); } super.setExpectingServiceShutdown(); // Initialize and run the Test. Wait for a completion and cleanup resources. runTest(null,eeUimaEngine,httpURI,"NoOpAnnotatorQueue", 1, CPC_LATCH ); } } catch( Exception e) { // Double-Byte Text file not present. Continue on with the next test e.printStackTrace(); fail("Could not complete test"); } } public void testAggregateHttpTunnelling() throws Exception { System.out.println("-------------- testAggregateHttpTunnelling -------------"); // Add HTTP Connector to the broker. The connector will use port 8888. If this port is not available the test fails addHttpConnector(DEFAULT_HTTP_PORT); // Create Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Deploy remote service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); // Deploy top level aggregate that communicates with the remote via Http Tunnelling deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotatorWithHttpDelegate.xml"); super.setExpectingServiceShutdown(); // Initialize and run the Test. Wait for a completion and cleanup resources. runTest(null,eeUimaEngine, String.valueOf(broker.getMasterConnectorURI()), "TopLevelTaeQueue", 10, CPC_LATCH ); } /** * Tests exception thrown in the Uima EE Client when the Collection Reader is added after * the uima ee client is initialized * * @throws Exception */ public void testCollectionReader() throws Exception { System.out.println("-------------- testCollectionReader -------------"); // Instantiate Uima EE Client BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_PersonTitleAnnotator.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()),"PersonTitleAnnotatorQueue" ); // reduce the cas pool size and reply window appCtx.remove(UimaAsynchronousEngine.CasPoolSize); appCtx.put(UimaAsynchronousEngine.CasPoolSize, Integer.valueOf(2)); appCtx.remove(UimaAsynchronousEngine.ReplyWindow); appCtx.put(UimaAsynchronousEngine.ReplyWindow, 1); super.setExpectingServiceShutdown(); // set the collection reader String filename = super.getFilepathFromClassloader("descriptors/collection_reader/FileSystemCollectionReader.xml"); if ( filename == null ) { fail("Unable to find file:"+"descriptors/collection_reader/FileSystemCollectionReader.xml"+ "in classloader"); } File collectionReaderDescriptor = new File(filename); CollectionReaderDescription collectionReaderDescription = UIMAFramework.getXMLParser() .parseCollectionReaderDescription(new XMLInputSource(collectionReaderDescriptor)); CollectionReader collectionReader = UIMAFramework .produceCollectionReader(collectionReaderDescription); eeUimaEngine.setCollectionReader(collectionReader); initialize(eeUimaEngine, appCtx); waitUntilInitialized(); runCrTest(eeUimaEngine, 7); synchronized(this) { wait(50); } eeUimaEngine.stop(); } public void testAsynchronousTerminate() throws Exception { System.out.println("-------------- testAsynchronousTerminate -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithDelay.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator2.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithParallelFlow.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()), "TopLevelTaeQueue" ); initialize(eeUimaEngine, appCtx); // Wait until the top level service returns its metadata waitUntilInitialized(); super.setExpectingServiceShutdown(); CAS cas = eeUimaEngine.getCAS(); System.out.println(" Sending CAS to kick off aggregate w/colocated CasMultiplier"); eeUimaEngine.sendCAS(cas); System.out.println(" Waiting 1 seconds"); Thread.sleep(1000); System.out.println(" Trying to stop service"); eeUimaEngine.stop(); System.out.println(" stop() returned!"); } public void testCallbackListenerOnFailure() throws Exception { System.out.println("-------------- testCallbackListenerOnFailure -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithException.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()), "NoOpAnnotatorQueue" ); initialize(eeUimaEngine, appCtx); // Wait until the top level service returns its metadata waitUntilInitialized(); super.setExpectingServiceShutdown(); CAS cas = eeUimaEngine.getCAS(); // Register special callback listener. This listener will receive // an exception with the Cas Reference id. TestListener listener = new TestListener(this); eeUimaEngine.addStatusCallbackListener(listener); // Send request out and save Cas Reference id String casReferenceId = eeUimaEngine.sendCAS(cas); // Spin a callback listener thread Thread t = new Thread(listener); t.start(); // Wait for reply CAS. This method blocks String cRefId = listener.getCasReferenceId(); try { // Test if received Cas Reference Id matches the id of the CAS sent out if ( !cRefId.equals(casReferenceId)) { fail( "Received Invalid Cas Reference Id. Expected:"+casReferenceId+" Received: "+cRefId); } else { System.out.println("Received Expected Cas Identifier:"+casReferenceId); } } finally { // Stop callback listener thread listener.doStop(); eeUimaEngine.stop(); } } public void testTerminateOnInitializationFailure() throws Exception { System.out.println("-------------- testTerminateOnInitializationFailure -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); try { deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithParallelFlow.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()), "TopLevelTaeQueue" ); exceptionCountLatch = new CountDownLatch(1); initialize(eeUimaEngine, appCtx); fail("Expected ResourceInitializationException. Instead, the Aggregate Reports Successfull Initialization"); } catch( ResourceInitializationException e) { Exception cause = getCause(e); System.out.println("Expected Initialization Exception was received:"+cause); eeUimaEngine.stop(); } catch( Exception e) { fail("Expected ResourceInitializationException. Instead Got:"+e.getClass()); } } /** * Tests shutdown due to delegate broker missing. The Aggregate is configured to * retry getMeta 3 times and continue. The client times out after 20 seconds and forces the * shutdown. NOTE: The Spring listener tries to recover JMS connection on failure. In this * test a Listener to remote delegate cannot be established due to a missing broker. The * Listener is setup to retry every 60 seconds. After failure, the listener goes to sleep * for 60 seconds and tries again. This results in a 60 second delay at the end of this test. * * @throws Exception */ public void testTerminateOnInitializationFailureWithDelegateBrokerMissing() throws Exception { System.out.println("-------------- testTerminateOnInitializationFailureWithDelegateBrokerMissing -------------"); System.out.println("---------------------- The Uima Client Times Out After 20 seconds --------------------------"); System.out.println("-- The test requires 1 minute to complete due to 60 second delay in Spring Listener ----"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); try { // Deploy remote service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); // Deploy top level aggregate that communicates with the remote via Http Tunnelling deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotatorTerminateOnDelegateBadBrokerURL.xml"); // Initialize and run the Test. Wait for a completion and cleanup resources. Map<String, Object> appCtx = new HashMap(); appCtx.put(UimaAsynchronousEngine.ServerUri, String.valueOf(broker.getMasterConnectorURI())); appCtx.put(UimaAsynchronousEngine.Endpoint, "TopLevelTaeQueue"); appCtx.put(UimaAsynchronousEngine.GetMetaTimeout, 20000); runTest(appCtx,eeUimaEngine, String.valueOf(broker.getMasterConnectorURI()), "TopLevelTaeQueue", 1, EXCEPTION_LATCH ); fail("Expected ResourceInitializationException. Instead, the Aggregate Reports Successfull Initialization"); } catch( ResourceInitializationException e) { Exception cause = getCause(e); System.out.println("Expected Initialization Exception was received:"+cause); } catch( Exception e) { fail("Expected ResourceInitializationException. Instead Got:"+e.getClass()); } eeUimaEngine.stop(); } /** * Tests shutdown due to delegate broker missing. The Aggregate is configured to * retry getMeta 3 times and continue. The client times out after 20 seconds and forces the * shutdown. NOTE: The Spring listener tries to recover JMS connection on failure. In this * test a Listener to remote delegate cannot be established due to a missing broker. The * Listener is setup to retry every 60 seconds. After failure, the listener goes to sleep * for 60 seconds and tries again. This results in a 60 second delay at the end of this test. * * @throws Exception */ public void testDisableOnInitializationFailureWithDelegateBrokerMissing() throws Exception { System.out.println("-------------- testDisableOnInitializationFailureWithDelegateBrokerMissing() -------------"); System.out.println("---------------------- The Uima Client Times Out After 20 seconds --------------------------"); System.out.println("-- The test requires 1 minute to complete due to 60 second delay in Spring Listener ----"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); try { // Deploy remote service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); // Deploy top level aggregate that communicates with the remote via Http Tunnelling deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotatorWithHttpDelegate.xml"); // Initialize and run the Test. Wait for a completion and cleanup resources. Map<String, Object> appCtx = new HashMap(); appCtx.put(UimaAsynchronousEngine.ServerUri, String.valueOf(broker.getMasterConnectorURI())); appCtx.put(UimaAsynchronousEngine.Endpoint, "TopLevelTaeQueue"); appCtx.put(UimaAsynchronousEngine.GetMetaTimeout, 20000); runTest(appCtx,eeUimaEngine, String.valueOf(broker.getMasterConnectorURI()), "TopLevelTaeQueue", 1, PROCESS_LATCH ); } catch( Exception e) { fail("Expected Success. Instead Received Exception:"+e.getClass()); } eeUimaEngine.stop(); } /** * Tests shutdown due to delegate broker missing. The Aggregate is configured to * terminate on getMeta timeout. * * @throws Exception */ public void testTerminateOnInitializationFailureWithAggregateForcedShutdown() throws Exception { System.out.println("-------------- testTerminateOnInitializationFailureWithAggregateForcedShutdown -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // Initialize and run the Test. Wait for a completion and cleanup resources. try { // Deploy remote service deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); // Deploy top level aggregate that communicates with the remote via Http Tunnelling deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotatorWithHttpDelegateNoRetries.xml"); super.setExpectingServiceShutdown(); runTest(null,eeUimaEngine, String.valueOf(broker.getMasterConnectorURI()), "TopLevelTaeQueue", 10, EXCEPTION_LATCH ); fail("Expected ResourceInitializationException. Instead, the Aggregate Reports Successfull Initialization"); } catch( ResourceInitializationException e) { Exception cause = getCause(e); System.out.println("Expected Initialization Exception was received:"+cause); } catch( Exception e) { fail("Expected ResourceInitializationException. Instead Got:"+e.getClass()); } eeUimaEngine.stop(); } /** * This tests some of the error handling. Each annotator writes a file and throws an exception. * After the CAS is processed the presence/absence of certain files indicates success or failure. * The first annotator fails and lets the CAS proceed, so should write only one file. * The second annotator fails and is retried 2 times, and doesn't let the CAS proceed, so should write 3 files. * The third annotator should not see the CAS, so should not write any files * * @throws Exception */ public void testContinueOnRetryFailure() throws Exception { System.out.println("-------------- testContinueOnRetryFailure -------------"); File tempDir = new File("temp"); deleteAllFiles(tempDir); tempDir.mkdir(); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_WriterAnnotatorA.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_WriterAnnotatorB.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithContinueOnRetryFailures.xml"); runTest(null, eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); if ( !(new File(tempDir, "WriterAnnotatorB.3")).exists() || (new File(tempDir, "WriterAnnotatorB.4")).exists()) { fail("Second annotator should have run 3 times"); } if ((new File(tempDir, "WriterAnnotatorC.1")).exists()) { fail("Third annotator should not have seen CAS"); } } /** * Test use of a JMS Service Adapter. * Invoke from a synchronous aggregate to emulate usage from RunAE or RunCPE. * * @throws Exception */ public void testJmsServiceAdapter() throws Exception { System.out.println("-------------- testJmsServiceAdapter -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_SyncAggregateWithJmsService.xml"); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } public void testJmsServiceAdapterWithException() throws Exception { System.out.println("-------------- testJmsServiceAdapterWithException -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithException.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_SyncAggregateWithJmsService.xml"); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, EXCEPTION_LATCH); } public void testJmsServiceAdapterWithProcessTimeout() throws Exception { System.out.println("-------------- testJmsServiceAdapterWithProcessTimeout -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotatorWithLongDelay.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_SyncAggregateWithJmsServiceLongDelay.xml"); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, EXCEPTION_LATCH); } public void testJmsServiceAdapterWithGetmetaTimeout() throws Exception { System.out.println("-------------- testJmsServiceAdapterWithGetmetaTimeout -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); addExceptionToignore(ResourceInitializationException.class); deployService(eeUimaEngine, relativePath+"/Deploy_SyncAggregateWithJmsService.xml"); Map<String, Object> appCtx = buildContext( String.valueOf(broker.getMasterConnectorURI()), "TopLevelTaeQueue" ); appCtx.put(UimaAsynchronousEngine.GetMetaTimeout, 1500 ); runTest(appCtx,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, EXCEPTION_LATCH); } public void testDeployAgainAndAgain() throws Exception { System.out.println("-------------- testDeployAgainAndAgain -------------"); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); // here or in the loop, no change. for (int num=1; num<=50; num++) { System.out.println("\nRunning iteration " + num ); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_NoOpAnnotator2.xml"); deployService(eeUimaEngine, relativePath+"/Deploy_AggregateWithParallelFlow.xml"); runTest(null,eeUimaEngine,String.valueOf(broker.getMasterConnectorURI()),"TopLevelTaeQueue", 1, PROCESS_LATCH); } } private Exception getCause( Throwable e) { Exception cause = (Exception)e; while ( cause.getCause() != null ) { cause = (Exception)cause.getCause(); } return cause; } /** * This tests GetMeta retries. It deploys a simple Aggregate service that contains a collocated * Primitive service and a Primitive remote. The Primitive remote is simulated in this code. The * code starts a listener where the Aggregate sends GetMeta requests. The listener responds to * the Aggregate with its metadata only when an expected number of GetMeta retries is met. If * the Aggregate fails to send expected number of GetMeta requests, the listener will not adjust * its CountDownLatch and will cause this test to hang. * * @throws Exception */ public void GetMetaRetry() throws Exception { getMetaCountLatch = new CountDownLatch(MaxGetMetaRetryCount); Connection connection = getConnection(); connection.start(); Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE); ActiveMQDestination destination = (ActiveMQDestination)session.createQueue(primitiveServiceQueue1); ActiveMQMessageConsumer consumer = (ActiveMQMessageConsumer)session.createConsumer(destination); consumer.setMessageListener(new MessageListener() { public void onMessage(Message aMessage) { try { if ( isMetaRequest(aMessage)) { // Reply with metadata when retry count reaches defined threshold if ( getMetaRequestCount > 0 && getMetaRequestCount % MaxGetMetaRetryCount == 0 ) { JmsMessageContext msgContext = new JmsMessageContext(aMessage, primitiveServiceQueue1); JmsOutputChannel outputChannel = new JmsOutputChannel(); outputChannel.setServiceInputEndpoint(primitiveServiceQueue1); outputChannel.setServerURI(getBrokerUri()); Endpoint endpoint = msgContext.getEndpoint(); outputChannel.sendReply(getPrimitiveMetadata1(PrimitiveDescriptor1),endpoint, true); } getMetaRequestCount++; getMetaCountLatch.countDown(); // Count down to unblock the thread } } catch( Exception e) { e.printStackTrace(); } } }); consumer.start(); BaseUIMAAsynchronousEngine_impl eeUimaEngine = new BaseUIMAAsynchronousEngine_impl(); String containerId = deployService(eeUimaEngine, relativePath+"/Deploy_AggregateAnnotator.xml"); Map<String,Object> appCtx = new HashMap(); appCtx.put(UimaAsynchronousEngine.ServerUri, String.valueOf(broker.getMasterConnectorURI())); appCtx.put(UimaAsynchronousEngine.Endpoint, "TopLevelTaeQueue"); appCtx.put(UimaAsynchronousEngine.CasPoolSize, Integer.valueOf(4) ); appCtx.put(UimaAsynchronousEngine.ReplyWindow, 15 ); appCtx.put(UimaAsynchronousEngine.Timeout, 0 ); initialize(eeUimaEngine, appCtx); System.out.println("TestBroker.testGetMetaRetry()-Blocking On GetMeta Latch. Awaiting GetMeta Requests"); /*********************************************************************************/ /**** This Code Will Block Until Expected Number Of GetMeta Requests Arrive ******/ getMetaCountLatch.await(); /*********************************************************************************/ consumer.stop(); connection.stop(); eeUimaEngine.undeploy(containerId); eeUimaEngine.stop(); } public ProcessingResourceMetaData getPrimitiveMetadata1(String aDescriptor) throws Exception { ResourceSpecifier resourceSpecifier = UimaClassFactory.produceResourceSpecifier(aDescriptor); return ((AnalysisEngineDescription) resourceSpecifier).getAnalysisEngineMetaData(); } private static boolean deleteAllFiles(File directory) { if (directory.isDirectory()) { String[] files = directory.list(); for (int i=0; i<files.length; i++) { deleteAllFiles(new File(directory, files[i])); } } // Now have an empty directory or simple file return directory.delete(); } private class TestListener implements UimaASStatusCallbackListener, Runnable { private String casReferenceId = null; private TestUimaASExtended tester; private boolean running = false; private Object monitor = new Object(); public TestListener(TestUimaASExtended aTester) { tester = aTester; } public void collectionProcessComplete(EntityProcessStatus arg0) { // TODO Auto-generated method stub } public void entityProcessComplete(CAS aCAS, EntityProcessStatus aProcessStatus) { if (aProcessStatus.isException()) { if ( aProcessStatus instanceof UimaASProcessStatus ) { casReferenceId = ((UimaASProcessStatus)aProcessStatus).getCasReferenceId(); if ( casReferenceId != null ) { synchronized(monitor) { monitor.notifyAll(); } } } } } public void initializationComplete(EntityProcessStatus arg0) { // TODO Auto-generated method stub } public String getCasReferenceId() { synchronized( monitor) { while( casReferenceId == null ) { try { monitor.wait(); }catch( InterruptedException e) {} } } return casReferenceId; } public void doStop() { running = false; } public void run() { System.out.println("Stopping Callback Listener Thread"); } } }
UIMA-1293 - committing Burn's new test cases git-svn-id: dd361d0afbe84f3eb97f7061549e905c2c5df34b@747548 13f79535-47bb-0310-9956-ffa450edef68
uima-as/uimaj-as-activemq/src/test/java/org/apache/uima/ee/test/TestUimaASExtended.java
UIMA-1293 - committing Burn's new test cases
Java
apache-2.0
25b5bf78028117dc12eced428c5b68cc137d87b1
0
gbif/registry,gbif/registry
/* * Copyright 2020 Global Biodiversity Information Facility (GBIF) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gbif.registry.search.dataset; import org.gbif.api.model.registry.search.DatasetSearchParameter; import org.gbif.api.vocabulary.Continent; import org.gbif.api.vocabulary.Country; import org.gbif.api.vocabulary.DatasetSubtype; import org.gbif.api.vocabulary.DatasetType; import org.gbif.api.vocabulary.EndpointType; import org.gbif.api.vocabulary.License; import org.gbif.registry.search.dataset.common.EsFieldMapper; import java.util.List; import java.util.Map; import org.elasticsearch.common.lucene.search.function.CombineFunction; import org.elasticsearch.common.lucene.search.function.FieldValueFactorFunction; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.functionscore.FieldValueFactorFunctionBuilder; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import com.google.common.collect.ImmutableBiMap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; public class DatasetEsFieldMapper implements EsFieldMapper<DatasetSearchParameter> { private static final ImmutableBiMap<DatasetSearchParameter, String> SEARCH_TO_ES_MAPPING = ImmutableBiMap.<DatasetSearchParameter, String>builder() .put(DatasetSearchParameter.TAXON_KEY, "taxonKey") .put(DatasetSearchParameter.CONTINENT, "continent") .put(DatasetSearchParameter.COUNTRY, "country") .put(DatasetSearchParameter.PUBLISHING_COUNTRY, "publishingCountry") .put(DatasetSearchParameter.YEAR, "year") .put(DatasetSearchParameter.DECADE, "decade") .put(DatasetSearchParameter.HOSTING_ORG, "hostingOrganizationKey") .put(DatasetSearchParameter.KEYWORD, "keyword") .put(DatasetSearchParameter.LICENSE, "license") .put(DatasetSearchParameter.MODIFIED_DATE, "modified") .put(DatasetSearchParameter.PROJECT_ID, "project.identifier") .put(DatasetSearchParameter.PUBLISHING_ORG, "publishingOrganizationKey") .put(DatasetSearchParameter.RECORD_COUNT, "occurrenceCount") .put(DatasetSearchParameter.SUBTYPE, "subtype") .put(DatasetSearchParameter.TYPE, "type") .put(DatasetSearchParameter.DATASET_TITLE, "title") .put(DatasetSearchParameter.DOI, "doi") .put(DatasetSearchParameter.NETWORK_KEY, "networkKeys") .put(DatasetSearchParameter.INSTALLATION_KEY, "installationKey") .put(DatasetSearchParameter.ENDPOINT_TYPE, "endpoints.type") .build(); public static final Map<String, Integer> CARDINALITIES = ImmutableMap.<String, Integer>builder() .put("license", License.values().length) .put("country", Country.values().length) .put("publishingCountry", Country.values().length) .put("continent", Continent.values().length) .put("type", DatasetType.values().length) .put("subtype", DatasetSubtype.values().length) .put("endpoints.type", EndpointType.values().length) .build(); private static final String[] EXCLUDE_FIELDS = new String[] {"all"}; private static final String[] DATASET_TITLE_SUGGEST_FIELDS = new String[] {"title", "type", "subtype", "description"}; private static final String[] DATASET_HIGHLIGHT_FIELDS = new String[] {"title", "description"}; private static final FieldValueFactorFunctionBuilder FULLTEXT_SCORE_FUNCTION = ScoreFunctionBuilders.fieldValueFactorFunction("dataScore") .modifier(FieldValueFactorFunction.Modifier.LN2P) .missing(0d); private static final FieldSortBuilder[] SORT = new FieldSortBuilder[] { SortBuilders.fieldSort("dataScore").order(SortOrder.ASC), SortBuilders.fieldSort("created").order(SortOrder.DESC) }; public static final List<String> DATE_FIELDS = ImmutableList.of("modified", "created", "pubDate"); @Override public DatasetSearchParameter get(String esField) { return SEARCH_TO_ES_MAPPING.inverse().get(esField); } @Override public boolean isDateField(String esFieldName) { return DATE_FIELDS.contains(esFieldName); } @Override public Integer getCardinality(String esFieldName) { return CARDINALITIES.get(esFieldName); } @Override public String get(DatasetSearchParameter datasetSearchParameter) { return SEARCH_TO_ES_MAPPING.get(datasetSearchParameter); } @Override public String[] excludeFields() { return EXCLUDE_FIELDS; } @Override public SortBuilder<? extends SortBuilder>[] sorts() { return SORT; } @Override public String[] includeSuggestFields(DatasetSearchParameter searchParameter) { if (DatasetSearchParameter.DATASET_TITLE == searchParameter) { return DATASET_TITLE_SUGGEST_FIELDS; } return new String[] {SEARCH_TO_ES_MAPPING.get(searchParameter)}; } @Override public String[] highlightingFields() { return DATASET_HIGHLIGHT_FIELDS; } @Override public String[] getMappedFields() { return new String[] { "title", "type", "subtype", "description", "publishingOrganizationKey", "publishingOrganizationTitle", "hostingOrganizationKey", "hostingOrganizationTitle", "publishingCountry", "license", "projectId", "nameUsagesCount", "occurrenceCount", "keyword", "decade", "countryCoverage", "doi", "networkKeys", "networkTitle" }; } @Override public QueryBuilder fullTextQuery(String q) { return new FunctionScoreQueryBuilder( QueryBuilders.multiMatchQuery(q) .field("doi", 25.0f) .field("title", 20.0f) .field("keyword", 10.0f) .field("description", 8.0f) .field("publishingOrganizationTitle", 5.0f) .field("hostingOrganizationTitle", 5.0f) .field("networkTitle", 4.0f) .field("metadata", 3.0f) .field("projectId", 2.0f) .field("all", 1.0f) .tieBreaker(0.2f) .minimumShouldMatch("25%") .slop(100), FULLTEXT_SCORE_FUNCTION) .boostMode(CombineFunction.MULTIPLY); } }
registry-search/src/main/java/org/gbif/registry/search/dataset/DatasetEsFieldMapper.java
/* * Copyright 2020 Global Biodiversity Information Facility (GBIF) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gbif.registry.search.dataset; import org.gbif.api.model.registry.search.DatasetSearchParameter; import org.gbif.api.vocabulary.Continent; import org.gbif.api.vocabulary.Country; import org.gbif.api.vocabulary.DatasetSubtype; import org.gbif.api.vocabulary.DatasetType; import org.gbif.api.vocabulary.License; import org.gbif.registry.search.dataset.common.EsFieldMapper; import java.util.List; import java.util.Map; import org.elasticsearch.common.lucene.search.function.CombineFunction; import org.elasticsearch.common.lucene.search.function.FieldValueFactorFunction; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.index.query.functionscore.FieldValueFactorFunctionBuilder; import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder; import org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders; import org.elasticsearch.search.sort.FieldSortBuilder; import org.elasticsearch.search.sort.SortBuilder; import org.elasticsearch.search.sort.SortBuilders; import org.elasticsearch.search.sort.SortOrder; import com.google.common.collect.ImmutableBiMap; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; public class DatasetEsFieldMapper implements EsFieldMapper<DatasetSearchParameter> { private static final ImmutableBiMap<DatasetSearchParameter, String> SEARCH_TO_ES_MAPPING = ImmutableBiMap.<DatasetSearchParameter, String>builder() .put(DatasetSearchParameter.TAXON_KEY, "taxonKey") .put(DatasetSearchParameter.CONTINENT, "continent") .put(DatasetSearchParameter.COUNTRY, "country") .put(DatasetSearchParameter.PUBLISHING_COUNTRY, "publishingCountry") .put(DatasetSearchParameter.YEAR, "year") .put(DatasetSearchParameter.DECADE, "decade") .put(DatasetSearchParameter.HOSTING_ORG, "hostingOrganizationKey") .put(DatasetSearchParameter.KEYWORD, "keyword") .put(DatasetSearchParameter.LICENSE, "license") .put(DatasetSearchParameter.MODIFIED_DATE, "modified") .put(DatasetSearchParameter.PROJECT_ID, "project.identifier") .put(DatasetSearchParameter.PUBLISHING_ORG, "publishingOrganizationKey") .put(DatasetSearchParameter.RECORD_COUNT, "occurrenceCount") .put(DatasetSearchParameter.SUBTYPE, "subtype") .put(DatasetSearchParameter.TYPE, "type") .put(DatasetSearchParameter.DATASET_TITLE, "title") .put(DatasetSearchParameter.DOI, "doi") .put(DatasetSearchParameter.NETWORK_KEY, "networkKeys") .build(); public static final Map<String, Integer> CARDINALITIES = ImmutableMap.<String, Integer>builder() .put("license", License.values().length) .put("country", Country.values().length) .put("publishingCountry", Country.values().length) .put("continent", Continent.values().length) .put("type", DatasetType.values().length) .put("subtype", DatasetSubtype.values().length) .build(); private static final String[] EXCLUDE_FIELDS = new String[] {"all"}; private static final String[] DATASET_TITLE_SUGGEST_FIELDS = new String[] {"title", "type", "subtype", "description"}; private static final String[] DATASET_HIGHLIGHT_FIELDS = new String[] {"title", "description"}; private static final FieldValueFactorFunctionBuilder FULLTEXT_SCORE_FUNCTION = ScoreFunctionBuilders.fieldValueFactorFunction("dataScore") .modifier(FieldValueFactorFunction.Modifier.LN2P) .missing(0d); private static final FieldSortBuilder[] SORT = new FieldSortBuilder[] { SortBuilders.fieldSort("dataScore").order(SortOrder.ASC), SortBuilders.fieldSort("created").order(SortOrder.DESC) }; public static final List<String> DATE_FIELDS = ImmutableList.of("modified", "created", "pubDate"); @Override public DatasetSearchParameter get(String esField) { return SEARCH_TO_ES_MAPPING.inverse().get(esField); } @Override public boolean isDateField(String esFieldName) { return DATE_FIELDS.contains(esFieldName); } @Override public Integer getCardinality(String esFieldName) { return CARDINALITIES.get(esFieldName); } @Override public String get(DatasetSearchParameter datasetSearchParameter) { return SEARCH_TO_ES_MAPPING.get(datasetSearchParameter); } @Override public String[] excludeFields() { return EXCLUDE_FIELDS; } @Override public SortBuilder<? extends SortBuilder>[] sorts() { return SORT; } @Override public String[] includeSuggestFields(DatasetSearchParameter searchParameter) { if (DatasetSearchParameter.DATASET_TITLE == searchParameter) { return DATASET_TITLE_SUGGEST_FIELDS; } return new String[] {SEARCH_TO_ES_MAPPING.get(searchParameter)}; } @Override public String[] highlightingFields() { return DATASET_HIGHLIGHT_FIELDS; } @Override public String[] getMappedFields() { return new String[] { "title", "type", "subtype", "description", "publishingOrganizationKey", "publishingOrganizationTitle", "hostingOrganizationKey", "hostingOrganizationTitle", "publishingCountry", "license", "projectId", "nameUsagesCount", "occurrenceCount", "keyword", "decade", "countryCoverage", "doi", "networkKeys", "networkTitle" }; } @Override public QueryBuilder fullTextQuery(String q) { return new FunctionScoreQueryBuilder( QueryBuilders.multiMatchQuery(q) .field("doi", 25.0f) .field("title", 20.0f) .field("keyword", 10.0f) .field("description", 8.0f) .field("publishingOrganizationTitle", 5.0f) .field("hostingOrganizationTitle", 5.0f) .field("networkTitle", 4.0f) .field("metadata", 3.0f) .field("projectId", 2.0f) .field("all", 1.0f) .tieBreaker(0.2f) .minimumShouldMatch("25%") .slop(100), FULLTEXT_SCORE_FUNCTION) .boostMode(CombineFunction.MULTIPLY); } }
https://github.com/gbif/registry/issues/148
registry-search/src/main/java/org/gbif/registry/search/dataset/DatasetEsFieldMapper.java
https://github.com/gbif/registry/issues/148
Java
apache-2.0
ee17a8df3c14abfc1675d3a9373740262cbed798
0
Sargul/dbeaver,Sargul/dbeaver,dbeaver/dbeaver,dbeaver/dbeaver,serge-rider/dbeaver,serge-rider/dbeaver,Sargul/dbeaver,serge-rider/dbeaver,Sargul/dbeaver,Sargul/dbeaver,dbeaver/dbeaver,dbeaver/dbeaver,serge-rider/dbeaver
/* * DBeaver - Universal Database Manager * Copyright (C) 2013-2015 Denis Forveille ([email protected]) * Copyright (C) 2010-2019 Serge Rider ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.ext.db2.model; import org.jkiss.code.NotNull; import org.jkiss.dbeaver.DBException; import org.jkiss.dbeaver.model.impl.jdbc.JDBCUtils; import org.jkiss.dbeaver.model.meta.Property; import java.nio.charset.StandardCharsets; import java.sql.ResultSet; /** * DB2 Package Statement * * @author Denis Forveille */ public class DB2PackageStatement extends DB2Object<DB2Package> { private static final int MAX_LENGTH_TEXT = 132; private Integer lineNumber; private String text; private String uniqueId; private String version; // ----------------------- // Constructors // ----------------------- public DB2PackageStatement(DB2Package db2Package, ResultSet resultSet) throws DBException { super(db2Package, String.valueOf(JDBCUtils.safeGetInteger(resultSet, "SECTNO")), true); this.lineNumber = JDBCUtils.safeGetInteger(resultSet, "STMTNO"); this.text = JDBCUtils.safeGetString(resultSet, "TEXT"); this.version = JDBCUtils.safeGetString(resultSet, "VERSION"); this.uniqueId = new String(JDBCUtils.safeGetBytes(resultSet, "UNIQUE_ID"), StandardCharsets.UTF_8); } // ----------------- // Properties // ----------------- @NotNull @Override @Property(viewable = true, order = 1) public String getName() { return super.getName(); } @Property(viewable = true, order = 2) public Integer getLineNumber() { return lineNumber; } @Property(viewable = true, order = 3) public String getUniqueId() { return uniqueId; } @Property(viewable = true, order = 4) public String getVersion() { return version; } @Property(viewable = true, order = 5) public String getTextPreview() { return text.substring(0, Math.min(MAX_LENGTH_TEXT, text.length())); } @Property(viewable = false, order = 6) public String getText() { return text; } }
plugins/org.jkiss.dbeaver.ext.db2/src/org/jkiss/dbeaver/ext/db2/model/DB2PackageStatement.java
/* * DBeaver - Universal Database Manager * Copyright (C) 2013-2015 Denis Forveille ([email protected]) * Copyright (C) 2010-2019 Serge Rider ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.jkiss.dbeaver.ext.db2.model; import org.jkiss.code.NotNull; import org.jkiss.dbeaver.DBException; import org.jkiss.dbeaver.model.impl.jdbc.JDBCUtils; import org.jkiss.dbeaver.model.meta.Property; import org.jkiss.dbeaver.utils.GeneralUtils; import java.io.UnsupportedEncodingException; import java.sql.ResultSet; /** * DB2 Package Statement * * @author Denis Forveille */ public class DB2PackageStatement extends DB2Object<DB2Package> { private static final int MAX_LENGTH_TEXT = 132; private Integer lineNumber; private String text; private String uniqueId; private String version; // ----------------------- // Constructors // ----------------------- public DB2PackageStatement(DB2Package db2Package, ResultSet resultSet) throws DBException { super(db2Package, String.valueOf(JDBCUtils.safeGetInteger(resultSet, "SECTNO")), true); this.lineNumber = JDBCUtils.safeGetInteger(resultSet, "STMTNO"); this.text = JDBCUtils.safeGetString(resultSet, "TEXT"); this.version = JDBCUtils.safeGetString(resultSet, "VERSION"); try { this.uniqueId = new String(JDBCUtils.safeGetBytes(resultSet, "UNIQUE_ID"), GeneralUtils.UTF8_ENCODING); } catch (UnsupportedEncodingException e) { } } // ----------------- // Properties // ----------------- @NotNull @Override @Property(viewable = true, order = 1) public String getName() { return super.getName(); } @Property(viewable = true, order = 2) public Integer getLineNumber() { return lineNumber; } @Property(viewable = true, order = 3) public String getUniqueId() { return uniqueId; } @Property(viewable = true, order = 4) public String getVersion() { return version; } @Property(viewable = true, order = 5) public String getTextPreview() { return text.substring(0, Math.min(MAX_LENGTH_TEXT, text.length())); } @Property(viewable = false, order = 6) public String getText() { return text; } }
Code cleanup Former-commit-id: 1649693fc0448cadeec43ec5fe5f89f1d713b811
plugins/org.jkiss.dbeaver.ext.db2/src/org/jkiss/dbeaver/ext/db2/model/DB2PackageStatement.java
Code cleanup
Java
apache-2.0
58cefb41610fe8a47606ff8f872c400b2439765d
0
AndrewKhitrin/dbeaver,ruspl-afed/dbeaver,liuyuanyuan/dbeaver,AndrewKhitrin/dbeaver,liuyuanyuan/dbeaver,ruspl-afed/dbeaver,ruspl-afed/dbeaver,AndrewKhitrin/dbeaver,liuyuanyuan/dbeaver,liuyuanyuan/dbeaver,liuyuanyuan/dbeaver,ruspl-afed/dbeaver,AndrewKhitrin/dbeaver
/* * Copyright (C) 2010-2013 Serge Rieder * [email protected] * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package org.jkiss.dbeaver.ui.controls.resultset; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.eclipse.core.runtime.IAdaptable; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.core.runtime.IStatus; import org.eclipse.core.runtime.Status; import org.eclipse.core.runtime.jobs.IJobChangeEvent; import org.eclipse.core.runtime.jobs.JobChangeAdapter; import org.eclipse.jface.action.*; import org.eclipse.jface.dialogs.ControlEnableState; import org.eclipse.jface.dialogs.IDialogConstants; import org.eclipse.jface.preference.IPreferenceStore; import org.eclipse.jface.resource.ImageDescriptor; import org.eclipse.jface.text.IFindReplaceTarget; import org.eclipse.jface.util.IPropertyChangeListener; import org.eclipse.jface.util.PropertyChangeEvent; import org.eclipse.jface.viewers.*; import org.eclipse.osgi.util.NLS; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.SashForm; import org.eclipse.swt.dnd.TextTransfer; import org.eclipse.swt.dnd.Transfer; import org.eclipse.swt.events.*; import org.eclipse.swt.graphics.*; import org.eclipse.swt.layout.FillLayout; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.*; import org.eclipse.ui.ISaveablePart2; import org.eclipse.ui.IWorkbenchCommandConstants; import org.eclipse.ui.IWorkbenchPartSite; import org.eclipse.ui.commands.ICommandService; import org.eclipse.ui.menus.CommandContributionItem; import org.eclipse.ui.progress.UIJob; import org.eclipse.ui.themes.ITheme; import org.eclipse.ui.themes.IThemeManager; import org.eclipse.ui.views.properties.IPropertySheetPage; import org.eclipse.ui.views.properties.IPropertySource; import org.eclipse.ui.views.properties.IPropertySourceProvider; import org.jkiss.code.NotNull; import org.jkiss.code.Nullable; import org.jkiss.dbeaver.DBException; import org.jkiss.dbeaver.DBeaverPreferences; import org.jkiss.dbeaver.core.CoreMessages; import org.jkiss.dbeaver.core.DBeaverCore; import org.jkiss.dbeaver.core.DBeaverUI; import org.jkiss.dbeaver.ext.IDataSourceProvider; import org.jkiss.dbeaver.ext.ui.IObjectImageProvider; import org.jkiss.dbeaver.ext.ui.ITooltipProvider; import org.jkiss.dbeaver.model.DBPDataKind; import org.jkiss.dbeaver.model.DBPDataSource; import org.jkiss.dbeaver.model.DBUtils; import org.jkiss.dbeaver.model.data.*; import org.jkiss.dbeaver.model.exec.*; import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor; import org.jkiss.dbeaver.model.runtime.DBRRunnableWithProgress; import org.jkiss.dbeaver.model.struct.*; import org.jkiss.dbeaver.model.virtual.DBVConstants; import org.jkiss.dbeaver.model.virtual.DBVEntityConstraint; import org.jkiss.dbeaver.runtime.RunnableWithResult; import org.jkiss.dbeaver.runtime.RuntimeUtils; import org.jkiss.dbeaver.runtime.VoidProgressMonitor; import org.jkiss.dbeaver.tools.transfer.IDataTransferProducer; import org.jkiss.dbeaver.tools.transfer.database.DatabaseTransferProducer; import org.jkiss.dbeaver.tools.transfer.wizard.DataTransferWizard; import org.jkiss.dbeaver.ui.*; import org.jkiss.dbeaver.ui.controls.lightgrid.*; import org.jkiss.dbeaver.ui.controls.lightgrid.renderers.AbstractRenderer; import org.jkiss.dbeaver.ui.controls.lightgrid.renderers.DefaultRowHeaderRenderer; import org.jkiss.dbeaver.ui.controls.spreadsheet.ISpreadsheetController; import org.jkiss.dbeaver.ui.controls.spreadsheet.Spreadsheet; import org.jkiss.dbeaver.ui.dialogs.ActiveWizardDialog; import org.jkiss.dbeaver.ui.dialogs.ConfirmationDialog; import org.jkiss.dbeaver.ui.dialogs.EditTextDialog; import org.jkiss.dbeaver.ui.dialogs.sql.ViewSQLDialog; import org.jkiss.dbeaver.ui.dialogs.struct.EditConstraintDialog; import org.jkiss.dbeaver.ui.preferences.PrefPageDatabaseGeneral; import org.jkiss.dbeaver.ui.properties.PropertyCollector; import org.jkiss.dbeaver.ui.properties.tabbed.PropertyPageStandard; import org.jkiss.dbeaver.utils.ContentUtils; import org.jkiss.utils.CommonUtils; import java.lang.reflect.InvocationTargetException; import java.util.*; import java.util.List; /** * ResultSetViewer */ public class ResultSetViewer extends Viewer implements IDataSourceProvider, ISpreadsheetController, IPropertyChangeListener, ISaveablePart2, IAdaptable { static final Log log = LogFactory.getLog(ResultSetViewer.class); private static final int DEFAULT_ROW_HEADER_WIDTH = 50; private ResultSetValueController panelValueController; private static final String VIEW_PANEL_VISIBLE = "viewPanelVisible"; private static final String VIEW_PANEL_RATIO = "viewPanelRatio"; public enum GridMode { GRID, RECORD } public enum RowPosition { FIRST, PREVIOUS, NEXT, LAST } private final IWorkbenchPartSite site; private final Composite viewerPanel; private Composite filtersPanel; private ControlEnableState filtersEnableState; private Combo filtersText; private Text statusLabel; private final SashForm resultsSash; private final Spreadsheet spreadsheet; private final ViewValuePanel previewPane; private final ResultSetProvider resultSetProvider; private final ResultSetDataReceiver dataReceiver; private final IThemeManager themeManager; private ToolBarManager toolBarManager; // Current row/col number private int curRowNum = -1; private int curColNum = -1; // Mode private GridMode gridMode; private final Map<ResultSetValueController, DBDValueEditorStandalone> openEditors = new HashMap<ResultSetValueController, DBDValueEditorStandalone>(); private final List<ResultSetListener> listeners = new ArrayList<ResultSetListener>(); // UI modifiers private final Color colorRed; private Color backgroundAdded; private Color backgroundDeleted; private Color backgroundModified; private Color backgroundOdd; private final Color foregroundNull; private final Font boldFont; private volatile ResultSetDataPumpJob dataPumpJob; private ResultSetFindReplaceTarget findReplaceTarget; private final ResultSetModel model = new ResultSetModel(); private boolean showOddRows = true; private boolean showCelIcons = true; public ResultSetViewer(Composite parent, IWorkbenchPartSite site, ResultSetProvider resultSetProvider) { super(); /* if (!adapterRegistered) { ResultSetAdapterFactory nodesAdapter = new ResultSetAdapterFactory(); IAdapterManager mgr = Platform.getAdapterManager(); mgr.registerAdapters(nodesAdapter, ResultSetProvider.class); mgr.registerAdapters(nodesAdapter, IPageChangeProvider.class); adapterRegistered = true; } */ this.site = site; this.gridMode = GridMode.GRID; this.resultSetProvider = resultSetProvider; this.dataReceiver = new ResultSetDataReceiver(this); this.colorRed = Display.getDefault().getSystemColor(SWT.COLOR_RED); this.foregroundNull = parent.getDisplay().getSystemColor(SWT.COLOR_GRAY); this.boldFont = UIUtils.makeBoldFont(parent.getFont()); this.viewerPanel = UIUtils.createPlaceholder(parent, 1); UIUtils.setHelp(this.viewerPanel, IHelpContextIds.CTX_RESULT_SET_VIEWER); createFiltersPanel(); { resultsSash = new SashForm(viewerPanel, SWT.HORIZONTAL | SWT.SMOOTH); resultsSash.setLayoutData(new GridData(GridData.FILL_BOTH)); resultsSash.setSashWidth(5); //resultsSash.setBackground(resultsSash.getDisplay().getSystemColor(SWT.COLOR_WIDGET_BACKGROUND)); this.spreadsheet = new Spreadsheet( resultsSash, SWT.MULTI | SWT.VIRTUAL | SWT.H_SCROLL | SWT.V_SCROLL, site, this, new ContentProvider(), new ContentLabelProvider(), new ColumnLabelProvider(), new RowLabelProvider()); this.spreadsheet.setTopLeftRenderer(new TopLeftRenderer(this.spreadsheet)); this.spreadsheet.setLayoutData(new GridData(GridData.FILL_BOTH)); this.previewPane = new ViewValuePanel(resultsSash) { @Override protected void hidePanel() { togglePreview(); } }; final IPreferenceStore preferences = getPreferences(); int ratio = preferences.getInt(VIEW_PANEL_RATIO); boolean viewPanelVisible = preferences.getBoolean(VIEW_PANEL_VISIBLE); if (ratio <= 0) { ratio = 750; } resultsSash.setWeights(new int[]{ratio, 1000 - ratio}); if (!viewPanelVisible) { resultsSash.setMaximizedControl(spreadsheet); } previewPane.addListener(SWT.Resize, new Listener() { @Override public void handleEvent(Event event) { DBPDataSource dataSource = getDataSource(); if (dataSource != null) { if (!resultsSash.isDisposed()) { int[] weights = resultsSash.getWeights(); int ratio = weights[0]; preferences.setValue(VIEW_PANEL_RATIO, ratio); } } } }); } createStatusBar(viewerPanel); changeMode(GridMode.GRID); this.themeManager = site.getWorkbenchWindow().getWorkbench().getThemeManager(); this.themeManager.addPropertyChangeListener(this); this.spreadsheet.addDisposeListener(new DisposeListener() { @Override public void widgetDisposed(DisposeEvent e) { dispose(); } }); this.spreadsheet.addCursorChangeListener(new Listener() { @Override public void handleEvent(Event event) { updateGridCursor(event.x, event.y); } }); //this.spreadsheet.setTopLeftRenderer(new TopLeftRenderer(this)); applyThemeSettings(); spreadsheet.addFocusListener(new FocusListener() { @Override public void focusGained(FocusEvent e) { updateToolbar(); } @Override public void focusLost(FocusEvent e) { updateToolbar(); } }); this.spreadsheet.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { fireSelectionChanged(new SelectionChangedEvent(ResultSetViewer.this, new ResultSetSelectionImpl())); } }); } //////////////////////////////////////////////////////////// // Filters private void createFiltersPanel() { filtersPanel = new Composite(viewerPanel, SWT.NONE); filtersPanel.setLayoutData(new GridData(GridData.FILL_HORIZONTAL)); GridLayout gl = new GridLayout(5, false); gl.marginHeight = 3; gl.marginWidth = 3; filtersPanel.setLayout(gl); Button sourceQueryButton = new Button(filtersPanel, SWT.PUSH | SWT.NO_FOCUS); sourceQueryButton.setImage(DBIcon.SQL_TEXT.getImage()); sourceQueryButton.setText("SQL"); sourceQueryButton.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { String queryText = model.getStatistics() == null ? null : model.getStatistics().getQueryText(); if (queryText == null || queryText.isEmpty()) { queryText = "<empty>"; } ViewSQLDialog dialog = new ViewSQLDialog(site, getDataSource(), "Query Text", DBIcon.SQL_TEXT.getImage(), queryText); dialog.setEnlargeViewPanel(false); dialog.setWordWrap(true); dialog.open(); } }); Button customizeButton = new Button(filtersPanel, SWT.PUSH | SWT.NO_FOCUS); customizeButton.setImage(DBIcon.FILTER.getImage()); customizeButton.setText("Filters"); customizeButton.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { new ResultSetFilterDialog(ResultSetViewer.this).open(); } }); //UIUtils.createControlLabel(filtersPanel, " Filter"); this.filtersText = new Combo(filtersPanel, SWT.BORDER | SWT.DROP_DOWN); this.filtersText.setLayoutData(new GridData(GridData.FILL_HORIZONTAL)); this.filtersText.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { setCustomDataFilter(); } }); { // Register filters text in focus service UIUtils.addFocusTracker(site, UIUtils.INLINE_WIDGET_EDITOR_ID, this.filtersText); this.filtersText.addDisposeListener(new DisposeListener() { @Override public void widgetDisposed(DisposeEvent e) { // Unregister from focus service UIUtils.removeFocusTracker(ResultSetViewer.this.site, filtersText); dispose(); } }); } // Handle all shortcuts by filters editor, not by host editor this.filtersText.addFocusListener(new FocusListener() { private boolean activated = false; @Override public void focusGained(FocusEvent e) { if (!activated) { UIUtils.enableHostEditorKeyBindings(site, false); activated = true; } } @Override public void focusLost(FocusEvent e) { if (activated) { UIUtils.enableHostEditorKeyBindings(site, true); activated = false; } } }); final Button applyButton = new Button(filtersPanel, SWT.PUSH | SWT.NO_FOCUS); applyButton.setText("Apply"); applyButton.setToolTipText("Apply filter criteria"); applyButton.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { setCustomDataFilter(); } }); applyButton.setEnabled(false); final Button clearButton = new Button(filtersPanel, SWT.PUSH | SWT.NO_FOCUS); clearButton.setText("X"); clearButton.setToolTipText("Remove all filters"); clearButton.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { resetDataFilter(true); } }); clearButton.setEnabled(false); this.filtersText.addModifyListener(new ModifyListener() { @Override public void modifyText(ModifyEvent e) { if (filtersEnableState == null) { String filterText = filtersText.getText(); applyButton.setEnabled(true); clearButton.setEnabled(!CommonUtils.isEmpty(filterText)); } } }); filtersPanel.addTraverseListener(new TraverseListener() { @Override public void keyTraversed(TraverseEvent e) { if (e.detail == SWT.TRAVERSE_RETURN) { setCustomDataFilter(); e.doit = false; e.detail = SWT.TRAVERSE_NONE; } } }); filtersEnableState = ControlEnableState.disable(filtersPanel); } public void resetDataFilter(boolean refresh) { setDataFilter(model.createDataFilter(), refresh); } private void setCustomDataFilter() { DBPDataSource dataSource = getDataSource(); if (dataSource == null) { return; } String condition = filtersText.getText(); StringBuilder currentCondition = new StringBuilder(); model.getDataFilter().appendConditionString(dataSource, currentCondition); if (currentCondition.toString().trim().equals(condition.trim())) { // The same return; } DBDDataFilter newFilter = model.createDataFilter(); newFilter.setWhere(condition); setDataFilter(newFilter, true); spreadsheet.setFocus(); } public void updateFiltersText() { boolean enableFilters = false; DBPDataSource dataSource = getDataSource(); if (dataSource != null) { StringBuilder where = new StringBuilder(); model.getDataFilter().appendConditionString(dataSource, where); String whereCondition = where.toString().trim(); filtersText.setText(whereCondition); if (!whereCondition.isEmpty()) { addFiltersHistory(whereCondition); } if (resultSetProvider.isReadyToRun() && !model.isUpdateInProgress() && (!CommonUtils.isEmpty(whereCondition) || (getModel().getVisibleColumnCount() > 0 && supportsDataFilter()))) { enableFilters = true; } } if (enableFilters) { if (filtersEnableState != null) { filtersEnableState.restore(); filtersEnableState = null; } } else if (filtersEnableState == null) { filtersEnableState = ControlEnableState.disable(filtersPanel); } } private void addFiltersHistory(String whereCondition) { int historyCount = filtersText.getItemCount(); for (int i = 0; i < historyCount; i++) { if (filtersText.getItem(i).equals(whereCondition)) { if (i > 0) { // Move to beginning filtersText.remove(i); break; } else { return; } } } filtersText.add(whereCondition, 0); filtersText.setText(whereCondition); } public void setDataFilter(final DBDDataFilter dataFilter, boolean refreshData) { if (!CommonUtils.equalObjects(model.getDataFilter(), dataFilter)) { if (model.setDataFilter(dataFilter)) { refreshSpreadsheet(true, true); } if (refreshData) { reorderResultSet(true, new Runnable() { @Override public void run() { resetColumnOrdering(); } }); } } this.updateFiltersText(); } //////////////////////////////////////////////////////////// // Misc IPreferenceStore getPreferences() { return DBeaverCore.getGlobalPreferenceStore(); } @Override public DBPDataSource getDataSource() { DBSDataContainer dataContainer = getDataContainer(); return dataContainer == null ? null : dataContainer.getDataSource(); } public IFindReplaceTarget getFindReplaceTarget() { if (findReplaceTarget == null) { findReplaceTarget = new ResultSetFindReplaceTarget(this); } return findReplaceTarget; } @Nullable @Override public Object getAdapter(Class adapter) { if (adapter == IPropertySheetPage.class) { // Show cell properties PropertyPageStandard page = new PropertyPageStandard(); page.setPropertySourceProvider(new IPropertySourceProvider() { @Nullable @Override public IPropertySource getPropertySource(Object object) { if (object instanceof GridPos) { final GridPos cell = translateVisualPos((GridPos) object); if (isValidCell(cell)) { final ResultSetValueController valueController = new ResultSetValueController( cell, DBDValueController.EditType.NONE, null); PropertyCollector props = new PropertyCollector(valueController.getAttribute(), false); props.collectProperties(); valueController.getValueHandler().contributeProperties(props, valueController); return props; } } return null; } }); return page; } else if (adapter == IFindReplaceTarget.class) { return getFindReplaceTarget(); } return null; } public void addListener(ResultSetListener listener) { synchronized (listeners) { listeners.add(listener); } } public void removeListener(ResultSetListener listener) { synchronized (listeners) { listeners.remove(listener); } } private void updateGridCursor(int col, int row) { boolean changed; if (gridMode == GridMode.GRID) { changed = curRowNum != row || curColNum != col; curRowNum = row; curColNum = col; } else { changed = curColNum != row; curColNum = row; } if (changed) { ResultSetPropertyTester.firePropertyChange(ResultSetPropertyTester.PROP_CAN_MOVE); ResultSetPropertyTester.firePropertyChange(ResultSetPropertyTester.PROP_EDITABLE); updateToolbar(); if (col >= 0 && row >= 0) { previewValue(); } } } private void updateRecordMode() { int oldColNum = this.curColNum; this.initResultSet(); this.curColNum = oldColNum; spreadsheet.setCursor(new GridPos(0, oldColNum), false); } void updateEditControls() { ResultSetPropertyTester.firePropertyChange(ResultSetPropertyTester.PROP_EDITABLE); ResultSetPropertyTester.firePropertyChange(ResultSetPropertyTester.PROP_CHANGED); updateToolbar(); } /** * It is a hack function. Generally all command associated widgets should be updated automatically by framework. * Freaking E4 do not do it. I've spent a couple of days fighting it. Guys, you owe me. * TODO: just remove in future. In fact everything must work without it */ private void updateToolbar() { if (toolBarManager.isEmpty()) { return; } for (IContributionItem item : toolBarManager.getItems()) { item.update(); } } void refreshSpreadsheet(boolean columnsChanged, boolean rowsChanged) { if (spreadsheet.isDisposed()) { return; } if (rowsChanged) { if (curRowNum >= model.getRowCount()) { curRowNum = model.getRowCount() - 1; } GridPos curPos = new GridPos(spreadsheet.getCursorPosition()); if (gridMode == GridMode.GRID) { if (curPos.row >= model.getRowCount()) { curPos.row = model.getRowCount() - 1; } } this.spreadsheet.reinitState(columnsChanged); // Set cursor on new row if (gridMode == GridMode.GRID) { spreadsheet.setCursor(curPos, false); } else { updateRecordMode(); } } else { this.spreadsheet.redrawGrid(); } } private void createStatusBar(Composite parent) { UIUtils.createHorizontalLine(parent); Composite statusBar = new Composite(parent, SWT.NONE); GridData gd = new GridData(GridData.FILL_HORIZONTAL); statusBar.setLayoutData(gd); GridLayout gl = new GridLayout(4, false); gl.marginWidth = 0; gl.marginHeight = 3; //gl.marginBottom = 5; statusBar.setLayout(gl); statusLabel = new Text(statusBar, SWT.READ_ONLY); gd = new GridData(GridData.FILL_HORIZONTAL); statusLabel.setLayoutData(gd); statusLabel.addMouseListener(new MouseAdapter() { @Override public void mouseDoubleClick(MouseEvent e) { EditTextDialog.showText(site.getShell(), CoreMessages.controls_resultset_viewer_dialog_status_title, statusLabel.getText()); } }); /* IAction viewMessageAction = new Action("View status message", DBIcon.TREE_INFO.getImageDescriptor()) { public void run() { } }; */ toolBarManager = new ToolBarManager(SWT.FLAT | SWT.HORIZONTAL); // handle own commands toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_APPLY_CHANGES)); toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_REJECT_CHANGES)); toolBarManager.add(new Separator()); toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_ROW_EDIT)); toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_ROW_ADD)); toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_ROW_COPY)); toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_ROW_DELETE)); toolBarManager.add(new Separator()); toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_ROW_FIRST)); toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_ROW_PREVIOUS)); toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_ROW_NEXT)); toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_ROW_LAST)); toolBarManager.add(new Separator()); // Link to standard Find/Replace action - it has to be handled by owner site toolBarManager.add(ActionUtils.makeCommandContribution(site, IWorkbenchCommandConstants.EDIT_FIND_AND_REPLACE, CommandContributionItem.STYLE_PUSH, DBIcon.FIND_TEXT.getImageDescriptor())); // Use simple action for refresh to avoid ambiguous behaviour of F5 shortcut //toolBarManager.add(ActionUtils.makeCommandContribution(site, IWorkbenchCommandConstants.FILE_REFRESH, "Refresh result set", DBIcon.RS_REFRESH.getImageDescriptor())); Action refreshAction = new Action(CoreMessages.controls_resultset_viewer_action_refresh, DBIcon.RS_REFRESH.getImageDescriptor()) { @Override public void run() { refresh(); } }; toolBarManager.add(refreshAction); toolBarManager.add(new Separator()); toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_TOGGLE_MODE, CommandContributionItem.STYLE_CHECK)); toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_TOGGLE_PREVIEW, CommandContributionItem.STYLE_CHECK)); toolBarManager.add(new ConfigAction()); toolBarManager.createControl(statusBar); //updateEditControls(); } public Spreadsheet getSpreadsheet() { return spreadsheet; } public DBSDataContainer getDataContainer() { return resultSetProvider.getDataContainer(); } // Update all columns ordering private void resetColumnOrdering() { if (!spreadsheet.isDisposed() && gridMode == GridMode.GRID) { List<DBDAttributeBinding> visibleColumns = model.getVisibleColumns(); for (int i = 0, metaColumnsLength = visibleColumns.size(); i < metaColumnsLength; i++) { DBDAttributeBinding column = visibleColumns.get(i); DBDAttributeConstraint constraint = model.getDataFilter().getConstraint(column); GridColumn gridColumn = spreadsheet.getColumn(i); if (constraint == null || constraint.getOrderPosition() == 0) { gridColumn.setSort(SWT.DEFAULT); } else { gridColumn.setSort(constraint.isOrderDescending() ? SWT.UP : SWT.DOWN); } } spreadsheet.redrawGrid(); } } //////////////////////////////////////////////////////////// // Grid/Record mode public GridMode getGridMode() { return gridMode; } public void toggleMode() { changeMode(gridMode == GridMode.GRID ? GridMode.RECORD : GridMode.GRID); // Refresh elements ICommandService commandService = (ICommandService) site.getService(ICommandService.class); if (commandService != null) { commandService.refreshElements(ResultSetCommandHandler.CMD_TOGGLE_MODE, null); } } private void changeMode(GridMode gridMode) { int oldRowNum = this.curRowNum, oldColNum = this.curColNum; int rowCount = model.getRowCount(); if (rowCount > 0) { // Fix row number if needed if (oldRowNum < 0) { oldRowNum = this.curRowNum = 0; } else if (oldRowNum >= rowCount) { oldRowNum = this.curRowNum = rowCount - 1; } } this.gridMode = gridMode; if (this.gridMode == GridMode.GRID) { this.spreadsheet.setRowHeaderWidth(DEFAULT_ROW_HEADER_WIDTH); this.initResultSet(); } else { this.resetRecordHeaderWidth(); this.updateRecordMode(); } if (gridMode == GridMode.GRID) { if (oldRowNum >= 0 && oldRowNum < spreadsheet.getItemCount()) { spreadsheet.setCursor(new GridPos(oldColNum, oldRowNum), false); } } else { if (oldColNum >= 0) { spreadsheet.setCursor(new GridPos(0, oldColNum), false); } } spreadsheet.layout(true, true); previewValue(); } private void resetRecordHeaderWidth() { // Calculate width of spreadsheet panel - use longest column title int defaultWidth = 0; GC gc = new GC(spreadsheet); gc.setFont(spreadsheet.getFont()); for (DBDAttributeBinding column : model.getVisibleColumns()) { Point ext = gc.stringExtent(column.getAttributeName()); if (ext.x > defaultWidth) { defaultWidth = ext.x; } } defaultWidth += DBIcon.EDIT_COLUMN.getImage().getBounds().width + 2; spreadsheet.setRowHeaderWidth(defaultWidth + DEFAULT_ROW_HEADER_WIDTH); } //////////////////////////////////////////////////////////// // Value preview public boolean isPreviewVisible() { return resultsSash.getMaximizedControl() == null; } public void togglePreview() { if (resultsSash.getMaximizedControl() == null) { resultsSash.setMaximizedControl(spreadsheet); } else { resultsSash.setMaximizedControl(null); previewValue(); } getPreferences().setValue(VIEW_PANEL_VISIBLE, isPreviewVisible()); // Refresh elements ICommandService commandService = (ICommandService) site.getService(ICommandService.class); if (commandService != null) { commandService.refreshElements(ResultSetCommandHandler.CMD_TOGGLE_PREVIEW, null); } } void previewValue() { if (!isPreviewVisible()) { return; } GridPos cell = getCurrentPosition(); if (!isValidCell(cell)) { previewPane.clearValue(); return; } cell = translateVisualPos(getCurrentPosition()); if (panelValueController == null || panelValueController.pos.col != cell.col) { panelValueController = new ResultSetValueController( cell, DBDValueController.EditType.PANEL, previewPane.getViewPlaceholder()); } else { panelValueController.setCurRow(model.getRowData(cell.row)); } previewPane.viewValue(panelValueController); } //////////////////////////////////////////////////////////// // Misc private void dispose() { closeEditors(); clearData(); themeManager.removePropertyChangeListener(ResultSetViewer.this); UIUtils.dispose(this.boldFont); if (toolBarManager != null) { try { toolBarManager.dispose(); } catch (Throwable e) { // ignore log.debug("Error disposing toolbar", e); } } } private void applyThemeSettings() { ITheme currentTheme = themeManager.getCurrentTheme(); Font rsFont = currentTheme.getFontRegistry().get(ThemeConstants.FONT_SQL_RESULT_SET); if (rsFont != null) { this.spreadsheet.setFont(rsFont); } Color selBackColor = currentTheme.getColorRegistry().get(ThemeConstants.COLOR_SQL_RESULT_SET_SELECTION_BACK); if (selBackColor != null) { this.spreadsheet.setBackgroundSelected(selBackColor); } Color selForeColor = currentTheme.getColorRegistry().get(ThemeConstants.COLOR_SQL_RESULT_SET_SELECTION_FORE); if (selForeColor != null) { this.spreadsheet.setForegroundSelected(selForeColor); } Color previewBack = currentTheme.getColorRegistry().get(ThemeConstants.COLOR_SQL_RESULT_SET_PREVIEW_BACK); if (previewBack != null) { this.previewPane.getViewPlaceholder().setBackground(previewBack); for (Control control : this.previewPane.getViewPlaceholder().getChildren()) { control.setBackground(previewBack); } } this.backgroundAdded = currentTheme.getColorRegistry().get(ThemeConstants.COLOR_SQL_RESULT_CELL_NEW_BACK); this.backgroundDeleted = currentTheme.getColorRegistry().get(ThemeConstants.COLOR_SQL_RESULT_CELL_DELETED_BACK); this.backgroundModified = currentTheme.getColorRegistry().get(ThemeConstants.COLOR_SQL_RESULT_CELL_MODIFIED_BACK); this.backgroundOdd = currentTheme.getColorRegistry().get(ThemeConstants.COLOR_SQL_RESULT_CELL_ODD_BACK); } @Override public void propertyChange(PropertyChangeEvent event) { if (event.getProperty().startsWith(ThemeConstants.RESULTS_PROP_PREFIX)) { applyThemeSettings(); } } void scrollToRow(RowPosition position) { switch (position) { case FIRST: if (gridMode == GridMode.RECORD) { curRowNum = 0; updateRecordMode(); } else { spreadsheet.shiftCursor(0, -spreadsheet.getItemCount(), false); } break; case PREVIOUS: if (gridMode == GridMode.RECORD && curRowNum > 0) { curRowNum--; updateRecordMode(); } else { spreadsheet.shiftCursor(0, -1, false); } break; case NEXT: if (gridMode == GridMode.RECORD && curRowNum < model.getRowCount() - 1) { curRowNum++; updateRecordMode(); } else { spreadsheet.shiftCursor(0, 1, false); } break; case LAST: if (gridMode == GridMode.RECORD && model.getRowCount() > 0) { curRowNum = model.getRowCount() - 1; updateRecordMode(); } else { spreadsheet.shiftCursor(0, spreadsheet.getItemCount(), false); } break; } } boolean isColumnReadOnly(GridPos pos) { int column; if (gridMode == GridMode.GRID) { column = pos.col; } else { column = pos.row; } return isReadOnly() || model.isColumnReadOnly(column); } boolean isColumnReadOnly(DBDAttributeBinding column) { return isReadOnly() || model.isColumnReadOnly(column); } public int getCurrentRow() { return gridMode == GridMode.GRID ? spreadsheet.getCurrentRow() : curRowNum; } public GridPos getCurrentPosition() { return spreadsheet.getCursorPosition(); } public void setStatus(String status) { setStatus(status, false); } public void setStatus(String status, boolean error) { if (statusLabel.isDisposed()) { return; } if (error) { statusLabel.setForeground(colorRed); } else { statusLabel.setForeground(null); } if (status == null) { status = "???"; //$NON-NLS-1$ } statusLabel.setText(status); } public void updateStatusMessage() { if (model.getRowCount() == 0) { if (model.getVisibleColumnCount() == 0) { setStatus(CoreMessages.controls_resultset_viewer_status_empty + getExecutionTimeMessage()); } else { setStatus(CoreMessages.controls_resultset_viewer_status_no_data + getExecutionTimeMessage()); } } else { if (gridMode == GridMode.RECORD) { this.resetRecordHeaderWidth(); setStatus(CoreMessages.controls_resultset_viewer_status_row + (curRowNum + 1) + "/" + model.getRowCount() + getExecutionTimeMessage()); } else { setStatus(String.valueOf(model.getRowCount()) + CoreMessages.controls_resultset_viewer_status_rows_fetched + getExecutionTimeMessage()); } } } private String getExecutionTimeMessage() { DBCStatistics statistics = model.getStatistics(); if (statistics == null || statistics.isEmpty()) { return ""; } return " - " + RuntimeUtils.formatExecutionTime(statistics.getTotalTime()); } /** * Sets new metadata of result set * @param columns columns metadata * @return true if new metadata differs from old one, false otherwise */ public boolean setMetaData(DBDAttributeBinding[] columns) { if (model.setMetaData(columns)) { this.panelValueController = null; return true; } return false; } public void setData(List<Object[]> rows, boolean updateMetaData) { if (spreadsheet.isDisposed()) { return; } // Clear previous data this.closeEditors(); model.setData(rows, updateMetaData); if (updateMetaData) { if (getPreferenceStore().getBoolean(DBeaverPreferences.RESULT_SET_AUTO_SWITCH_MODE)) { GridMode newMode = (rows.size() == 1) ? GridMode.RECORD : GridMode.GRID; if (newMode != gridMode) { toggleMode(); // ResultSetPropertyTester.firePropertyChange(ResultSetPropertyTester.PROP_CAN_TOGGLE); } } this.initResultSet(); } else { this.refreshSpreadsheet(updateMetaData, true); } updateEditControls(); } public void appendData(List<Object[]> rows) { model.appendData(rows); //refreshSpreadsheet(true); spreadsheet.refreshData(false); setStatus(NLS.bind(CoreMessages.controls_resultset_viewer_status_rows_size, model.getRowCount(), rows.size()) + getExecutionTimeMessage()); updateEditControls(); } private void closeEditors() { List<DBDValueEditorStandalone> editors = new ArrayList<DBDValueEditorStandalone>(openEditors.values()); for (DBDValueEditorStandalone editor : editors) { editor.closeValueEditor(); } if (!openEditors.isEmpty()) { log.warn("Some value editors are still registered at result set: " + openEditors.size()); } openEditors.clear(); } private void initResultSet() { spreadsheet.setRedraw(false); try { spreadsheet.clearGrid(); if (gridMode == GridMode.RECORD) { this.resetRecordHeaderWidth(); } spreadsheet.reinitState(true); } finally { spreadsheet.setRedraw(true); } this.updateFiltersText(); this.updateStatusMessage(); } @Override public int promptToSaveOnClose() { if (!isDirty()) { return ISaveablePart2.YES; } int result = ConfirmationDialog.showConfirmDialog( spreadsheet.getShell(), DBeaverPreferences.CONFIRM_RS_EDIT_CLOSE, ConfirmationDialog.QUESTION_WITH_CANCEL); if (result == IDialogConstants.YES_ID) { return ISaveablePart2.YES; } else if (result == IDialogConstants.NO_ID) { rejectChanges(); return ISaveablePart2.NO; } else { return ISaveablePart2.CANCEL; } } @Override public void doSave(IProgressMonitor monitor) { applyChanges(RuntimeUtils.makeMonitor(monitor)); } @Override public void doSaveAs() { } @Override public boolean isDirty() { return model.isDirty(); } @Override public boolean isSaveAsAllowed() { return false; } @Override public boolean isSaveOnCloseNeeded() { return true; } @Override public boolean hasData() { return model.hasData(); } @Override public boolean isReadOnly() { if (model.isUpdateInProgress()) { return true; } DBSDataContainer dataContainer = getDataContainer(); if (dataContainer == null) { return true; } DBPDataSource dataSource = dataContainer.getDataSource(); return !dataSource.isConnected() || dataSource.getContainer().isConnectionReadOnly() || dataSource.getInfo().isReadOnlyData(); } /** * Checks that specified visual position is valid * @param pos visual grid position * @return true if position is valid */ @Override public boolean isValidCell(GridPos pos) { if (pos == null) { return false; } if (gridMode == GridMode.GRID) { return pos.row >= 0 && pos.row < model.getRowCount() && pos.col >= 0 && pos.col < model.getVisibleColumnCount(); } else { return curRowNum >= 0 && curRowNum < model.getRowCount() && pos.row >= 0; } } /** * Translated visual grid position into model cell position. * Check for grid mode (grid/record) and columns reordering/hiding * @param pos visual position * @return model position */ GridPos translateVisualPos(GridPos pos) { if (gridMode == GridMode.GRID) { DBDAttributeBinding column = model.getVisibleColumn(pos.col); if (column.getAttributeIndex() == pos.col) { return pos; } else { return new GridPos(column.getAttributeIndex(), pos.row); } } else { DBDAttributeBinding column = model.getVisibleColumn(pos.row); return new GridPos(column.getAttributeIndex(), curRowNum); } } /** * Checks that current state of result set allows to insert new rows * @return true if new rows insert is allowed */ @Override public boolean isInsertable() { return !isReadOnly() && model.isSingleSource() && model.getVisibleColumnCount() > 0; } @Nullable @Override public Control showCellEditor( final boolean inline) { // The control that will be the editor must be a child of the Table final GridPos focusCell = spreadsheet.getFocusCell(); //GridPos pos = getPosFromPoint(event.x, event.y); if (focusCell == null || focusCell.row < 0 || focusCell.col < 0) { return null; } if (!isValidCell(focusCell)) { // Out of bounds log.debug("Editor position is out of bounds (" + focusCell.col + ":" + focusCell.row + ")"); return null; } GridPos cell = translateVisualPos(focusCell); if (!inline) { for (ResultSetValueController valueController : openEditors.keySet()) { GridPos cellPos = valueController.getCellPos(); if (cellPos != null && cellPos.equalsTo(cell)) { openEditors.get(valueController).showValueEditor(); return null; } } } DBDAttributeBinding metaColumn = model.getColumn(cell.col); final int handlerFeatures = metaColumn.getValueHandler().getFeatures(); if (handlerFeatures == DBDValueHandler.FEATURE_NONE) { return null; } if (inline && (handlerFeatures & DBDValueHandler.FEATURE_INLINE_EDITOR) == 0 && (handlerFeatures & DBDValueHandler.FEATURE_VIEWER) != 0) { // Inline editor isn't supported but panel viewer is // Enable panel if (!isPreviewVisible()) { togglePreview(); } return null; } if (isColumnReadOnly(metaColumn) && inline) { // No inline editors for readonly columns return null; } Composite placeholder = null; if (inline) { if (isReadOnly()) { return null; } spreadsheet.cancelInlineEditor(); placeholder = new Composite(spreadsheet, SWT.NONE); placeholder.setFont(spreadsheet.getFont()); placeholder.setLayout(new FillLayout()); GridData gd = new GridData(GridData.FILL_BOTH); gd.horizontalIndent = 0; gd.verticalIndent = 0; gd.grabExcessHorizontalSpace = true; gd.grabExcessVerticalSpace = true; placeholder.setLayoutData(gd); } ResultSetValueController valueController = new ResultSetValueController( cell, inline ? DBDValueController.EditType.INLINE : DBDValueController.EditType.EDITOR, inline ? placeholder : null); final DBDValueEditor editor; try { editor = metaColumn.getValueHandler().createEditor(valueController); } catch (Exception e) { UIUtils.showErrorDialog(site.getShell(), "Cannot edit value", null, e); return null; } if (editor instanceof DBDValueEditorStandalone) { valueController.registerEditor((DBDValueEditorStandalone)editor); // show dialog in separate job to avoid block new UIJob("Open separate editor") { @Override public IStatus runInUIThread(IProgressMonitor monitor) { ((DBDValueEditorStandalone)editor).showValueEditor(); return Status.OK_STATUS; } }.schedule(); //((DBDValueEditorStandalone)editor).showValueEditor(); } else { // Set editable value if (editor != null) { try { editor.primeEditorValue(valueController.getValue()); } catch (DBException e) { log.error(e); } } } if (inline) { if (editor != null) { spreadsheet.showCellEditor(focusCell, placeholder); return editor.getControl(); } else { // No editor was created so just drop placeholder placeholder.dispose(); // Probably we can just show preview panel if ((handlerFeatures & DBDValueHandler.FEATURE_VIEWER) != 0) { // Inline editor isn't supported but panel viewer is // Enable panel if (!isPreviewVisible()) { togglePreview(); } return null; } } } return null; } @Override public void resetCellValue(GridPos cell, boolean delete) { cell = translateVisualPos(cell); model.resetCellValue(cell, delete); spreadsheet.redrawGrid(); updateEditControls(); previewValue(); } @Override public void fillContextMenu(GridPos curCell, IMenuManager manager) { // Custom oldValue items if (isValidCell(curCell)) { final GridPos cell = translateVisualPos(curCell); final ResultSetValueController valueController = new ResultSetValueController( cell, DBDValueController.EditType.NONE, null); final Object value = valueController.getValue(); // Standard items manager.add(ActionUtils.makeCommandContribution(site, IWorkbenchCommandConstants.EDIT_CUT)); manager.add(ActionUtils.makeCommandContribution(site, IWorkbenchCommandConstants.EDIT_COPY)); manager.add(ActionUtils.makeCommandContribution(site, ICommandIds.CMD_COPY_SPECIAL)); manager.add(ActionUtils.makeCommandContribution(site, IWorkbenchCommandConstants.EDIT_PASTE)); manager.add(ActionUtils.makeCommandContribution(site, IWorkbenchCommandConstants.EDIT_DELETE)); // Edit items manager.add(new Separator()); manager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_ROW_EDIT)); manager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_ROW_EDIT_INLINE)); if (!valueController.isReadOnly() && !DBUtils.isNullValue(value)) { manager.add(new Action(CoreMessages.controls_resultset_viewer_action_set_to_null) { @Override public void run() { valueController.updateValue( DBUtils.makeNullValue(valueController)); } }); } if (model.isCellModified(cell)) { Action resetValueAction = new Action(CoreMessages.controls_resultset_viewer_action_reset_value) { @Override public void run() { resetCellValue(cell, false); } }; resetValueAction.setAccelerator(SWT.ESC); manager.add(resetValueAction); } // Menus from value handler try { manager.add(new Separator()); model.getColumn(cell.col).getValueHandler().contributeActions(manager, valueController); } catch (Exception e) { log.error(e); } } if (curCell.col >= 0 && model.getVisibleColumnCount() > 0 && !model.isUpdateInProgress()) { // Export and other utility methods manager.add(new Separator()); MenuManager filtersMenu = new MenuManager( CoreMessages.controls_resultset_viewer_action_order_filter, DBIcon.FILTER.getImageDescriptor(), "filters"); //$NON-NLS-1$ filtersMenu.setRemoveAllWhenShown(true); filtersMenu.addMenuListener(new IMenuListener() { @Override public void menuAboutToShow(IMenuManager manager) { fillFiltersMenu(manager); } }); manager.add(filtersMenu); manager.add(new Action(CoreMessages.controls_resultset_viewer_action_export, DBIcon.EXPORT.getImageDescriptor()) { @Override public void run() { ActiveWizardDialog dialog = new ActiveWizardDialog( site.getWorkbenchWindow(), new DataTransferWizard( new IDataTransferProducer[] { new DatabaseTransferProducer(getDataContainer(), model.getDataFilter())}, null), getSelection()); dialog.open(); } }); } manager.add(new GroupMarker(ICommandIds.GROUP_TOOLS)); } private void fillFiltersMenu(IMenuManager filtersMenu) { GridPos currentPosition = getCurrentPosition(); int columnIndex = translateVisualPos(currentPosition).col; if (supportsDataFilter() && columnIndex >= 0) { DBDAttributeBinding column = model.getColumn(columnIndex); DBPDataKind dataKind = column.getMetaAttribute().getDataKind(); if (!column.getMetaAttribute().isRequired()) { filtersMenu.add(new FilterByColumnAction("IS NULL", FilterByColumnType.NONE, column)); filtersMenu.add(new FilterByColumnAction("IS NOT NULL", FilterByColumnType.NONE, column)); } for (FilterByColumnType type : FilterByColumnType.values()) { if (type == FilterByColumnType.NONE || (type == FilterByColumnType.VALUE && !isValidCell(currentPosition))) { // Value filters are available only if certain cell is selected continue; } filtersMenu.add(new Separator()); if (type.getValue(this, column, true, DBDDisplayFormat.NATIVE) == null) { continue; } if (dataKind == DBPDataKind.BOOLEAN) { filtersMenu.add(new FilterByColumnAction("= ?", type, column)); filtersMenu.add(new FilterByColumnAction("<> ?", type, column)); } else if (dataKind == DBPDataKind.NUMERIC || dataKind == DBPDataKind.DATETIME) { filtersMenu.add(new FilterByColumnAction("= ?", type, column)); filtersMenu.add(new FilterByColumnAction("<> ?", type, column)); filtersMenu.add(new FilterByColumnAction("> ?", type, column)); filtersMenu.add(new FilterByColumnAction("< ?", type, column)); } else if (dataKind == DBPDataKind.STRING) { filtersMenu.add(new FilterByColumnAction("= '?'", type, column)); filtersMenu.add(new FilterByColumnAction("<> '?'", type, column)); filtersMenu.add(new FilterByColumnAction("> '?'", type, column)); filtersMenu.add(new FilterByColumnAction("< '?'", type, column)); filtersMenu.add(new FilterByColumnAction("LIKE '%?%'", type, column)); filtersMenu.add(new FilterByColumnAction("NOT LIKE '%?%'", type, column)); } } filtersMenu.add(new Separator()); if (!CommonUtils.isEmpty(model.getDataFilter().getConstraint(column).getCriteria())) { filtersMenu.add(new FilterResetColumnAction(column)); } } { final List<GridColumn> selectedColumns = getSpreadsheet().getSelectedColumns(); if (getGridMode() == GridMode.GRID && !selectedColumns.isEmpty()) { String hideTitle; if (selectedColumns.size() == 1) { DBDAttributeBinding columnToHide = model.getColumn(translateVisualPos( new GridPos(selectedColumns.get(0).getIndex(), -1)).col); hideTitle = "Hide column '" + columnToHide.getAttributeName() + "'"; } else { hideTitle = "Hide selected columns (" + selectedColumns.size() + ")"; } filtersMenu.add(new Action(hideTitle) { @Override public void run() { if (selectedColumns.size() >= getModel().getVisibleColumnCount()) { UIUtils.showMessageBox(getControl().getShell(), "Hide columns", "Can't hide all result columns, at least one column must be visible", SWT.ERROR); } else { int[] columnIndexes = new int[selectedColumns.size()]; for (int i = 0, selectedColumnsSize = selectedColumns.size(); i < selectedColumnsSize; i++) { columnIndexes[i] = selectedColumns.get(i).getIndex(); } Arrays.sort(columnIndexes); for (int i = columnIndexes.length; i > 0; i--) { getModel().setColumnVisibility(getModel().getVisibleColumn(columnIndexes[i - 1]), false); } refreshSpreadsheet(true, true); } } }); } } filtersMenu.add(new Separator()); filtersMenu.add(new ToggleServerSideOrderingAction()); filtersMenu.add(new ShowFiltersAction()); } boolean supportsDataFilter() { return (getDataContainer().getSupportedFeatures() & DBSDataContainer.DATA_FILTER) == DBSDataContainer.DATA_FILTER; } @Override public void changeSorting(final GridColumn column, final int state) { DBDDataFilter dataFilter = model.getDataFilter(); boolean ctrlPressed = (state & SWT.CTRL) == SWT.CTRL; boolean altPressed = (state & SWT.ALT) == SWT.ALT; if (ctrlPressed) { dataFilter.resetOrderBy(); } DBDAttributeBinding metaColumn = model.getVisibleColumn(column.getIndex()); DBDAttributeConstraint constraint = dataFilter.getConstraint(metaColumn); //int newSort; if (constraint.getOrderPosition() == 0) { if (isServerSideFiltering() && supportsDataFilter()) { if (!ConfirmationDialog.confirmActionWithParams( spreadsheet.getShell(), DBeaverPreferences.CONFIRM_ORDER_RESULTSET, metaColumn.getAttributeName())) { return; } } constraint.setOrderPosition(dataFilter.getMaxOrderingPosition() + 1); constraint.setOrderDescending(altPressed); } else if (!constraint.isOrderDescending()) { constraint.setOrderDescending(true); } else { for (DBDAttributeConstraint con2 : dataFilter.getConstraints()) { if (con2.getOrderPosition() > constraint.getOrderPosition()) { con2.setOrderPosition(con2.getOrderPosition() - 1); } } constraint.setOrderPosition(0); constraint.setOrderDescending(false); } // Reorder // Use forced reorder if we just removed ordering on some column reorderResultSet(constraint.getOrderPosition() == 0, new Runnable() { @Override public void run() { resetColumnOrdering(); } }); } @Override public Control getControl() { return this.viewerPanel; } public IWorkbenchPartSite getSite() { return site; } public ResultSetModel getModel() { return model; } @Override public ResultSetModel getInput() { return model; } @Override public void setInput(Object input) { throw new IllegalArgumentException("ResultSet model can't be changed"); } @Override public ResultSetSelection getSelection() { return new ResultSetSelectionImpl(); } @Override public void setSelection(ISelection selection, boolean reveal) { if (selection instanceof ResultSetSelectionImpl && ((ResultSetSelectionImpl) selection).getResultSetViewer() == this) { // It may occur on simple focus change so we won't do anything return; } spreadsheet.deselectAllCells(); if (!selection.isEmpty() && selection instanceof IStructuredSelection) { List<GridPos> cellSelection = new ArrayList<GridPos>(); for (Iterator iter = ((IStructuredSelection) selection).iterator(); iter.hasNext(); ) { Object cell = iter.next(); if (cell instanceof GridPos) { cellSelection.add((GridPos) cell); } else { log.warn("Bad selection object: " + cell); } } spreadsheet.selectCells(cellSelection); if (reveal) { spreadsheet.showSelection(); } } fireSelectionChanged(new SelectionChangedEvent(this, selection)); } public DBDDataReceiver getDataReceiver() { return dataReceiver; } @Override public void refresh() { // Check if we are dirty if (isDirty()) { switch (promptToSaveOnClose()) { case ISaveablePart2.CANCEL: return; case ISaveablePart2.YES: // Apply changes applyChanges(null, new ResultSetPersister.DataUpdateListener() { @Override public void onUpdate(boolean success) { if (success) { getControl().getDisplay().asyncExec(new Runnable() { @Override public void run() { refresh(); } }); } } }); return; default: // Just ignore previous RS values break; } } // Cache preferences IPreferenceStore preferenceStore = getPreferenceStore(); showOddRows = preferenceStore.getBoolean(DBeaverPreferences.RESULT_SET_SHOW_ODD_ROWS); showCelIcons = preferenceStore.getBoolean(DBeaverPreferences.RESULT_SET_SHOW_CELL_ICONS); // Pump data int oldRowNum = curRowNum; int oldColNum = curColNum; if (resultSetProvider != null && resultSetProvider.isReadyToRun() && getDataContainer() != null && dataPumpJob == null) { int segmentSize = getSegmentMaxRows(); if (oldRowNum >= segmentSize && segmentSize > 0) { segmentSize = (oldRowNum / segmentSize + 1) * segmentSize; } runDataPump(0, segmentSize, new GridPos(oldColNum, oldRowNum), new Runnable() { @Override public void run() { if (!supportsDataFilter() && !model.getDataFilter().hasOrdering()) { reorderLocally(); } } }); } } private boolean isServerSideFiltering() { return getPreferenceStore().getBoolean(DBeaverPreferences.RESULT_SET_ORDER_SERVER_SIDE) && (dataReceiver.isHasMoreData() || !CommonUtils.isEmpty(model.getDataFilter().getOrder())); } private void reorderResultSet(boolean force, Runnable onSuccess) { if (force || isServerSideFiltering() && supportsDataFilter()) { if (resultSetProvider != null && resultSetProvider.isReadyToRun() && getDataContainer() != null && dataPumpJob == null) { int segmentSize = getSegmentMaxRows(); if (curRowNum >= segmentSize && segmentSize > 0) { segmentSize = (curRowNum / segmentSize + 1) * segmentSize; } runDataPump(0, segmentSize, new GridPos(curColNum, curRowNum), onSuccess); } return; } try { reorderLocally(); } finally { if (onSuccess != null) { onSuccess.run(); } } } private void reorderLocally() { rejectChanges(); model.resetOrdering(); } synchronized void readNextSegment() { if (!dataReceiver.isHasMoreData()) { return; } if (getDataContainer() != null && !model.isUpdateInProgress() && dataPumpJob == null) { dataReceiver.setHasMoreData(false); dataReceiver.setNextSegmentRead(true); runDataPump(model.getRowCount(), getSegmentMaxRows(), null, null); } } int getSegmentMaxRows() { if (getDataContainer() == null) { return 0; } return getPreferenceStore().getInt(DBeaverPreferences.RESULT_SET_MAX_ROWS); } public IPreferenceStore getPreferenceStore() { DBPDataSource dataSource = getDataSource(); if (dataSource != null) { return dataSource.getContainer().getPreferenceStore(); } return DBeaverCore.getGlobalPreferenceStore(); } private synchronized void runDataPump( final int offset, final int maxRows, @Nullable final GridPos oldPos, @Nullable final Runnable finalizer) { if (dataPumpJob == null) { dataPumpJob = new ResultSetDataPumpJob(this); dataPumpJob.addJobChangeListener(new JobChangeAdapter() { @Override public void done(IJobChangeEvent event) { ResultSetDataPumpJob job = (ResultSetDataPumpJob)event.getJob(); final Throwable error = job.getError(); if (job.getStatistics() != null) { model.setStatistics(job.getStatistics()); } Display.getDefault().asyncExec(new Runnable() { @Override public void run() { Control control = getControl(); if (control == null || control.isDisposed()) { return; } final Shell shell = control.getShell(); if (error != null) { setStatus(error.getMessage(), true); UIUtils.showErrorDialog( shell, "Error executing query", "Query execution failed", error); } else if (oldPos != null) { // Seems to be refresh // Restore original position ResultSetViewer.this.curRowNum = Math.min(oldPos.row, model.getRowCount() - 1); ResultSetViewer.this.curColNum = Math.min(oldPos.col, model.getVisibleColumnCount() - 1); GridPos newPos; if (gridMode == GridMode.GRID) { newPos = new GridPos(curColNum, curRowNum); } else { if (ResultSetViewer.this.curRowNum < 0 && model.getRowCount() > 0) { ResultSetViewer.this.curRowNum = 0; } newPos = new GridPos(0, curColNum); } spreadsheet.setCursor(newPos, false); updateStatusMessage(); previewValue(); } else { spreadsheet.redraw(); } updateFiltersText(); if (finalizer != null) { finalizer.run(); } dataPumpJob = null; } }); } }); dataPumpJob.setOffset(offset); dataPumpJob.setMaxRows(maxRows); dataPumpJob.schedule(); } } private void clearData() { model.clearData(); this.curRowNum = -1; this.curColNum = -1; } public void applyChanges(@Nullable DBRProgressMonitor monitor) { applyChanges(monitor, null); } /** * Saves changes to database * @param monitor monitor. If null then save will be executed in async job * @param listener finish listener (may be null) */ public void applyChanges(@Nullable DBRProgressMonitor monitor, @Nullable ResultSetPersister.DataUpdateListener listener) { if (!model.isSingleSource()) { UIUtils.showErrorDialog(getControl().getShell(), "Apply changes error", "Can't save data for result set from multiple sources"); return; } try { if (!model.getRemovedRows().isEmpty() || !model.getEditedValues().isEmpty()) { // If we have deleted or updated rows then check for unique identifier if (!checkVirtualEntityIdentifier()) { //UIUtils.showErrorDialog(getControl().getShell(), "Can't apply changes", "Can't apply data changes - not unique identifier defined"); return; } } new ResultSetPersister(this).applyChanges(monitor, listener); } catch (DBException e) { UIUtils.showErrorDialog(getControl().getShell(), "Apply changes error", "Error saving changes in database", e); } } public void rejectChanges() { new ResultSetPersister(this).rejectChanges(); } public void copySelectionToClipboard( boolean copyHeader, boolean copyRowNumbers, boolean cut, String delimiter, DBDDisplayFormat format) { if (delimiter == null) { delimiter = "\t"; } String lineSeparator = ContentUtils.getDefaultLineSeparator(); List<Integer> colsSelected = new ArrayList<Integer>(); int firstCol = Integer.MAX_VALUE, lastCol = Integer.MIN_VALUE; int firstRow = Integer.MAX_VALUE; Collection<GridPos> selection = spreadsheet.getSelection(); for (GridPos pos : selection) { if (firstCol > pos.col) { firstCol = pos.col; } if (lastCol < pos.col) { lastCol = pos.col; } if (firstRow > pos.row) { firstRow = pos.row; } if (!colsSelected.contains(pos.col)) { colsSelected.add(pos.col); } } ILabelProvider rowLabelProvider = this.spreadsheet.getRowLabelProvider(); int rowNumber = 0; StringBuilder tdt = new StringBuilder(); if (copyHeader) { if (copyRowNumbers) { tdt.append(rowLabelProvider.getText(-1)); } for (int colIndex : colsSelected) { GridColumn column = spreadsheet.getColumn(colIndex); if (tdt.length() > 0) { tdt.append(delimiter); } tdt.append(column.getText()); } tdt.append(lineSeparator); } if (copyRowNumbers) { tdt.append(rowLabelProvider.getText(rowNumber++)).append(delimiter); } int prevRow = firstRow; int prevCol = firstCol; for (GridPos pos : selection) { if (pos.row > prevRow) { if (prevCol < lastCol) { for (int i = prevCol; i < lastCol; i++) { if (colsSelected.contains(i)) { tdt.append(delimiter); } } } tdt.append(lineSeparator); if (copyRowNumbers) { tdt.append(rowLabelProvider.getText(rowNumber++)).append(delimiter); } prevRow = pos.row; prevCol = firstCol; } if (pos.col > prevCol) { for (int i = prevCol; i < pos.col; i++) { if (colsSelected.contains(i)) { tdt.append(delimiter); } } prevCol = pos.col; } GridPos cellPos = translateVisualPos(pos); Object[] curRow = model.getRowData(cellPos.row); Object value = curRow[cellPos.col]; DBDAttributeBinding column = model.getColumn(cellPos.col); String cellText = column.getValueHandler().getValueDisplayString( column.getMetaAttribute(), value, format); if (cellText != null) { tdt.append(cellText); } if (cut) { DBDValueController valueController = new ResultSetValueController( cellPos, DBDValueController.EditType.NONE, null); if (!valueController.isReadOnly()) { valueController.updateValue(DBUtils.makeNullValue(valueController)); } } } if (tdt.length() > 0) { TextTransfer textTransfer = TextTransfer.getInstance(); getSpreadsheet().getClipboard().setContents( new Object[]{tdt.toString()}, new Transfer[]{textTransfer}); } } public void pasteCellValue() { GridPos cell = getCurrentPosition(); if (cell == null) { return; } cell = translateVisualPos(cell); DBDAttributeBinding metaColumn = model.getColumn(cell.col); if (isColumnReadOnly(metaColumn)) { // No inline editors for readonly columns return; } try { Object newValue = getColumnValueFromClipboard(metaColumn); if (newValue == null) { return; } new ResultSetValueController( cell, DBDValueController.EditType.NONE, null).updateValue(newValue); } catch (Exception e) { UIUtils.showErrorDialog(site.getShell(), "Cannot replace cell value", null, e); } } @Nullable private Object getColumnValueFromClipboard(DBDAttributeBinding metaColumn) throws DBCException { DBPDataSource dataSource = getDataSource(); if (dataSource == null) { return null; } DBCSession session = dataSource.openSession(VoidProgressMonitor.INSTANCE, DBCExecutionPurpose.UTIL, "Copy from clipboard"); try { String strValue = (String) getSpreadsheet().getClipboard().getContents(TextTransfer.getInstance()); return metaColumn.getValueHandler().getValueFromObject( session, metaColumn.getMetaAttribute(), strValue, true); } finally { session.close(); } } void addNewRow(final boolean copyCurrent) { GridPos curPos = spreadsheet.getCursorPosition(); int rowNum; if (gridMode == GridMode.RECORD) { rowNum = this.curRowNum; } else { rowNum = curPos.row; } if (rowNum < 0) { rowNum = 0; } model.shiftRows(rowNum, 1); final DBPDataSource dataSource = getDataSource(); if (dataSource == null) { return; } // Add new row final DBDAttributeBinding[] columns = model.getColumns(); final Object[] cells = new Object[columns.length]; final int currentRowNumber = rowNum; try { DBeaverUI.runInProgressService(new DBRRunnableWithProgress() { @Override public void run(DBRProgressMonitor monitor) throws InvocationTargetException, InterruptedException { // Copy cell values in new context DBCSession session = dataSource.openSession(monitor, DBCExecutionPurpose.UTIL, CoreMessages.controls_resultset_viewer_add_new_row_context_name); try { if (copyCurrent && currentRowNumber >= 0 && currentRowNumber < model.getRowCount()) { Object[] origRow = model.getRowData(currentRowNumber); for (int i = 0; i < columns.length; i++) { DBDAttributeBinding metaColumn = columns[i]; DBSAttributeBase attribute = metaColumn.getAttribute(); if (attribute.isAutoGenerated() || attribute.isPseudoAttribute()) { // set pseudo and autoincrement columns to null cells[i] = null; } else { try { cells[i] = metaColumn.getValueHandler().getValueFromObject(session, attribute, origRow[i], true); } catch (DBCException e) { log.warn(e); try { cells[i] = DBUtils.makeNullValue(session, metaColumn.getValueHandler(), attribute); } catch (DBCException e1) { log.warn(e1); } } } } } else { // Initialize new values for (int i = 0; i < columns.length; i++) { DBDAttributeBinding metaColumn = columns[i]; try { cells[i] = DBUtils.makeNullValue(session, metaColumn.getValueHandler(), metaColumn.getAttribute()); } catch (DBCException e) { log.warn(e); } } } } finally { session.close(); } } }); } catch (InvocationTargetException e) { log.error("Could not create new row", e.getTargetException()); } catch (InterruptedException e) { // interrupted - do nothing } model.addNewRow(rowNum, cells); refreshSpreadsheet(false, true); updateEditControls(); fireResultSetChange(); } void deleteSelectedRows() { GridPos curPos = spreadsheet.getCursorPosition(); TreeSet<Integer> rowNumbers = new TreeSet<Integer>(); if (gridMode == GridMode.RECORD) { rowNumbers.add(this.curRowNum); } else { for (GridPos pos : spreadsheet.getSelection()) { rowNumbers.add(pos.row); } } for (Iterator<Integer> iter = rowNumbers.iterator(); iter.hasNext(); ) { int rowNum = iter.next(); if (rowNum < 0 || rowNum >= model.getRowCount()) { iter.remove(); } } if (rowNumbers.isEmpty()) { return; } int rowsRemoved = 0; int lastRowNum = -1; for (Iterator<Integer> iter = rowNumbers.descendingIterator(); iter.hasNext(); ) { int rowNum = iter.next(); if (rowNum > lastRowNum) { lastRowNum = rowNum; } if (model.deleteRow(rowNum)) { rowsRemoved++; } } // Move one row down (if we are in grid mode) if (gridMode == GridMode.GRID && lastRowNum < spreadsheet.getItemCount() - 1) { curPos.row = lastRowNum - rowsRemoved + 1; spreadsheet.setCursor(curPos, false); } if (rowsRemoved > 0) { refreshSpreadsheet(false, true); } else { spreadsheet.redrawGrid(); } updateEditControls(); fireResultSetChange(); } static Image getTypeImage(DBSTypedObject column) { if (column instanceof IObjectImageProvider) { return ((IObjectImageProvider)column).getObjectImage(); } else { return DBIcon.TREE_COLUMN.getImage(); } } ////////////////////////////////// // Virtual identifier management @Nullable DBCEntityIdentifier getVirtualEntityIdentifier() { if (!model.isSingleSource() || model.getVisibleColumnCount() == 0) { return null; } DBDRowIdentifier rowIdentifier = model.getVisibleColumn(0).getRowIdentifier(); DBCEntityIdentifier identifier = rowIdentifier == null ? null : rowIdentifier.getEntityIdentifier(); if (identifier != null && identifier.getReferrer() instanceof DBVEntityConstraint) { return identifier; } else { return null; } } boolean checkVirtualEntityIdentifier() throws DBException { // Check for value locators // Probably we have only virtual one with empty column set final DBCEntityIdentifier identifier = getVirtualEntityIdentifier(); if (identifier != null) { if (CommonUtils.isEmpty(identifier.getAttributes())) { // Empty identifier. We have to define it RunnableWithResult<Boolean> confirmer = new RunnableWithResult<Boolean>() { @Override public void run() { result = ValidateUniqueKeyUsageDialog.validateUniqueKey(ResultSetViewer.this); } }; UIUtils.runInUI(getControl().getShell(), confirmer); return confirmer.getResult(); } } return true; } boolean editEntityIdentifier(DBRProgressMonitor monitor) throws DBException { DBCEntityIdentifier virtualEntityIdentifier = getVirtualEntityIdentifier(); if (virtualEntityIdentifier == null) { log.warn("No virtual identifier"); return false; } DBVEntityConstraint constraint = (DBVEntityConstraint) virtualEntityIdentifier.getReferrer(); EditConstraintDialog dialog = new EditConstraintDialog( getControl().getShell(), "Define virtual unique identifier", constraint); if (dialog.open() != IDialogConstants.OK_ID) { return false; } Collection<DBSEntityAttribute> uniqueColumns = dialog.getSelectedColumns(); constraint.setAttributes(uniqueColumns); virtualEntityIdentifier = getVirtualEntityIdentifier(); if (virtualEntityIdentifier == null) { log.warn("No virtual identifier defined"); return false; } virtualEntityIdentifier.reloadAttributes(monitor, model.getVisibleColumn(0).getMetaAttribute().getEntity()); DBPDataSource dataSource = getDataSource(); if (dataSource != null) { dataSource.getContainer().persistConfiguration(); } return true; } void clearEntityIdentifier(DBRProgressMonitor monitor) throws DBException { DBDAttributeBinding firstColumn = model.getVisibleColumn(0); DBCEntityIdentifier identifier = firstColumn.getRowIdentifier().getEntityIdentifier(); DBVEntityConstraint virtualKey = (DBVEntityConstraint) identifier.getReferrer(); virtualKey.setAttributes(Collections.<DBSEntityAttribute>emptyList()); identifier.reloadAttributes(monitor, firstColumn.getMetaAttribute().getEntity()); virtualKey.getParentObject().setProperty(DBVConstants.PROPERTY_USE_VIRTUAL_KEY_QUIET, null); DBPDataSource dataSource = getDataSource(); if (dataSource != null) { dataSource.getContainer().persistConfiguration(); } } void fireResultSetChange() { synchronized (listeners) { if (!listeners.isEmpty()) { for (ResultSetListener listener : listeners) { listener.handleResultSetChange(); } } } } ///////////////////////////// // Value controller private class ResultSetValueController implements DBDAttributeController, DBDRowController { private final GridPos pos; private final EditType editType; private final Composite inlinePlaceholder; private Object[] curRow; private final DBDAttributeBinding column; private ResultSetValueController(GridPos pos, EditType editType, @Nullable Composite inlinePlaceholder) { this.curRow = model.getRowData(pos.row); this.pos = new GridPos(pos); this.editType = editType; this.inlinePlaceholder = inlinePlaceholder; this.column = model.getColumn(pos.col); } void setCurRow(Object[] curRow) { this.curRow = curRow; } @Nullable @Override public DBPDataSource getDataSource() { return ResultSetViewer.this.getDataSource(); } @Override public String getValueName() { return getAttribute().getName(); } @Override public DBSTypedObject getValueType() { return getAttribute(); } @Override public DBDRowController getRow() { return this; } @Override public DBCAttributeMetaData getAttribute() { return column.getMetaAttribute(); } @Override public String getColumnId() { DBPDataSource dataSource = getDataSource(); return DBUtils.getSimpleQualifiedName( dataSource == null ? null : dataSource.getContainer().getName(), getAttribute().getEntityName(), getAttribute().getName()); } @Override public Object getValue() { return curRow[pos.col]; } @Override public void updateValue(Object value) { if (model.updateCellValue(pos.row, pos.col, value)) { // Update controls site.getShell().getDisplay().syncExec(new Runnable() { @Override public void run() { updateEditControls(); spreadsheet.redrawGrid(); previewValue(); } }); } fireResultSetChange(); } @Override public DBDRowIdentifier getValueLocator() { return column.getRowIdentifier(); } @Override public DBDValueHandler getValueHandler() { return column.getValueHandler(); } @Override public EditType getEditType() { return editType; } @Override public boolean isReadOnly() { return isColumnReadOnly(column); } @Override public IWorkbenchPartSite getValueSite() { return site; } @Nullable @Override public Composite getEditPlaceholder() { return inlinePlaceholder; } @Nullable @Override public ToolBar getEditToolBar() { return isPreviewVisible() ? previewPane.getToolBar() : null; } @Override public void closeInlineEditor() { spreadsheet.cancelInlineEditor(); } @Override public void nextInlineEditor(boolean next) { spreadsheet.cancelInlineEditor(); int colOffset = next ? 1 : -1; int rowOffset = 0; //final int rowCount = spreadsheet.getItemCount(); final int colCount = spreadsheet.getColumnCount(); final GridPos curPosition = spreadsheet.getCursorPosition(); if (colOffset > 0 && curPosition.col + colOffset >= colCount) { colOffset = -colCount; rowOffset = 1; } else if (colOffset < 0 && curPosition.col + colOffset < 0) { colOffset = colCount; rowOffset = -1; } spreadsheet.shiftCursor(colOffset, rowOffset, false); showCellEditor(true); } public void registerEditor(DBDValueEditorStandalone editor) { openEditors.put(this, editor); } @Override public void unregisterEditor(DBDValueEditorStandalone editor) { openEditors.remove(this); } @Override public void showMessage(String message, boolean error) { setStatus(message, error); } @Override public Collection<DBCAttributeMetaData> getAttributesMetaData() { List<DBCAttributeMetaData> attributes = new ArrayList<DBCAttributeMetaData>(); for (DBDAttributeBinding column : model.getVisibleColumns()) { attributes.add(column.getMetaAttribute()); } return attributes; } @Nullable @Override public DBCAttributeMetaData getAttributeMetaData(DBCEntityMetaData entity, String columnName) { for (DBDAttributeBinding column : model.getVisibleColumns()) { if (column.getMetaAttribute().getEntity() == entity && column.getAttributeName().equals(columnName)) { return column.getMetaAttribute(); } } return null; } @Nullable @Override public Object getAttributeValue(DBCAttributeMetaData attribute) { DBDAttributeBinding[] columns = model.getColumns(); for (int i = 0; i < columns.length; i++) { DBDAttributeBinding metaColumn = columns[i]; if (metaColumn.getMetaAttribute() == attribute) { return curRow[i]; } } log.warn("Unknown column value requested: " + attribute); return null; } @Nullable private GridPos getCellPos() { if (pos.row >= 0) { return new GridPos(pos.col, pos.row); } else { return null; } } } static class TableRowInfo { DBSEntity table; DBCEntityIdentifier id; List<GridPos> tableCells = new ArrayList<GridPos>(); TableRowInfo(DBSEntity table, DBCEntityIdentifier id) { this.table = table; this.id = id; } } private class ContentProvider implements IGridContentProvider { @Override public int getRowCount() { return (gridMode == GridMode.RECORD) ? model.getVisibleColumnCount() : model.getRowCount(); } @Override public int getColumnCount() { return (gridMode == GridMode.RECORD) ? 1: model.getVisibleColumnCount(); } @Override public Object getElement(@NotNull GridPos pos) { pos = translateVisualPos(pos); if (gridMode == GridMode.RECORD) { return model.getRowData(curRowNum)[pos.row]; } else { return model.getRowData(pos.row)[pos.col]; } } @NotNull @Override public String getElementText(@NotNull GridPos pos) { Object value = getElement(pos); DBDAttributeBinding column = model.getColumn(translateVisualPos(pos).col); return column.getValueHandler().getValueDisplayString(column.getAttribute(), value, DBDDisplayFormat.EDIT); } @Override public void updateColumn(@NotNull GridColumn column) { if (gridMode == GridMode.RECORD) { column.setSort(SWT.NONE); } else { column.setSort(SWT.DEFAULT); int index = column.getIndex(); DBDAttributeConstraint co = model.getDataFilter().getConstraint(model.getVisibleColumn(index)); if (co.getOrderPosition() > 0) { DBDAttributeBinding binding = co.getAttribute(); if (model.getVisibleColumns().indexOf(binding) == index) { column.setSort(co.isOrderDescending() ? SWT.UP : SWT.DOWN); } } column.setSortRenderer(new SortRenderer(column)); } } @Override public void dispose() { } @Override public void inputChanged(Viewer viewer, Object oldInput, Object newInput) { } } private class ContentLabelProvider implements IGridLabelProvider { @Nullable private Object getValue(int col, int row, boolean formatString) { Object value; DBDAttributeBinding column; int rowNum; int rowCount = model.getRowCount(); if (gridMode == GridMode.RECORD) { // Fill record rowNum = curRowNum; if (curRowNum >= rowCount || curRowNum < 0) { //log.warn("Bad current row number: " + curRowNum); return ""; } column = model.getVisibleColumn(row); Object[] values = model.getRowData(curRowNum); if (column.getAttributeIndex() >= values.length) { log.debug("Bad record row number: " + row); return null; } value = values[column.getAttributeIndex()]; } else { rowNum = row; if (row >= rowCount) { log.debug("Bad grid row number: " + row); return null; } if (col >= model.getVisibleColumnCount()) { log.debug("Bad grid column number: " + col); return null; } column = model.getVisibleColumn(col); value = model.getCellValue(row, column.getAttributeIndex()); } if (rowNum > 0 && rowNum == rowCount - 1 && (gridMode == GridMode.RECORD || spreadsheet.isRowVisible(rowNum)) && dataReceiver.isHasMoreData()) { readNextSegment(); } if (formatString) { return column.getValueHandler().getValueDisplayString( column.getMetaAttribute(), value, DBDDisplayFormat.UI); } else { return value; } } @Nullable @Override public Image getImage(int col, int row) { if (!showCelIcons) { return null; } DBDAttributeBinding attr; if (gridMode == GridMode.RECORD) { if (row >= model.getVisibleColumnCount()) { return null; } attr = model.getVisibleColumn(row); } else { if (col >= model.getVisibleColumnCount()) { return null; } attr = model.getVisibleColumn(col); } if ((attr.getValueHandler().getFeatures() & DBDValueHandler.FEATURE_SHOW_ICON) != 0) { return getTypeImage(attr.getMetaAttribute()); } else { return null; } } @Override public String getText(int col, int row) { return String.valueOf(getValue(col, row, true)); } @Nullable @Override public Color getForeground(int col, int row) { Object value = getValue(col, row, false); if (DBUtils.isNullValue(value)) { return foregroundNull; } else { return null; } } @Nullable @Override public Color getBackground(int col, int row) { if (gridMode == GridMode.RECORD) { col = row; row = curRowNum; } if (model.isRowAdded(row)) { return backgroundAdded; } if (model.isRowDeleted(row)) { return backgroundDeleted; } if (model.isDirty() && model.isCellModified( new GridPos(model.getVisibleColumn(col).getAttributeIndex(), row))) { return backgroundModified; } if (row % 2 == 0 && showOddRows) { return backgroundOdd; } return null; } } private class ColumnLabelProvider extends LabelProvider implements IFontProvider, ITooltipProvider { @Nullable @Override public Image getImage(Object element) { if (gridMode == GridMode.GRID) { int colNumber = ((Number)element).intValue(); return getTypeImage(model.getVisibleColumn(colNumber).getMetaAttribute()); } return null; } @Nullable @Override public String getText(Object element) { int colNumber = ((Number)element).intValue(); if (gridMode == GridMode.RECORD) { if (colNumber == 0) { return CoreMessages.controls_resultset_viewer_value; } else { log.warn("Bad column index: " + colNumber); return null; } } else { DBDAttributeBinding metaColumn = model.getVisibleColumn(colNumber); DBCAttributeMetaData attribute = metaColumn.getMetaAttribute(); if (CommonUtils.isEmpty(attribute.getLabel())) { return metaColumn.getAttributeName(); } else { return attribute.getLabel(); } /* return CommonUtils.isEmpty(metaColumn.getMetaData().getEntityName()) ? metaColumn.getMetaData().getName() : metaColumn.getMetaData().getEntityName() + "." + metaColumn.getMetaData().getName(); */ } } @Nullable @Override public Font getFont(Object element) { int colNumber = ((Number)element).intValue(); if (gridMode == GridMode.GRID) { DBDAttributeConstraint constraint = model.getDataFilter().getConstraint(model.getVisibleColumn(colNumber)); if (constraint != null && constraint.hasFilter()) { return boldFont; } } return null; } @Nullable @Override public String getTooltip(Object element) { int colNumber = ((Number)element).intValue(); if (gridMode == GridMode.GRID) { DBDAttributeBinding metaColumn = model.getVisibleColumn(colNumber); String name = metaColumn.getAttributeName(); String typeName = DBUtils.getFullTypeName(metaColumn.getMetaAttribute()); return name + ": " + typeName; } return null; } } private class RowLabelProvider extends LabelProvider { @Nullable @Override public Image getImage(Object element) { if (gridMode == GridMode.RECORD) { int rowNumber = ((Number) element).intValue(); if (rowNumber < 0) return null; return getTypeImage(model.getVisibleColumn(rowNumber).getMetaAttribute()); } return null; } @Nullable @Override public String getText(Object element) { int rowNumber = ((Number) element).intValue(); if (gridMode == GridMode.RECORD) { if (rowNumber < 0) return "Name"; return model.getVisibleColumn(rowNumber).getAttributeName(); } else { if (rowNumber < 0) return "#"; return String.valueOf(rowNumber + 1); } } } private class TopLeftRenderer extends DefaultRowHeaderRenderer { public TopLeftRenderer(LightGrid grid) { super(grid); } } private class ConfigAction extends Action implements IMenuCreator { public ConfigAction() { super(CoreMessages.controls_resultset_viewer_action_options, IAction.AS_DROP_DOWN_MENU); setImageDescriptor(DBIcon.CONFIGURATION.getImageDescriptor()); } @Override public IMenuCreator getMenuCreator() { return this; } @Override public void runWithEvent(Event event) { Menu menu = getMenu(getSpreadsheet()); if (menu != null && event.widget instanceof ToolItem) { Rectangle bounds = ((ToolItem) event.widget).getBounds(); Point point = ((ToolItem) event.widget).getParent().toDisplay(bounds.x, bounds.y + bounds.height); menu.setLocation(point.x, point.y); menu.setVisible(true); } } @Override public void dispose() { } @Override public Menu getMenu(Control parent) { MenuManager menuManager = new MenuManager(); menuManager.add(new ShowFiltersAction()); menuManager.add(new Separator()); menuManager.add(new VirtualKeyEditAction(true)); menuManager.add(new VirtualKeyEditAction(false)); menuManager.add(new DictionaryEditAction()); menuManager.add(new Separator()); menuManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_TOGGLE_MODE, CommandContributionItem.STYLE_CHECK)); menuManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_TOGGLE_PREVIEW, CommandContributionItem.STYLE_CHECK)); menuManager.add(new Separator()); menuManager.add(new Action("Preferences") { @Override public void run() { UIUtils.showPreferencesFor( getControl().getShell(), ResultSetViewer.this, PrefPageDatabaseGeneral.PAGE_ID); } }); return menuManager.createContextMenu(parent); } @Nullable @Override public Menu getMenu(Menu parent) { return null; } } private class ShowFiltersAction extends Action { public ShowFiltersAction() { super(CoreMessages.controls_resultset_viewer_action_order_filter, DBIcon.FILTER.getImageDescriptor()); } @Override public void run() { new ResultSetFilterDialog(ResultSetViewer.this).open(); } } private class ToggleServerSideOrderingAction extends Action { public ToggleServerSideOrderingAction() { super(CoreMessages.pref_page_database_resultsets_label_server_side_order); } @Override public int getStyle() { return AS_CHECK_BOX; } @Override public boolean isChecked() { return getPreferenceStore().getBoolean(DBeaverPreferences.RESULT_SET_ORDER_SERVER_SIDE); } @Override public void run() { IPreferenceStore preferenceStore = getPreferenceStore(); preferenceStore.setValue( DBeaverPreferences.RESULT_SET_ORDER_SERVER_SIDE, !preferenceStore.getBoolean(DBeaverPreferences.RESULT_SET_ORDER_SERVER_SIDE)); } } private enum FilterByColumnType { VALUE(DBIcon.FILTER_VALUE.getImageDescriptor()) { @Override String getValue(ResultSetViewer viewer, DBDAttributeBinding column, boolean useDefault, DBDDisplayFormat format) { Object value = viewer.model.getCellValue( viewer.getCurrentRow(), column.getAttributeIndex()); return column.getValueHandler().getValueDisplayString(column.getMetaAttribute(), value, format); } }, INPUT(DBIcon.FILTER_INPUT.getImageDescriptor()) { @Override String getValue(ResultSetViewer viewer, DBDAttributeBinding column, boolean useDefault, DBDDisplayFormat format) { if (useDefault) { return ".."; } else { return EditTextDialog.editText( viewer.getControl().getShell(), "Enter value", ""); } } }, CLIPBOARD(DBIcon.FILTER_CLIPBOARD.getImageDescriptor()) { @Override String getValue(ResultSetViewer viewer, DBDAttributeBinding column, boolean useDefault, DBDDisplayFormat format) { try { return column.getValueHandler().getValueDisplayString( column.getMetaAttribute(), viewer.getColumnValueFromClipboard(column), format); } catch (DBCException e) { log.debug("Error copying from clipboard", e); return null; } } }, NONE(DBIcon.FILTER_VALUE.getImageDescriptor()) { @Override String getValue(ResultSetViewer viewer, DBDAttributeBinding column, boolean useDefault, DBDDisplayFormat format) { return ""; } }; final ImageDescriptor icon; private FilterByColumnType(ImageDescriptor icon) { this.icon = icon; } @Nullable abstract String getValue(ResultSetViewer viewer, DBDAttributeBinding column, boolean useDefault, DBDDisplayFormat format); } private String translateFilterPattern(String pattern, FilterByColumnType type, DBDAttributeBinding column) { String value = CommonUtils.truncateString( CommonUtils.toString( type.getValue(this, column, true, DBDDisplayFormat.UI)), 30); return pattern.replace("?", value); } private class FilterByColumnAction extends Action { private final String pattern; private final FilterByColumnType type; private final DBDAttributeBinding column; public FilterByColumnAction(String pattern, FilterByColumnType type, DBDAttributeBinding column) { super(column.getAttributeName() + " " + translateFilterPattern(pattern, type, column), type.icon); this.pattern = pattern; this.type = type; this.column = column; } @Override public void run() { String value = type.getValue(ResultSetViewer.this, column, false, DBDDisplayFormat.NATIVE); if (value == null) { return; } String stringValue = pattern.replace("?", value); DBDDataFilter filter = model.getDataFilter(); filter.getConstraint(column).setCriteria(stringValue); updateFiltersText(); refresh(); } } private class FilterResetColumnAction extends Action { private final DBDAttributeBinding column; public FilterResetColumnAction(DBDAttributeBinding column) { super("Remove filter for '" + column.getAttributeName() + "'", DBIcon.REVERT.getImageDescriptor()); this.column = column; } @Override public void run() { model.getDataFilter().getConstraint(column).setCriteria(null); updateFiltersText(); refresh(); } } private class VirtualKeyEditAction extends Action { private boolean define; public VirtualKeyEditAction(boolean define) { super(define ? "Define virtual unique key" : "Clear virtual unique key"); this.define = define; } @Override public boolean isEnabled() { DBCEntityIdentifier identifier = getVirtualEntityIdentifier(); return identifier != null && (define || !CommonUtils.isEmpty(identifier.getAttributes())); } @Override public void run() { DBeaverUI.runUIJob("Edit virtual key", new DBRRunnableWithProgress() { @Override public void run(DBRProgressMonitor monitor) throws InvocationTargetException, InterruptedException { try { if (define) { editEntityIdentifier(monitor); } else { clearEntityIdentifier(monitor); } } catch (DBException e) { throw new InvocationTargetException(e); } } }); } } private class DictionaryEditAction extends Action { public DictionaryEditAction() { super("Define dictionary"); } @Override public void run() { } @Override public boolean isEnabled() { return false; } } private class ResultSetSelectionImpl implements ResultSetSelection { @Nullable @Override public GridPos getFirstElement() { Collection<GridPos> ssSelection = spreadsheet.getSelection(); return ssSelection.isEmpty() ? null : ssSelection.iterator().next(); } @Override public Iterator iterator() { return spreadsheet.getSelection().iterator(); } @Override public int size() { return spreadsheet.getSelection().size(); } @Override public Object[] toArray() { return spreadsheet.getSelection().toArray(); } @Override public List toList() { return new ArrayList<GridPos>(spreadsheet.getSelection()); } @Override public boolean isEmpty() { return spreadsheet.getSelection().isEmpty(); } @Override public ResultSetViewer getResultSetViewer() { return ResultSetViewer.this; } @Override public Collection<ResultSetRow> getSelectedRows() { List<ResultSetRow> rows = new ArrayList<ResultSetRow>(); if (gridMode == GridMode.RECORD) { if (curRowNum < 0 || curRowNum >= model.getRowCount()) { return Collections.emptyList(); } rows.add(new ResultSetRow(ResultSetViewer.this, model.getRowData(curRowNum))); } else { Collection<Integer> rowSelection = spreadsheet.getRowSelection(); for (Integer row : rowSelection) { rows.add(new ResultSetRow(ResultSetViewer.this, model.getRowData(row))); } } return rows; } @Override public boolean equals(Object obj) { return obj instanceof ResultSetSelectionImpl && super.equals(obj); } } /** * The column header sort arrow renderer. */ static class SortRenderer extends AbstractRenderer { private Image asterisk; private Image arrowUp; private Image arrowDown; private GridColumn column; private Cursor hoverCursor; SortRenderer(GridColumn column) { super(column.getParent()); this.column = column; this.asterisk = DBIcon.SORT_UNKNOWN.getImage(); this.arrowUp = DBIcon.SORT_DECREASE.getImage(); this.arrowDown = DBIcon.SORT_INCREASE.getImage(); this.hoverCursor = getDisplay().getSystemCursor(SWT.CURSOR_HAND); Rectangle imgBounds = arrowUp.getBounds(); setSize(imgBounds.width, imgBounds.height); } @Override public void paint(GC gc) { Rectangle bounds = getBounds(); switch (column.getSort()) { case SWT.DEFAULT: gc.drawImage(asterisk, bounds.x, bounds.y); break; case SWT.UP: gc.drawImage(arrowUp, bounds.x, bounds.y); break; case SWT.DOWN: gc.drawImage(arrowDown, bounds.x, bounds.y); break; } /* if (isSelected()) { gc.drawLine(bounds.x, bounds.y, bounds.x + 6, bounds.y); gc.drawLine(bounds.x + 1, bounds.y + 1, bounds.x + 5, bounds.y + 1); gc.drawLine(bounds.x + 2, bounds.y + 2, bounds.x + 4, bounds.y + 2); gc.drawPoint(bounds.x + 3, bounds.y + 3); } else { gc.drawPoint(bounds.x + 3, bounds.y); gc.drawLine(bounds.x + 2, bounds.y + 1, bounds.x + 4, bounds.y + 1); gc.drawLine(bounds.x + 1, bounds.y + 2, bounds.x + 5, bounds.y + 2); gc.drawLine(bounds.x, bounds.y + 3, bounds.x + 6, bounds.y + 3); } */ } @Override public Cursor getHoverCursor() { return hoverCursor; } } /* static class TopLeftRenderer extends AbstractRenderer { private Button cfgButton; public TopLeftRenderer(final ResultSetViewer resultSetViewer) { super(resultSetViewer.getSpreadsheet()); cfgButton = new Button(grid, SWT.FLAT | SWT.NO_FOCUS); cfgButton.setImage(DBIcon.FILTER.getImage()); cfgButton.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { new ResultSetFilterDialog(resultSetViewer).open(); } }); ControlEditor controlEditor = new ControlEditor(grid); controlEditor.setEditor(cfgButton); //cfgButton.setText("..."); } @Override public void setBounds(Rectangle bounds) { Rectangle cfgBounds = new Rectangle(bounds.x + 1, bounds.y + 1, bounds.width - 2, bounds.height - 2); cfgButton.setBounds(bounds); super.setBounds(bounds); } @Override public void paint(GC gc) { //cfgButton.redraw(); //gc.drawImage(DBIcon.FILTER.getImage(), 0, 0); } } */ }
plugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/ui/controls/resultset/ResultSetViewer.java
/* * Copyright (C) 2010-2013 Serge Rieder * [email protected] * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package org.jkiss.dbeaver.ui.controls.resultset; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.eclipse.core.runtime.IAdaptable; import org.eclipse.core.runtime.IProgressMonitor; import org.eclipse.core.runtime.IStatus; import org.eclipse.core.runtime.Status; import org.eclipse.core.runtime.jobs.IJobChangeEvent; import org.eclipse.core.runtime.jobs.JobChangeAdapter; import org.eclipse.jface.action.*; import org.eclipse.jface.dialogs.ControlEnableState; import org.eclipse.jface.dialogs.IDialogConstants; import org.eclipse.jface.preference.IPreferenceStore; import org.eclipse.jface.resource.ImageDescriptor; import org.eclipse.jface.text.IFindReplaceTarget; import org.eclipse.jface.util.IPropertyChangeListener; import org.eclipse.jface.util.PropertyChangeEvent; import org.eclipse.jface.viewers.*; import org.eclipse.osgi.util.NLS; import org.eclipse.swt.SWT; import org.eclipse.swt.custom.SashForm; import org.eclipse.swt.dnd.TextTransfer; import org.eclipse.swt.dnd.Transfer; import org.eclipse.swt.events.*; import org.eclipse.swt.graphics.*; import org.eclipse.swt.layout.FillLayout; import org.eclipse.swt.layout.GridData; import org.eclipse.swt.layout.GridLayout; import org.eclipse.swt.widgets.*; import org.eclipse.ui.ISaveablePart2; import org.eclipse.ui.IWorkbenchCommandConstants; import org.eclipse.ui.IWorkbenchPartSite; import org.eclipse.ui.commands.ICommandService; import org.eclipse.ui.menus.CommandContributionItem; import org.eclipse.ui.progress.UIJob; import org.eclipse.ui.themes.ITheme; import org.eclipse.ui.themes.IThemeManager; import org.eclipse.ui.views.properties.IPropertySheetPage; import org.eclipse.ui.views.properties.IPropertySource; import org.eclipse.ui.views.properties.IPropertySourceProvider; import org.jkiss.code.NotNull; import org.jkiss.code.Nullable; import org.jkiss.dbeaver.DBException; import org.jkiss.dbeaver.DBeaverPreferences; import org.jkiss.dbeaver.core.CoreMessages; import org.jkiss.dbeaver.core.DBeaverCore; import org.jkiss.dbeaver.core.DBeaverUI; import org.jkiss.dbeaver.ext.IDataSourceProvider; import org.jkiss.dbeaver.ext.ui.IObjectImageProvider; import org.jkiss.dbeaver.ext.ui.ITooltipProvider; import org.jkiss.dbeaver.model.DBPDataKind; import org.jkiss.dbeaver.model.DBPDataSource; import org.jkiss.dbeaver.model.DBUtils; import org.jkiss.dbeaver.model.data.*; import org.jkiss.dbeaver.model.exec.*; import org.jkiss.dbeaver.model.runtime.DBRProgressMonitor; import org.jkiss.dbeaver.model.runtime.DBRRunnableWithProgress; import org.jkiss.dbeaver.model.struct.*; import org.jkiss.dbeaver.model.virtual.DBVConstants; import org.jkiss.dbeaver.model.virtual.DBVEntityConstraint; import org.jkiss.dbeaver.runtime.RunnableWithResult; import org.jkiss.dbeaver.runtime.RuntimeUtils; import org.jkiss.dbeaver.runtime.VoidProgressMonitor; import org.jkiss.dbeaver.tools.transfer.IDataTransferProducer; import org.jkiss.dbeaver.tools.transfer.database.DatabaseTransferProducer; import org.jkiss.dbeaver.tools.transfer.wizard.DataTransferWizard; import org.jkiss.dbeaver.ui.*; import org.jkiss.dbeaver.ui.controls.lightgrid.*; import org.jkiss.dbeaver.ui.controls.lightgrid.renderers.AbstractRenderer; import org.jkiss.dbeaver.ui.controls.lightgrid.renderers.DefaultRowHeaderRenderer; import org.jkiss.dbeaver.ui.controls.spreadsheet.ISpreadsheetController; import org.jkiss.dbeaver.ui.controls.spreadsheet.Spreadsheet; import org.jkiss.dbeaver.ui.dialogs.ActiveWizardDialog; import org.jkiss.dbeaver.ui.dialogs.ConfirmationDialog; import org.jkiss.dbeaver.ui.dialogs.EditTextDialog; import org.jkiss.dbeaver.ui.dialogs.sql.ViewSQLDialog; import org.jkiss.dbeaver.ui.dialogs.struct.EditConstraintDialog; import org.jkiss.dbeaver.ui.preferences.PrefPageDatabaseGeneral; import org.jkiss.dbeaver.ui.properties.PropertyCollector; import org.jkiss.dbeaver.ui.properties.tabbed.PropertyPageStandard; import org.jkiss.dbeaver.utils.ContentUtils; import org.jkiss.utils.CommonUtils; import java.lang.reflect.InvocationTargetException; import java.util.*; import java.util.List; /** * ResultSetViewer */ public class ResultSetViewer extends Viewer implements IDataSourceProvider, ISpreadsheetController, IPropertyChangeListener, ISaveablePart2, IAdaptable { static final Log log = LogFactory.getLog(ResultSetViewer.class); private static final int DEFAULT_ROW_HEADER_WIDTH = 50; private ResultSetValueController panelValueController; private static final String VIEW_PANEL_VISIBLE = "viewPanelVisible"; private static final String VIEW_PANEL_RATIO = "viewPanelRatio"; public enum GridMode { GRID, RECORD } public enum RowPosition { FIRST, PREVIOUS, NEXT, LAST } private final IWorkbenchPartSite site; private final Composite viewerPanel; private Composite filtersPanel; private ControlEnableState filtersEnableState; private Combo filtersText; private Text statusLabel; private final SashForm resultsSash; private final Spreadsheet spreadsheet; private final ViewValuePanel previewPane; private final ResultSetProvider resultSetProvider; private final ResultSetDataReceiver dataReceiver; private final IThemeManager themeManager; private ToolBarManager toolBarManager; // Current row/col number private int curRowNum = -1; private int curColNum = -1; // Mode private GridMode gridMode; private final Map<ResultSetValueController, DBDValueEditorStandalone> openEditors = new HashMap<ResultSetValueController, DBDValueEditorStandalone>(); private final List<ResultSetListener> listeners = new ArrayList<ResultSetListener>(); // UI modifiers private final Color colorRed; private Color backgroundAdded; private Color backgroundDeleted; private Color backgroundModified; private Color backgroundOdd; private final Color foregroundNull; private final Font boldFont; private volatile ResultSetDataPumpJob dataPumpJob; private ResultSetFindReplaceTarget findReplaceTarget; private final ResultSetModel model = new ResultSetModel(); private boolean showOddRows = true; private boolean showCelIcons = true; public ResultSetViewer(Composite parent, IWorkbenchPartSite site, ResultSetProvider resultSetProvider) { super(); /* if (!adapterRegistered) { ResultSetAdapterFactory nodesAdapter = new ResultSetAdapterFactory(); IAdapterManager mgr = Platform.getAdapterManager(); mgr.registerAdapters(nodesAdapter, ResultSetProvider.class); mgr.registerAdapters(nodesAdapter, IPageChangeProvider.class); adapterRegistered = true; } */ this.site = site; this.gridMode = GridMode.GRID; this.resultSetProvider = resultSetProvider; this.dataReceiver = new ResultSetDataReceiver(this); this.colorRed = Display.getDefault().getSystemColor(SWT.COLOR_RED); this.foregroundNull = parent.getDisplay().getSystemColor(SWT.COLOR_GRAY); this.boldFont = UIUtils.makeBoldFont(parent.getFont()); this.viewerPanel = UIUtils.createPlaceholder(parent, 1); UIUtils.setHelp(this.viewerPanel, IHelpContextIds.CTX_RESULT_SET_VIEWER); createFiltersPanel(); { resultsSash = new SashForm(viewerPanel, SWT.HORIZONTAL | SWT.SMOOTH); resultsSash.setLayoutData(new GridData(GridData.FILL_BOTH)); resultsSash.setSashWidth(5); //resultsSash.setBackground(resultsSash.getDisplay().getSystemColor(SWT.COLOR_WIDGET_BACKGROUND)); this.spreadsheet = new Spreadsheet( resultsSash, SWT.MULTI | SWT.VIRTUAL | SWT.H_SCROLL | SWT.V_SCROLL, site, this, new ContentProvider(), new ContentLabelProvider(), new ColumnLabelProvider(), new RowLabelProvider()); this.spreadsheet.setTopLeftRenderer(new TopLeftRenderer(this.spreadsheet)); this.spreadsheet.setLayoutData(new GridData(GridData.FILL_BOTH)); this.previewPane = new ViewValuePanel(resultsSash) { @Override protected void hidePanel() { togglePreview(); } }; final IPreferenceStore preferences = getPreferences(); int ratio = preferences.getInt(VIEW_PANEL_RATIO); boolean viewPanelVisible = preferences.getBoolean(VIEW_PANEL_VISIBLE); if (ratio <= 0) { ratio = 750; } resultsSash.setWeights(new int[]{ratio, 1000 - ratio}); if (!viewPanelVisible) { resultsSash.setMaximizedControl(spreadsheet); } previewPane.addListener(SWT.Resize, new Listener() { @Override public void handleEvent(Event event) { DBPDataSource dataSource = getDataSource(); if (dataSource != null) { if (!resultsSash.isDisposed()) { int[] weights = resultsSash.getWeights(); int ratio = weights[0]; preferences.setValue(VIEW_PANEL_RATIO, ratio); } } } }); } createStatusBar(viewerPanel); changeMode(GridMode.GRID); this.themeManager = site.getWorkbenchWindow().getWorkbench().getThemeManager(); this.themeManager.addPropertyChangeListener(this); this.spreadsheet.addDisposeListener(new DisposeListener() { @Override public void widgetDisposed(DisposeEvent e) { dispose(); } }); this.spreadsheet.addCursorChangeListener(new Listener() { @Override public void handleEvent(Event event) { updateGridCursor(event.x, event.y); } }); //this.spreadsheet.setTopLeftRenderer(new TopLeftRenderer(this)); applyThemeSettings(); spreadsheet.addFocusListener(new FocusListener() { @Override public void focusGained(FocusEvent e) { updateToolbar(); } @Override public void focusLost(FocusEvent e) { updateToolbar(); } }); this.spreadsheet.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { fireSelectionChanged(new SelectionChangedEvent(ResultSetViewer.this, new ResultSetSelectionImpl())); } }); } //////////////////////////////////////////////////////////// // Filters private void createFiltersPanel() { filtersPanel = new Composite(viewerPanel, SWT.NONE); filtersPanel.setLayoutData(new GridData(GridData.FILL_HORIZONTAL)); GridLayout gl = new GridLayout(5, false); gl.marginHeight = 3; gl.marginWidth = 3; filtersPanel.setLayout(gl); Button sourceQueryButton = new Button(filtersPanel, SWT.PUSH | SWT.NO_FOCUS); sourceQueryButton.setImage(DBIcon.SQL_TEXT.getImage()); sourceQueryButton.setText("SQL"); sourceQueryButton.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { String queryText = model.getStatistics() == null ? null : model.getStatistics().getQueryText(); if (queryText == null || queryText.isEmpty()) { queryText = "<empty>"; } ViewSQLDialog dialog = new ViewSQLDialog(site, getDataSource(), "Query Text", DBIcon.SQL_TEXT.getImage(), queryText); dialog.setEnlargeViewPanel(false); dialog.setWordWrap(true); dialog.open(); } }); Button customizeButton = new Button(filtersPanel, SWT.PUSH | SWT.NO_FOCUS); customizeButton.setImage(DBIcon.FILTER.getImage()); customizeButton.setText("Filters"); customizeButton.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { new ResultSetFilterDialog(ResultSetViewer.this).open(); } }); //UIUtils.createControlLabel(filtersPanel, " Filter"); this.filtersText = new Combo(filtersPanel, SWT.BORDER | SWT.DROP_DOWN); this.filtersText.setLayoutData(new GridData(GridData.FILL_HORIZONTAL)); this.filtersText.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { setCustomDataFilter(); } }); { // Register filters text in focus service UIUtils.addFocusTracker(site, UIUtils.INLINE_WIDGET_EDITOR_ID, this.filtersText); this.filtersText.addDisposeListener(new DisposeListener() { @Override public void widgetDisposed(DisposeEvent e) { // Unregister from focus service UIUtils.removeFocusTracker(ResultSetViewer.this.site, filtersText); dispose(); } }); } // Handle all shortcuts by filters editor, not by host editor this.filtersText.addFocusListener(new FocusListener() { private boolean activated = false; @Override public void focusGained(FocusEvent e) { if (!activated) { UIUtils.enableHostEditorKeyBindings(site, false); activated = true; } } @Override public void focusLost(FocusEvent e) { if (activated) { UIUtils.enableHostEditorKeyBindings(site, true); activated = false; } } }); final Button applyButton = new Button(filtersPanel, SWT.PUSH | SWT.NO_FOCUS); applyButton.setText("Apply"); applyButton.setToolTipText("Apply filter criteria"); applyButton.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { setCustomDataFilter(); } }); applyButton.setEnabled(false); final Button clearButton = new Button(filtersPanel, SWT.PUSH | SWT.NO_FOCUS); clearButton.setText("X"); clearButton.setToolTipText("Remove all filters"); clearButton.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { resetDataFilter(true); } }); clearButton.setEnabled(false); this.filtersText.addModifyListener(new ModifyListener() { @Override public void modifyText(ModifyEvent e) { if (filtersEnableState == null) { String filterText = filtersText.getText(); applyButton.setEnabled(true); clearButton.setEnabled(!CommonUtils.isEmpty(filterText)); } } }); filtersPanel.addTraverseListener(new TraverseListener() { @Override public void keyTraversed(TraverseEvent e) { if (e.detail == SWT.TRAVERSE_RETURN) { setCustomDataFilter(); e.doit = false; e.detail = SWT.TRAVERSE_NONE; } } }); filtersEnableState = ControlEnableState.disable(filtersPanel); } public void resetDataFilter(boolean refresh) { setDataFilter(model.createDataFilter(), refresh); } private void setCustomDataFilter() { DBPDataSource dataSource = getDataSource(); if (dataSource == null) { return; } String condition = filtersText.getText(); StringBuilder currentCondition = new StringBuilder(); model.getDataFilter().appendConditionString(dataSource, currentCondition); if (currentCondition.toString().trim().equals(condition.trim())) { // The same return; } DBDDataFilter newFilter = model.createDataFilter(); newFilter.setWhere(condition); setDataFilter(newFilter, true); spreadsheet.setFocus(); } public void updateFiltersText() { boolean enableFilters = false; DBPDataSource dataSource = getDataSource(); if (dataSource != null) { StringBuilder where = new StringBuilder(); model.getDataFilter().appendConditionString(dataSource, where); String whereCondition = where.toString().trim(); filtersText.setText(whereCondition); if (!whereCondition.isEmpty()) { addFiltersHistory(whereCondition); } if (resultSetProvider.isReadyToRun() && !model.isUpdateInProgress() && (!CommonUtils.isEmpty(whereCondition) || (getModel().getVisibleColumnCount() > 0 && supportsDataFilter()))) { enableFilters = true; } } if (enableFilters) { if (filtersEnableState != null) { filtersEnableState.restore(); filtersEnableState = null; } } else if (filtersEnableState == null) { filtersEnableState = ControlEnableState.disable(filtersPanel); } } private void addFiltersHistory(String whereCondition) { int historyCount = filtersText.getItemCount(); for (int i = 0; i < historyCount; i++) { if (filtersText.getItem(i).equals(whereCondition)) { if (i > 0) { // Move to beginning filtersText.remove(i); break; } else { return; } } } filtersText.add(whereCondition, 0); filtersText.setText(whereCondition); } public void setDataFilter(final DBDDataFilter dataFilter, boolean refreshData) { if (!CommonUtils.equalObjects(model.getDataFilter(), dataFilter)) { if (model.setDataFilter(dataFilter)) { refreshSpreadsheet(true, true); } if (refreshData) { reorderResultSet(true, new Runnable() { @Override public void run() { resetColumnOrdering(); } }); } } this.updateFiltersText(); } //////////////////////////////////////////////////////////// // Misc IPreferenceStore getPreferences() { return DBeaverCore.getGlobalPreferenceStore(); } @Override public DBPDataSource getDataSource() { DBSDataContainer dataContainer = getDataContainer(); return dataContainer == null ? null : dataContainer.getDataSource(); } public IFindReplaceTarget getFindReplaceTarget() { if (findReplaceTarget == null) { findReplaceTarget = new ResultSetFindReplaceTarget(this); } return findReplaceTarget; } @Nullable @Override public Object getAdapter(Class adapter) { if (adapter == IPropertySheetPage.class) { // Show cell properties PropertyPageStandard page = new PropertyPageStandard(); page.setPropertySourceProvider(new IPropertySourceProvider() { @Nullable @Override public IPropertySource getPropertySource(Object object) { if (object instanceof GridPos) { final GridPos cell = translateVisualPos((GridPos) object); if (isValidCell(cell)) { final ResultSetValueController valueController = new ResultSetValueController( cell, DBDValueController.EditType.NONE, null); PropertyCollector props = new PropertyCollector(valueController.getAttribute(), false); props.collectProperties(); valueController.getValueHandler().contributeProperties(props, valueController); return props; } } return null; } }); return page; } else if (adapter == IFindReplaceTarget.class) { return getFindReplaceTarget(); } return null; } public void addListener(ResultSetListener listener) { synchronized (listeners) { listeners.add(listener); } } public void removeListener(ResultSetListener listener) { synchronized (listeners) { listeners.remove(listener); } } private void updateGridCursor(int col, int row) { boolean changed; if (gridMode == GridMode.GRID) { changed = curRowNum != row || curColNum != col; curRowNum = row; curColNum = col; } else { changed = curColNum != row; curColNum = row; } if (changed) { ResultSetPropertyTester.firePropertyChange(ResultSetPropertyTester.PROP_CAN_MOVE); ResultSetPropertyTester.firePropertyChange(ResultSetPropertyTester.PROP_EDITABLE); updateToolbar(); if (col >= 0 && row >= 0) { previewValue(); } } } private void updateRecordMode() { int oldColNum = this.curColNum; this.initResultSet(); this.curColNum = oldColNum; spreadsheet.setCursor(new GridPos(0, oldColNum), false); } void updateEditControls() { ResultSetPropertyTester.firePropertyChange(ResultSetPropertyTester.PROP_EDITABLE); ResultSetPropertyTester.firePropertyChange(ResultSetPropertyTester.PROP_CHANGED); updateToolbar(); } /** * It is a hack function. Generally all command associated widgets should be updated automatically by framework. * Freaking E4 do not do it. I've spent a couple of days fighting it. Guys, you owe me. * TODO: just remove in future. In fact everything must work without it */ private void updateToolbar() { if (toolBarManager.isEmpty()) { return; } for (IContributionItem item : toolBarManager.getItems()) { item.update(); } } void refreshSpreadsheet(boolean columnsChanged, boolean rowsChanged) { if (spreadsheet.isDisposed()) { return; } if (rowsChanged) { if (curRowNum >= model.getRowCount()) { curRowNum = model.getRowCount() - 1; } GridPos curPos = new GridPos(spreadsheet.getCursorPosition()); if (gridMode == GridMode.GRID) { if (curPos.row >= model.getRowCount()) { curPos.row = model.getRowCount() - 1; } } this.spreadsheet.reinitState(columnsChanged); // Set cursor on new row if (gridMode == GridMode.GRID) { spreadsheet.setCursor(curPos, false); } else { updateRecordMode(); } } else { this.spreadsheet.redrawGrid(); } } private void createStatusBar(Composite parent) { UIUtils.createHorizontalLine(parent); Composite statusBar = new Composite(parent, SWT.NONE); GridData gd = new GridData(GridData.FILL_HORIZONTAL); statusBar.setLayoutData(gd); GridLayout gl = new GridLayout(4, false); gl.marginWidth = 0; gl.marginHeight = 3; //gl.marginBottom = 5; statusBar.setLayout(gl); statusLabel = new Text(statusBar, SWT.READ_ONLY); gd = new GridData(GridData.FILL_HORIZONTAL); statusLabel.setLayoutData(gd); statusLabel.addMouseListener(new MouseAdapter() { @Override public void mouseDoubleClick(MouseEvent e) { EditTextDialog.showText(site.getShell(), CoreMessages.controls_resultset_viewer_dialog_status_title, statusLabel.getText()); } }); /* IAction viewMessageAction = new Action("View status message", DBIcon.TREE_INFO.getImageDescriptor()) { public void run() { } }; */ toolBarManager = new ToolBarManager(SWT.FLAT | SWT.HORIZONTAL); // handle own commands toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_APPLY_CHANGES)); toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_REJECT_CHANGES)); toolBarManager.add(new Separator()); toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_ROW_EDIT)); toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_ROW_ADD)); toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_ROW_COPY)); toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_ROW_DELETE)); toolBarManager.add(new Separator()); toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_ROW_FIRST)); toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_ROW_PREVIOUS)); toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_ROW_NEXT)); toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_ROW_LAST)); toolBarManager.add(new Separator()); // Link to standard Find/Replace action - it has to be handled by owner site toolBarManager.add(ActionUtils.makeCommandContribution(site, IWorkbenchCommandConstants.EDIT_FIND_AND_REPLACE, CommandContributionItem.STYLE_PUSH, DBIcon.FIND_TEXT.getImageDescriptor())); // Use simple action for refresh to avoid ambiguous behaviour of F5 shortcut //toolBarManager.add(ActionUtils.makeCommandContribution(site, IWorkbenchCommandConstants.FILE_REFRESH, "Refresh result set", DBIcon.RS_REFRESH.getImageDescriptor())); Action refreshAction = new Action(CoreMessages.controls_resultset_viewer_action_refresh, DBIcon.RS_REFRESH.getImageDescriptor()) { @Override public void run() { refresh(); } }; toolBarManager.add(refreshAction); toolBarManager.add(new Separator()); toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_TOGGLE_MODE, CommandContributionItem.STYLE_CHECK)); toolBarManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_TOGGLE_PREVIEW, CommandContributionItem.STYLE_CHECK)); toolBarManager.add(new ConfigAction()); toolBarManager.createControl(statusBar); //updateEditControls(); } public Spreadsheet getSpreadsheet() { return spreadsheet; } public DBSDataContainer getDataContainer() { return resultSetProvider.getDataContainer(); } // Update all columns ordering private void resetColumnOrdering() { if (!spreadsheet.isDisposed() && gridMode == GridMode.GRID) { List<DBDAttributeBinding> visibleColumns = model.getVisibleColumns(); for (int i = 0, metaColumnsLength = visibleColumns.size(); i < metaColumnsLength; i++) { DBDAttributeBinding column = visibleColumns.get(i); DBDAttributeConstraint constraint = model.getDataFilter().getConstraint(column); GridColumn gridColumn = spreadsheet.getColumn(i); if (constraint == null || constraint.getOrderPosition() == 0) { gridColumn.setSort(SWT.DEFAULT); } else { gridColumn.setSort(constraint.isOrderDescending() ? SWT.UP : SWT.DOWN); } } spreadsheet.redrawGrid(); } } //////////////////////////////////////////////////////////// // Grid/Record mode public GridMode getGridMode() { return gridMode; } public void toggleMode() { changeMode(gridMode == GridMode.GRID ? GridMode.RECORD : GridMode.GRID); // Refresh elements ICommandService commandService = (ICommandService) site.getService(ICommandService.class); if (commandService != null) { commandService.refreshElements(ResultSetCommandHandler.CMD_TOGGLE_MODE, null); } } private void changeMode(GridMode gridMode) { int oldRowNum = this.curRowNum, oldColNum = this.curColNum; int rowCount = model.getRowCount(); if (rowCount > 0) { // Fix row number if needed if (oldRowNum < 0) { oldRowNum = this.curRowNum = 0; } else if (oldRowNum >= rowCount) { oldRowNum = this.curRowNum = rowCount - 1; } } this.gridMode = gridMode; if (this.gridMode == GridMode.GRID) { this.spreadsheet.setRowHeaderWidth(DEFAULT_ROW_HEADER_WIDTH); this.initResultSet(); } else { this.resetRecordHeaderWidth(); this.updateRecordMode(); } if (gridMode == GridMode.GRID) { if (oldRowNum >= 0 && oldRowNum < spreadsheet.getItemCount()) { spreadsheet.setCursor(new GridPos(oldColNum, oldRowNum), false); } } else { if (oldColNum >= 0) { spreadsheet.setCursor(new GridPos(0, oldColNum), false); } } spreadsheet.layout(true, true); previewValue(); } private void resetRecordHeaderWidth() { // Calculate width of spreadsheet panel - use longest column title int defaultWidth = 0; GC gc = new GC(spreadsheet); gc.setFont(spreadsheet.getFont()); for (DBDAttributeBinding column : model.getVisibleColumns()) { Point ext = gc.stringExtent(column.getAttributeName()); if (ext.x > defaultWidth) { defaultWidth = ext.x; } } defaultWidth += DBIcon.EDIT_COLUMN.getImage().getBounds().width + 2; spreadsheet.setRowHeaderWidth(defaultWidth + DEFAULT_ROW_HEADER_WIDTH); } //////////////////////////////////////////////////////////// // Value preview public boolean isPreviewVisible() { return resultsSash.getMaximizedControl() == null; } public void togglePreview() { if (resultsSash.getMaximizedControl() == null) { resultsSash.setMaximizedControl(spreadsheet); } else { resultsSash.setMaximizedControl(null); previewValue(); } getPreferences().setValue(VIEW_PANEL_VISIBLE, isPreviewVisible()); // Refresh elements ICommandService commandService = (ICommandService) site.getService(ICommandService.class); if (commandService != null) { commandService.refreshElements(ResultSetCommandHandler.CMD_TOGGLE_PREVIEW, null); } } void previewValue() { if (!isPreviewVisible()) { return; } GridPos cell = getCurrentPosition(); if (!isValidCell(cell)) { previewPane.clearValue(); return; } cell = translateVisualPos(getCurrentPosition()); if (panelValueController == null || panelValueController.pos.col != cell.col) { panelValueController = new ResultSetValueController( cell, DBDValueController.EditType.PANEL, previewPane.getViewPlaceholder()); } else { panelValueController.setCurRow(model.getRowData(cell.row)); } previewPane.viewValue(panelValueController); } //////////////////////////////////////////////////////////// // Misc private void dispose() { closeEditors(); clearData(); themeManager.removePropertyChangeListener(ResultSetViewer.this); UIUtils.dispose(this.boldFont); if (toolBarManager != null) { try { toolBarManager.dispose(); } catch (Throwable e) { // ignore log.debug("Error disposing toolbar", e); } } } private void applyThemeSettings() { ITheme currentTheme = themeManager.getCurrentTheme(); Font rsFont = currentTheme.getFontRegistry().get(ThemeConstants.FONT_SQL_RESULT_SET); if (rsFont != null) { this.spreadsheet.setFont(rsFont); } Color selBackColor = currentTheme.getColorRegistry().get(ThemeConstants.COLOR_SQL_RESULT_SET_SELECTION_BACK); if (selBackColor != null) { this.spreadsheet.setBackgroundSelected(selBackColor); } Color selForeColor = currentTheme.getColorRegistry().get(ThemeConstants.COLOR_SQL_RESULT_SET_SELECTION_FORE); if (selForeColor != null) { this.spreadsheet.setForegroundSelected(selForeColor); } Color previewBack = currentTheme.getColorRegistry().get(ThemeConstants.COLOR_SQL_RESULT_SET_PREVIEW_BACK); if (previewBack != null) { this.previewPane.getViewPlaceholder().setBackground(previewBack); for (Control control : this.previewPane.getViewPlaceholder().getChildren()) { control.setBackground(previewBack); } } this.backgroundAdded = currentTheme.getColorRegistry().get(ThemeConstants.COLOR_SQL_RESULT_CELL_NEW_BACK); this.backgroundDeleted = currentTheme.getColorRegistry().get(ThemeConstants.COLOR_SQL_RESULT_CELL_DELETED_BACK); this.backgroundModified = currentTheme.getColorRegistry().get(ThemeConstants.COLOR_SQL_RESULT_CELL_MODIFIED_BACK); this.backgroundOdd = currentTheme.getColorRegistry().get(ThemeConstants.COLOR_SQL_RESULT_CELL_ODD_BACK); } @Override public void propertyChange(PropertyChangeEvent event) { if (event.getProperty().startsWith(ThemeConstants.RESULTS_PROP_PREFIX)) { applyThemeSettings(); } } void scrollToRow(RowPosition position) { switch (position) { case FIRST: if (gridMode == GridMode.RECORD) { curRowNum = 0; updateRecordMode(); } else { spreadsheet.shiftCursor(0, -spreadsheet.getItemCount(), false); } break; case PREVIOUS: if (gridMode == GridMode.RECORD && curRowNum > 0) { curRowNum--; updateRecordMode(); } else { spreadsheet.shiftCursor(0, -1, false); } break; case NEXT: if (gridMode == GridMode.RECORD && curRowNum < model.getRowCount() - 1) { curRowNum++; updateRecordMode(); } else { spreadsheet.shiftCursor(0, 1, false); } break; case LAST: if (gridMode == GridMode.RECORD && model.getRowCount() > 0) { curRowNum = model.getRowCount() - 1; updateRecordMode(); } else { spreadsheet.shiftCursor(0, spreadsheet.getItemCount(), false); } break; } } boolean isColumnReadOnly(GridPos pos) { int column; if (gridMode == GridMode.GRID) { column = pos.col; } else { column = pos.row; } return isReadOnly() || model.isColumnReadOnly(column); } boolean isColumnReadOnly(DBDAttributeBinding column) { return isReadOnly() || model.isColumnReadOnly(column); } public int getCurrentRow() { return gridMode == GridMode.GRID ? spreadsheet.getCurrentRow() : curRowNum; } public GridPos getCurrentPosition() { return spreadsheet.getCursorPosition(); } public void setStatus(String status) { setStatus(status, false); } public void setStatus(String status, boolean error) { if (statusLabel.isDisposed()) { return; } if (error) { statusLabel.setForeground(colorRed); } else { statusLabel.setForeground(null); } if (status == null) { status = "???"; //$NON-NLS-1$ } statusLabel.setText(status); } public void updateStatusMessage() { if (model.getRowCount() == 0) { if (model.getVisibleColumnCount() == 0) { setStatus(CoreMessages.controls_resultset_viewer_status_empty + getExecutionTimeMessage()); } else { setStatus(CoreMessages.controls_resultset_viewer_status_no_data + getExecutionTimeMessage()); } } else { if (gridMode == GridMode.RECORD) { this.resetRecordHeaderWidth(); setStatus(CoreMessages.controls_resultset_viewer_status_row + (curRowNum + 1) + "/" + model.getRowCount() + getExecutionTimeMessage()); } else { setStatus(String.valueOf(model.getRowCount()) + CoreMessages.controls_resultset_viewer_status_rows_fetched + getExecutionTimeMessage()); } } } private String getExecutionTimeMessage() { DBCStatistics statistics = model.getStatistics(); if (statistics == null || statistics.isEmpty()) { return ""; } return " - " + RuntimeUtils.formatExecutionTime(statistics.getTotalTime()); } /** * Sets new metadata of result set * @param columns columns metadata * @return true if new metadata differs from old one, false otherwise */ public boolean setMetaData(DBDAttributeBinding[] columns) { if (model.setMetaData(columns)) { this.panelValueController = null; return true; } return false; } public void setData(List<Object[]> rows, boolean updateMetaData) { if (spreadsheet.isDisposed()) { return; } // Clear previous data this.closeEditors(); model.setData(rows, updateMetaData); if (updateMetaData) { if (getPreferenceStore().getBoolean(DBeaverPreferences.RESULT_SET_AUTO_SWITCH_MODE)) { GridMode newMode = (rows.size() == 1) ? GridMode.RECORD : GridMode.GRID; if (newMode != gridMode) { toggleMode(); // ResultSetPropertyTester.firePropertyChange(ResultSetPropertyTester.PROP_CAN_TOGGLE); } } this.initResultSet(); } else { this.refreshSpreadsheet(updateMetaData, true); } updateEditControls(); } public void appendData(List<Object[]> rows) { model.appendData(rows); //refreshSpreadsheet(true); spreadsheet.refreshData(false); setStatus(NLS.bind(CoreMessages.controls_resultset_viewer_status_rows_size, model.getRowCount(), rows.size()) + getExecutionTimeMessage()); } private void closeEditors() { List<DBDValueEditorStandalone> editors = new ArrayList<DBDValueEditorStandalone>(openEditors.values()); for (DBDValueEditorStandalone editor : editors) { editor.closeValueEditor(); } if (!openEditors.isEmpty()) { log.warn("Some value editors are still registered at result set: " + openEditors.size()); } openEditors.clear(); } private void initResultSet() { spreadsheet.setRedraw(false); try { spreadsheet.clearGrid(); if (gridMode == GridMode.RECORD) { this.resetRecordHeaderWidth(); } spreadsheet.reinitState(true); } finally { spreadsheet.setRedraw(true); } this.updateFiltersText(); this.updateStatusMessage(); } @Override public int promptToSaveOnClose() { if (!isDirty()) { return ISaveablePart2.YES; } int result = ConfirmationDialog.showConfirmDialog( spreadsheet.getShell(), DBeaverPreferences.CONFIRM_RS_EDIT_CLOSE, ConfirmationDialog.QUESTION_WITH_CANCEL); if (result == IDialogConstants.YES_ID) { return ISaveablePart2.YES; } else if (result == IDialogConstants.NO_ID) { rejectChanges(); return ISaveablePart2.NO; } else { return ISaveablePart2.CANCEL; } } @Override public void doSave(IProgressMonitor monitor) { applyChanges(RuntimeUtils.makeMonitor(monitor)); } @Override public void doSaveAs() { } @Override public boolean isDirty() { return model.isDirty(); } @Override public boolean isSaveAsAllowed() { return false; } @Override public boolean isSaveOnCloseNeeded() { return true; } @Override public boolean hasData() { return model.hasData(); } @Override public boolean isReadOnly() { if (model.isUpdateInProgress()) { return true; } DBSDataContainer dataContainer = getDataContainer(); if (dataContainer == null) { return true; } DBPDataSource dataSource = dataContainer.getDataSource(); return !dataSource.isConnected() || dataSource.getContainer().isConnectionReadOnly() || dataSource.getInfo().isReadOnlyData(); } /** * Checks that specified visual position is valid * @param pos visual grid position * @return true if position is valid */ @Override public boolean isValidCell(GridPos pos) { if (pos == null) { return false; } if (gridMode == GridMode.GRID) { return pos.row >= 0 && pos.row < model.getRowCount() && pos.col >= 0 && pos.col < model.getVisibleColumnCount(); } else { return curRowNum >= 0 && curRowNum < model.getRowCount() && pos.row >= 0; } } /** * Translated visual grid position into model cell position. * Check for grid mode (grid/record) and columns reordering/hiding * @param pos visual position * @return model position */ GridPos translateVisualPos(GridPos pos) { if (gridMode == GridMode.GRID) { DBDAttributeBinding column = model.getVisibleColumn(pos.col); if (column.getAttributeIndex() == pos.col) { return pos; } else { return new GridPos(column.getAttributeIndex(), pos.row); } } else { DBDAttributeBinding column = model.getVisibleColumn(pos.row); return new GridPos(column.getAttributeIndex(), curRowNum); } } /** * Checks that current state of result set allows to insert new rows * @return true if new rows insert is allowed */ @Override public boolean isInsertable() { return !isReadOnly() && model.isSingleSource() && model.getVisibleColumnCount() > 0; } @Nullable @Override public Control showCellEditor( final boolean inline) { // The control that will be the editor must be a child of the Table final GridPos focusCell = spreadsheet.getFocusCell(); //GridPos pos = getPosFromPoint(event.x, event.y); if (focusCell == null || focusCell.row < 0 || focusCell.col < 0) { return null; } if (!isValidCell(focusCell)) { // Out of bounds log.debug("Editor position is out of bounds (" + focusCell.col + ":" + focusCell.row + ")"); return null; } GridPos cell = translateVisualPos(focusCell); if (!inline) { for (ResultSetValueController valueController : openEditors.keySet()) { GridPos cellPos = valueController.getCellPos(); if (cellPos != null && cellPos.equalsTo(cell)) { openEditors.get(valueController).showValueEditor(); return null; } } } DBDAttributeBinding metaColumn = model.getColumn(cell.col); final int handlerFeatures = metaColumn.getValueHandler().getFeatures(); if (handlerFeatures == DBDValueHandler.FEATURE_NONE) { return null; } if (inline && (handlerFeatures & DBDValueHandler.FEATURE_INLINE_EDITOR) == 0 && (handlerFeatures & DBDValueHandler.FEATURE_VIEWER) != 0) { // Inline editor isn't supported but panel viewer is // Enable panel if (!isPreviewVisible()) { togglePreview(); } return null; } if (isColumnReadOnly(metaColumn) && inline) { // No inline editors for readonly columns return null; } Composite placeholder = null; if (inline) { if (isReadOnly()) { return null; } spreadsheet.cancelInlineEditor(); placeholder = new Composite(spreadsheet, SWT.NONE); placeholder.setFont(spreadsheet.getFont()); placeholder.setLayout(new FillLayout()); GridData gd = new GridData(GridData.FILL_BOTH); gd.horizontalIndent = 0; gd.verticalIndent = 0; gd.grabExcessHorizontalSpace = true; gd.grabExcessVerticalSpace = true; placeholder.setLayoutData(gd); } ResultSetValueController valueController = new ResultSetValueController( cell, inline ? DBDValueController.EditType.INLINE : DBDValueController.EditType.EDITOR, inline ? placeholder : null); final DBDValueEditor editor; try { editor = metaColumn.getValueHandler().createEditor(valueController); } catch (Exception e) { UIUtils.showErrorDialog(site.getShell(), "Cannot edit value", null, e); return null; } if (editor instanceof DBDValueEditorStandalone) { valueController.registerEditor((DBDValueEditorStandalone)editor); // show dialog in separate job to avoid block new UIJob("Open separate editor") { @Override public IStatus runInUIThread(IProgressMonitor monitor) { ((DBDValueEditorStandalone)editor).showValueEditor(); return Status.OK_STATUS; } }.schedule(); //((DBDValueEditorStandalone)editor).showValueEditor(); } else { // Set editable value if (editor != null) { try { editor.primeEditorValue(valueController.getValue()); } catch (DBException e) { log.error(e); } } } if (inline) { if (editor != null) { spreadsheet.showCellEditor(focusCell, placeholder); return editor.getControl(); } else { // No editor was created so just drop placeholder placeholder.dispose(); // Probably we can just show preview panel if ((handlerFeatures & DBDValueHandler.FEATURE_VIEWER) != 0) { // Inline editor isn't supported but panel viewer is // Enable panel if (!isPreviewVisible()) { togglePreview(); } return null; } } } return null; } @Override public void resetCellValue(GridPos cell, boolean delete) { cell = translateVisualPos(cell); model.resetCellValue(cell, delete); spreadsheet.redrawGrid(); updateEditControls(); previewValue(); } @Override public void fillContextMenu(GridPos curCell, IMenuManager manager) { // Custom oldValue items if (isValidCell(curCell)) { final GridPos cell = translateVisualPos(curCell); final ResultSetValueController valueController = new ResultSetValueController( cell, DBDValueController.EditType.NONE, null); final Object value = valueController.getValue(); // Standard items manager.add(ActionUtils.makeCommandContribution(site, IWorkbenchCommandConstants.EDIT_CUT)); manager.add(ActionUtils.makeCommandContribution(site, IWorkbenchCommandConstants.EDIT_COPY)); manager.add(ActionUtils.makeCommandContribution(site, ICommandIds.CMD_COPY_SPECIAL)); manager.add(ActionUtils.makeCommandContribution(site, IWorkbenchCommandConstants.EDIT_PASTE)); manager.add(ActionUtils.makeCommandContribution(site, IWorkbenchCommandConstants.EDIT_DELETE)); // Edit items manager.add(new Separator()); manager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_ROW_EDIT)); manager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_ROW_EDIT_INLINE)); if (!valueController.isReadOnly() && !DBUtils.isNullValue(value)) { manager.add(new Action(CoreMessages.controls_resultset_viewer_action_set_to_null) { @Override public void run() { valueController.updateValue( DBUtils.makeNullValue(valueController)); } }); } if (model.isCellModified(cell)) { Action resetValueAction = new Action(CoreMessages.controls_resultset_viewer_action_reset_value) { @Override public void run() { resetCellValue(cell, false); } }; resetValueAction.setAccelerator(SWT.ESC); manager.add(resetValueAction); } // Menus from value handler try { manager.add(new Separator()); model.getColumn(cell.col).getValueHandler().contributeActions(manager, valueController); } catch (Exception e) { log.error(e); } } if (curCell.col >= 0 && model.getVisibleColumnCount() > 0 && !model.isUpdateInProgress()) { // Export and other utility methods manager.add(new Separator()); MenuManager filtersMenu = new MenuManager( CoreMessages.controls_resultset_viewer_action_order_filter, DBIcon.FILTER.getImageDescriptor(), "filters"); //$NON-NLS-1$ filtersMenu.setRemoveAllWhenShown(true); filtersMenu.addMenuListener(new IMenuListener() { @Override public void menuAboutToShow(IMenuManager manager) { fillFiltersMenu(manager); } }); manager.add(filtersMenu); manager.add(new Action(CoreMessages.controls_resultset_viewer_action_export, DBIcon.EXPORT.getImageDescriptor()) { @Override public void run() { ActiveWizardDialog dialog = new ActiveWizardDialog( site.getWorkbenchWindow(), new DataTransferWizard( new IDataTransferProducer[] { new DatabaseTransferProducer(getDataContainer(), model.getDataFilter())}, null), getSelection()); dialog.open(); } }); } manager.add(new GroupMarker(ICommandIds.GROUP_TOOLS)); } private void fillFiltersMenu(IMenuManager filtersMenu) { GridPos currentPosition = getCurrentPosition(); int columnIndex = translateVisualPos(currentPosition).col; if (supportsDataFilter() && columnIndex >= 0) { DBDAttributeBinding column = model.getColumn(columnIndex); DBPDataKind dataKind = column.getMetaAttribute().getDataKind(); if (!column.getMetaAttribute().isRequired()) { filtersMenu.add(new FilterByColumnAction("IS NULL", FilterByColumnType.NONE, column)); filtersMenu.add(new FilterByColumnAction("IS NOT NULL", FilterByColumnType.NONE, column)); } for (FilterByColumnType type : FilterByColumnType.values()) { if (type == FilterByColumnType.NONE || (type == FilterByColumnType.VALUE && !isValidCell(currentPosition))) { // Value filters are available only if certain cell is selected continue; } filtersMenu.add(new Separator()); if (type.getValue(this, column, true, DBDDisplayFormat.NATIVE) == null) { continue; } if (dataKind == DBPDataKind.BOOLEAN) { filtersMenu.add(new FilterByColumnAction("= ?", type, column)); filtersMenu.add(new FilterByColumnAction("<> ?", type, column)); } else if (dataKind == DBPDataKind.NUMERIC || dataKind == DBPDataKind.DATETIME) { filtersMenu.add(new FilterByColumnAction("= ?", type, column)); filtersMenu.add(new FilterByColumnAction("<> ?", type, column)); filtersMenu.add(new FilterByColumnAction("> ?", type, column)); filtersMenu.add(new FilterByColumnAction("< ?", type, column)); } else if (dataKind == DBPDataKind.STRING) { filtersMenu.add(new FilterByColumnAction("= '?'", type, column)); filtersMenu.add(new FilterByColumnAction("<> '?'", type, column)); filtersMenu.add(new FilterByColumnAction("> '?'", type, column)); filtersMenu.add(new FilterByColumnAction("< '?'", type, column)); filtersMenu.add(new FilterByColumnAction("LIKE '%?%'", type, column)); filtersMenu.add(new FilterByColumnAction("NOT LIKE '%?%'", type, column)); } } filtersMenu.add(new Separator()); if (!CommonUtils.isEmpty(model.getDataFilter().getConstraint(column).getCriteria())) { filtersMenu.add(new FilterResetColumnAction(column)); } } { final List<GridColumn> selectedColumns = getSpreadsheet().getSelectedColumns(); if (getGridMode() == GridMode.GRID && !selectedColumns.isEmpty()) { String hideTitle; if (selectedColumns.size() == 1) { DBDAttributeBinding columnToHide = model.getColumn(translateVisualPos( new GridPos(selectedColumns.get(0).getIndex(), -1)).col); hideTitle = "Hide column '" + columnToHide.getAttributeName() + "'"; } else { hideTitle = "Hide selected columns (" + selectedColumns.size() + ")"; } filtersMenu.add(new Action(hideTitle) { @Override public void run() { if (selectedColumns.size() >= getModel().getVisibleColumnCount()) { UIUtils.showMessageBox(getControl().getShell(), "Hide columns", "Can't hide all result columns, at least one column must be visible", SWT.ERROR); } else { int[] columnIndexes = new int[selectedColumns.size()]; for (int i = 0, selectedColumnsSize = selectedColumns.size(); i < selectedColumnsSize; i++) { columnIndexes[i] = selectedColumns.get(i).getIndex(); } Arrays.sort(columnIndexes); for (int i = columnIndexes.length; i > 0; i--) { getModel().setColumnVisibility(getModel().getVisibleColumn(columnIndexes[i - 1]), false); } refreshSpreadsheet(true, true); } } }); } } filtersMenu.add(new Separator()); filtersMenu.add(new ToggleServerSideOrderingAction()); filtersMenu.add(new ShowFiltersAction()); } boolean supportsDataFilter() { return (getDataContainer().getSupportedFeatures() & DBSDataContainer.DATA_FILTER) == DBSDataContainer.DATA_FILTER; } @Override public void changeSorting(final GridColumn column, final int state) { DBDDataFilter dataFilter = model.getDataFilter(); boolean ctrlPressed = (state & SWT.CTRL) == SWT.CTRL; boolean altPressed = (state & SWT.ALT) == SWT.ALT; if (ctrlPressed) { dataFilter.resetOrderBy(); } DBDAttributeBinding metaColumn = model.getVisibleColumn(column.getIndex()); DBDAttributeConstraint constraint = dataFilter.getConstraint(metaColumn); //int newSort; if (constraint.getOrderPosition() == 0) { if (isServerSideFiltering() && supportsDataFilter()) { if (!ConfirmationDialog.confirmActionWithParams( spreadsheet.getShell(), DBeaverPreferences.CONFIRM_ORDER_RESULTSET, metaColumn.getAttributeName())) { return; } } constraint.setOrderPosition(dataFilter.getMaxOrderingPosition() + 1); constraint.setOrderDescending(altPressed); } else if (!constraint.isOrderDescending()) { constraint.setOrderDescending(true); } else { for (DBDAttributeConstraint con2 : dataFilter.getConstraints()) { if (con2.getOrderPosition() > constraint.getOrderPosition()) { con2.setOrderPosition(con2.getOrderPosition() - 1); } } constraint.setOrderPosition(0); constraint.setOrderDescending(false); } // Reorder // Use forced reorder if we just removed ordering on some column reorderResultSet(constraint.getOrderPosition() == 0, new Runnable() { @Override public void run() { resetColumnOrdering(); } }); } @Override public Control getControl() { return this.viewerPanel; } public IWorkbenchPartSite getSite() { return site; } public ResultSetModel getModel() { return model; } @Override public ResultSetModel getInput() { return model; } @Override public void setInput(Object input) { throw new IllegalArgumentException("ResultSet model can't be changed"); } @Override public ResultSetSelection getSelection() { return new ResultSetSelectionImpl(); } @Override public void setSelection(ISelection selection, boolean reveal) { if (selection instanceof ResultSetSelectionImpl && ((ResultSetSelectionImpl) selection).getResultSetViewer() == this) { // It may occur on simple focus change so we won't do anything return; } spreadsheet.deselectAllCells(); if (!selection.isEmpty() && selection instanceof IStructuredSelection) { List<GridPos> cellSelection = new ArrayList<GridPos>(); for (Iterator iter = ((IStructuredSelection) selection).iterator(); iter.hasNext(); ) { Object cell = iter.next(); if (cell instanceof GridPos) { cellSelection.add((GridPos) cell); } else { log.warn("Bad selection object: " + cell); } } spreadsheet.selectCells(cellSelection); if (reveal) { spreadsheet.showSelection(); } } fireSelectionChanged(new SelectionChangedEvent(this, selection)); } public DBDDataReceiver getDataReceiver() { return dataReceiver; } @Override public void refresh() { // Check if we are dirty if (isDirty()) { switch (promptToSaveOnClose()) { case ISaveablePart2.CANCEL: return; case ISaveablePart2.YES: // Apply changes applyChanges(null, new ResultSetPersister.DataUpdateListener() { @Override public void onUpdate(boolean success) { if (success) { getControl().getDisplay().asyncExec(new Runnable() { @Override public void run() { refresh(); } }); } } }); return; default: // Just ignore previous RS values break; } } // Cache preferences IPreferenceStore preferenceStore = getPreferenceStore(); showOddRows = preferenceStore.getBoolean(DBeaverPreferences.RESULT_SET_SHOW_ODD_ROWS); showCelIcons = preferenceStore.getBoolean(DBeaverPreferences.RESULT_SET_SHOW_CELL_ICONS); // Pump data int oldRowNum = curRowNum; int oldColNum = curColNum; if (resultSetProvider != null && resultSetProvider.isReadyToRun() && getDataContainer() != null && dataPumpJob == null) { int segmentSize = getSegmentMaxRows(); if (oldRowNum >= segmentSize && segmentSize > 0) { segmentSize = (oldRowNum / segmentSize + 1) * segmentSize; } runDataPump(0, segmentSize, new GridPos(oldColNum, oldRowNum), new Runnable() { @Override public void run() { if (!supportsDataFilter() && !model.getDataFilter().hasOrdering()) { reorderLocally(); } } }); } } private boolean isServerSideFiltering() { return getPreferenceStore().getBoolean(DBeaverPreferences.RESULT_SET_ORDER_SERVER_SIDE) && (dataReceiver.isHasMoreData() || !CommonUtils.isEmpty(model.getDataFilter().getOrder())); } private void reorderResultSet(boolean force, Runnable onSuccess) { if (force || isServerSideFiltering() && supportsDataFilter()) { if (resultSetProvider != null && resultSetProvider.isReadyToRun() && getDataContainer() != null && dataPumpJob == null) { int segmentSize = getSegmentMaxRows(); if (curRowNum >= segmentSize && segmentSize > 0) { segmentSize = (curRowNum / segmentSize + 1) * segmentSize; } runDataPump(0, segmentSize, new GridPos(curColNum, curRowNum), onSuccess); } return; } try { reorderLocally(); } finally { if (onSuccess != null) { onSuccess.run(); } } } private void reorderLocally() { rejectChanges(); model.resetOrdering(); } synchronized void readNextSegment() { if (!dataReceiver.isHasMoreData()) { return; } if (getDataContainer() != null && !model.isUpdateInProgress() && dataPumpJob == null) { dataReceiver.setHasMoreData(false); dataReceiver.setNextSegmentRead(true); runDataPump(model.getRowCount(), getSegmentMaxRows(), null, null); } } int getSegmentMaxRows() { if (getDataContainer() == null) { return 0; } return getPreferenceStore().getInt(DBeaverPreferences.RESULT_SET_MAX_ROWS); } public IPreferenceStore getPreferenceStore() { DBPDataSource dataSource = getDataSource(); if (dataSource != null) { return dataSource.getContainer().getPreferenceStore(); } return DBeaverCore.getGlobalPreferenceStore(); } private synchronized void runDataPump( final int offset, final int maxRows, @Nullable final GridPos oldPos, @Nullable final Runnable finalizer) { if (dataPumpJob == null) { dataPumpJob = new ResultSetDataPumpJob(this); dataPumpJob.addJobChangeListener(new JobChangeAdapter() { @Override public void done(IJobChangeEvent event) { ResultSetDataPumpJob job = (ResultSetDataPumpJob)event.getJob(); final Throwable error = job.getError(); if (job.getStatistics() != null) { model.setStatistics(job.getStatistics()); } Display.getDefault().asyncExec(new Runnable() { @Override public void run() { Control control = getControl(); if (control == null || control.isDisposed()) { return; } final Shell shell = control.getShell(); if (error != null) { setStatus(error.getMessage(), true); UIUtils.showErrorDialog( shell, "Error executing query", "Query execution failed", error); } else if (oldPos != null) { // Seems to be refresh // Restore original position ResultSetViewer.this.curRowNum = Math.min(oldPos.row, model.getRowCount() - 1); ResultSetViewer.this.curColNum = Math.min(oldPos.col, model.getVisibleColumnCount() - 1); GridPos newPos; if (gridMode == GridMode.GRID) { newPos = new GridPos(curColNum, curRowNum); } else { if (ResultSetViewer.this.curRowNum < 0 && model.getRowCount() > 0) { ResultSetViewer.this.curRowNum = 0; } newPos = new GridPos(0, curColNum); } spreadsheet.setCursor(newPos, false); updateStatusMessage(); previewValue(); } else { spreadsheet.redraw(); } updateFiltersText(); if (finalizer != null) { finalizer.run(); } dataPumpJob = null; } }); } }); dataPumpJob.setOffset(offset); dataPumpJob.setMaxRows(maxRows); dataPumpJob.schedule(); } } private void clearData() { model.clearData(); this.curRowNum = -1; this.curColNum = -1; } public void applyChanges(@Nullable DBRProgressMonitor monitor) { applyChanges(monitor, null); } /** * Saves changes to database * @param monitor monitor. If null then save will be executed in async job * @param listener finish listener (may be null) */ public void applyChanges(@Nullable DBRProgressMonitor monitor, @Nullable ResultSetPersister.DataUpdateListener listener) { if (!model.isSingleSource()) { UIUtils.showErrorDialog(getControl().getShell(), "Apply changes error", "Can't save data for result set from multiple sources"); return; } try { if (!model.getRemovedRows().isEmpty() || !model.getEditedValues().isEmpty()) { // If we have deleted or updated rows then check for unique identifier if (!checkVirtualEntityIdentifier()) { //UIUtils.showErrorDialog(getControl().getShell(), "Can't apply changes", "Can't apply data changes - not unique identifier defined"); return; } } new ResultSetPersister(this).applyChanges(monitor, listener); } catch (DBException e) { UIUtils.showErrorDialog(getControl().getShell(), "Apply changes error", "Error saving changes in database", e); } } public void rejectChanges() { new ResultSetPersister(this).rejectChanges(); } public void copySelectionToClipboard( boolean copyHeader, boolean copyRowNumbers, boolean cut, String delimiter, DBDDisplayFormat format) { if (delimiter == null) { delimiter = "\t"; } String lineSeparator = ContentUtils.getDefaultLineSeparator(); List<Integer> colsSelected = new ArrayList<Integer>(); int firstCol = Integer.MAX_VALUE, lastCol = Integer.MIN_VALUE; int firstRow = Integer.MAX_VALUE; Collection<GridPos> selection = spreadsheet.getSelection(); for (GridPos pos : selection) { if (firstCol > pos.col) { firstCol = pos.col; } if (lastCol < pos.col) { lastCol = pos.col; } if (firstRow > pos.row) { firstRow = pos.row; } if (!colsSelected.contains(pos.col)) { colsSelected.add(pos.col); } } ILabelProvider rowLabelProvider = this.spreadsheet.getRowLabelProvider(); int rowNumber = 0; StringBuilder tdt = new StringBuilder(); if (copyHeader) { if (copyRowNumbers) { tdt.append(rowLabelProvider.getText(-1)); } for (int colIndex : colsSelected) { GridColumn column = spreadsheet.getColumn(colIndex); if (tdt.length() > 0) { tdt.append(delimiter); } tdt.append(column.getText()); } tdt.append(lineSeparator); } if (copyRowNumbers) { tdt.append(rowLabelProvider.getText(rowNumber++)).append(delimiter); } int prevRow = firstRow; int prevCol = firstCol; for (GridPos pos : selection) { if (pos.row > prevRow) { if (prevCol < lastCol) { for (int i = prevCol; i < lastCol; i++) { if (colsSelected.contains(i)) { tdt.append(delimiter); } } } tdt.append(lineSeparator); if (copyRowNumbers) { tdt.append(rowLabelProvider.getText(rowNumber++)).append(delimiter); } prevRow = pos.row; prevCol = firstCol; } if (pos.col > prevCol) { for (int i = prevCol; i < pos.col; i++) { if (colsSelected.contains(i)) { tdt.append(delimiter); } } prevCol = pos.col; } GridPos cellPos = translateVisualPos(pos); Object[] curRow = model.getRowData(cellPos.row); Object value = curRow[cellPos.col]; DBDAttributeBinding column = model.getColumn(cellPos.col); String cellText = column.getValueHandler().getValueDisplayString( column.getMetaAttribute(), value, format); if (cellText != null) { tdt.append(cellText); } if (cut) { DBDValueController valueController = new ResultSetValueController( cellPos, DBDValueController.EditType.NONE, null); if (!valueController.isReadOnly()) { valueController.updateValue(DBUtils.makeNullValue(valueController)); } } } if (tdt.length() > 0) { TextTransfer textTransfer = TextTransfer.getInstance(); getSpreadsheet().getClipboard().setContents( new Object[]{tdt.toString()}, new Transfer[]{textTransfer}); } } public void pasteCellValue() { GridPos cell = getCurrentPosition(); if (cell == null) { return; } cell = translateVisualPos(cell); DBDAttributeBinding metaColumn = model.getColumn(cell.col); if (isColumnReadOnly(metaColumn)) { // No inline editors for readonly columns return; } try { Object newValue = getColumnValueFromClipboard(metaColumn); if (newValue == null) { return; } new ResultSetValueController( cell, DBDValueController.EditType.NONE, null).updateValue(newValue); } catch (Exception e) { UIUtils.showErrorDialog(site.getShell(), "Cannot replace cell value", null, e); } } @Nullable private Object getColumnValueFromClipboard(DBDAttributeBinding metaColumn) throws DBCException { DBPDataSource dataSource = getDataSource(); if (dataSource == null) { return null; } DBCSession session = dataSource.openSession(VoidProgressMonitor.INSTANCE, DBCExecutionPurpose.UTIL, "Copy from clipboard"); try { String strValue = (String) getSpreadsheet().getClipboard().getContents(TextTransfer.getInstance()); return metaColumn.getValueHandler().getValueFromObject( session, metaColumn.getMetaAttribute(), strValue, true); } finally { session.close(); } } void addNewRow(final boolean copyCurrent) { GridPos curPos = spreadsheet.getCursorPosition(); int rowNum; if (gridMode == GridMode.RECORD) { rowNum = this.curRowNum; } else { rowNum = curPos.row; } if (rowNum < 0) { rowNum = 0; } model.shiftRows(rowNum, 1); final DBPDataSource dataSource = getDataSource(); if (dataSource == null) { return; } // Add new row final DBDAttributeBinding[] columns = model.getColumns(); final Object[] cells = new Object[columns.length]; final int currentRowNumber = rowNum; try { DBeaverUI.runInProgressService(new DBRRunnableWithProgress() { @Override public void run(DBRProgressMonitor monitor) throws InvocationTargetException, InterruptedException { // Copy cell values in new context DBCSession session = dataSource.openSession(monitor, DBCExecutionPurpose.UTIL, CoreMessages.controls_resultset_viewer_add_new_row_context_name); try { if (copyCurrent && currentRowNumber >= 0 && currentRowNumber < model.getRowCount()) { Object[] origRow = model.getRowData(currentRowNumber); for (int i = 0; i < columns.length; i++) { DBDAttributeBinding metaColumn = columns[i]; DBSAttributeBase attribute = metaColumn.getAttribute(); if (attribute.isAutoGenerated() || attribute.isPseudoAttribute()) { // set pseudo and autoincrement columns to null cells[i] = null; } else { try { cells[i] = metaColumn.getValueHandler().getValueFromObject(session, attribute, origRow[i], true); } catch (DBCException e) { log.warn(e); try { cells[i] = DBUtils.makeNullValue(session, metaColumn.getValueHandler(), attribute); } catch (DBCException e1) { log.warn(e1); } } } } } else { // Initialize new values for (int i = 0; i < columns.length; i++) { DBDAttributeBinding metaColumn = columns[i]; try { cells[i] = DBUtils.makeNullValue(session, metaColumn.getValueHandler(), metaColumn.getAttribute()); } catch (DBCException e) { log.warn(e); } } } } finally { session.close(); } } }); } catch (InvocationTargetException e) { log.error("Could not create new row", e.getTargetException()); } catch (InterruptedException e) { // interrupted - do nothing } model.addNewRow(rowNum, cells); refreshSpreadsheet(false, true); updateEditControls(); fireResultSetChange(); } void deleteSelectedRows() { GridPos curPos = spreadsheet.getCursorPosition(); TreeSet<Integer> rowNumbers = new TreeSet<Integer>(); if (gridMode == GridMode.RECORD) { rowNumbers.add(this.curRowNum); } else { for (GridPos pos : spreadsheet.getSelection()) { rowNumbers.add(pos.row); } } for (Iterator<Integer> iter = rowNumbers.iterator(); iter.hasNext(); ) { int rowNum = iter.next(); if (rowNum < 0 || rowNum >= model.getRowCount()) { iter.remove(); } } if (rowNumbers.isEmpty()) { return; } int rowsRemoved = 0; int lastRowNum = -1; for (Iterator<Integer> iter = rowNumbers.descendingIterator(); iter.hasNext(); ) { int rowNum = iter.next(); if (rowNum > lastRowNum) { lastRowNum = rowNum; } if (model.deleteRow(rowNum)) { rowsRemoved++; } } // Move one row down (if we are in grid mode) if (gridMode == GridMode.GRID && lastRowNum < spreadsheet.getItemCount() - 1) { curPos.row = lastRowNum - rowsRemoved + 1; spreadsheet.setCursor(curPos, false); } if (rowsRemoved > 0) { refreshSpreadsheet(false, true); } else { spreadsheet.redrawGrid(); } updateEditControls(); fireResultSetChange(); } static Image getTypeImage(DBSTypedObject column) { if (column instanceof IObjectImageProvider) { return ((IObjectImageProvider)column).getObjectImage(); } else { return DBIcon.TREE_COLUMN.getImage(); } } ////////////////////////////////// // Virtual identifier management @Nullable DBCEntityIdentifier getVirtualEntityIdentifier() { if (!model.isSingleSource() || model.getVisibleColumnCount() == 0) { return null; } DBDRowIdentifier rowIdentifier = model.getVisibleColumn(0).getRowIdentifier(); DBCEntityIdentifier identifier = rowIdentifier == null ? null : rowIdentifier.getEntityIdentifier(); if (identifier != null && identifier.getReferrer() instanceof DBVEntityConstraint) { return identifier; } else { return null; } } boolean checkVirtualEntityIdentifier() throws DBException { // Check for value locators // Probably we have only virtual one with empty column set final DBCEntityIdentifier identifier = getVirtualEntityIdentifier(); if (identifier != null) { if (CommonUtils.isEmpty(identifier.getAttributes())) { // Empty identifier. We have to define it RunnableWithResult<Boolean> confirmer = new RunnableWithResult<Boolean>() { @Override public void run() { result = ValidateUniqueKeyUsageDialog.validateUniqueKey(ResultSetViewer.this); } }; UIUtils.runInUI(getControl().getShell(), confirmer); return confirmer.getResult(); } } return true; } boolean editEntityIdentifier(DBRProgressMonitor monitor) throws DBException { DBCEntityIdentifier virtualEntityIdentifier = getVirtualEntityIdentifier(); if (virtualEntityIdentifier == null) { log.warn("No virtual identifier"); return false; } DBVEntityConstraint constraint = (DBVEntityConstraint) virtualEntityIdentifier.getReferrer(); EditConstraintDialog dialog = new EditConstraintDialog( getControl().getShell(), "Define virtual unique identifier", constraint); if (dialog.open() != IDialogConstants.OK_ID) { return false; } Collection<DBSEntityAttribute> uniqueColumns = dialog.getSelectedColumns(); constraint.setAttributes(uniqueColumns); virtualEntityIdentifier = getVirtualEntityIdentifier(); if (virtualEntityIdentifier == null) { log.warn("No virtual identifier defined"); return false; } virtualEntityIdentifier.reloadAttributes(monitor, model.getVisibleColumn(0).getMetaAttribute().getEntity()); DBPDataSource dataSource = getDataSource(); if (dataSource != null) { dataSource.getContainer().persistConfiguration(); } return true; } void clearEntityIdentifier(DBRProgressMonitor monitor) throws DBException { DBDAttributeBinding firstColumn = model.getVisibleColumn(0); DBCEntityIdentifier identifier = firstColumn.getRowIdentifier().getEntityIdentifier(); DBVEntityConstraint virtualKey = (DBVEntityConstraint) identifier.getReferrer(); virtualKey.setAttributes(Collections.<DBSEntityAttribute>emptyList()); identifier.reloadAttributes(monitor, firstColumn.getMetaAttribute().getEntity()); virtualKey.getParentObject().setProperty(DBVConstants.PROPERTY_USE_VIRTUAL_KEY_QUIET, null); DBPDataSource dataSource = getDataSource(); if (dataSource != null) { dataSource.getContainer().persistConfiguration(); } } void fireResultSetChange() { synchronized (listeners) { if (!listeners.isEmpty()) { for (ResultSetListener listener : listeners) { listener.handleResultSetChange(); } } } } ///////////////////////////// // Value controller private class ResultSetValueController implements DBDAttributeController, DBDRowController { private final GridPos pos; private final EditType editType; private final Composite inlinePlaceholder; private Object[] curRow; private final DBDAttributeBinding column; private ResultSetValueController(GridPos pos, EditType editType, @Nullable Composite inlinePlaceholder) { this.curRow = model.getRowData(pos.row); this.pos = new GridPos(pos); this.editType = editType; this.inlinePlaceholder = inlinePlaceholder; this.column = model.getColumn(pos.col); } void setCurRow(Object[] curRow) { this.curRow = curRow; } @Nullable @Override public DBPDataSource getDataSource() { return ResultSetViewer.this.getDataSource(); } @Override public String getValueName() { return getAttribute().getName(); } @Override public DBSTypedObject getValueType() { return getAttribute(); } @Override public DBDRowController getRow() { return this; } @Override public DBCAttributeMetaData getAttribute() { return column.getMetaAttribute(); } @Override public String getColumnId() { DBPDataSource dataSource = getDataSource(); return DBUtils.getSimpleQualifiedName( dataSource == null ? null : dataSource.getContainer().getName(), getAttribute().getEntityName(), getAttribute().getName()); } @Override public Object getValue() { return curRow[pos.col]; } @Override public void updateValue(Object value) { if (model.updateCellValue(pos.row, pos.col, value)) { // Update controls site.getShell().getDisplay().syncExec(new Runnable() { @Override public void run() { updateEditControls(); spreadsheet.redrawGrid(); previewValue(); } }); } fireResultSetChange(); } @Override public DBDRowIdentifier getValueLocator() { return column.getRowIdentifier(); } @Override public DBDValueHandler getValueHandler() { return column.getValueHandler(); } @Override public EditType getEditType() { return editType; } @Override public boolean isReadOnly() { return isColumnReadOnly(column); } @Override public IWorkbenchPartSite getValueSite() { return site; } @Nullable @Override public Composite getEditPlaceholder() { return inlinePlaceholder; } @Nullable @Override public ToolBar getEditToolBar() { return isPreviewVisible() ? previewPane.getToolBar() : null; } @Override public void closeInlineEditor() { spreadsheet.cancelInlineEditor(); } @Override public void nextInlineEditor(boolean next) { spreadsheet.cancelInlineEditor(); int colOffset = next ? 1 : -1; int rowOffset = 0; //final int rowCount = spreadsheet.getItemCount(); final int colCount = spreadsheet.getColumnCount(); final GridPos curPosition = spreadsheet.getCursorPosition(); if (colOffset > 0 && curPosition.col + colOffset >= colCount) { colOffset = -colCount; rowOffset = 1; } else if (colOffset < 0 && curPosition.col + colOffset < 0) { colOffset = colCount; rowOffset = -1; } spreadsheet.shiftCursor(colOffset, rowOffset, false); showCellEditor(true); } public void registerEditor(DBDValueEditorStandalone editor) { openEditors.put(this, editor); } @Override public void unregisterEditor(DBDValueEditorStandalone editor) { openEditors.remove(this); } @Override public void showMessage(String message, boolean error) { setStatus(message, error); } @Override public Collection<DBCAttributeMetaData> getAttributesMetaData() { List<DBCAttributeMetaData> attributes = new ArrayList<DBCAttributeMetaData>(); for (DBDAttributeBinding column : model.getVisibleColumns()) { attributes.add(column.getMetaAttribute()); } return attributes; } @Nullable @Override public DBCAttributeMetaData getAttributeMetaData(DBCEntityMetaData entity, String columnName) { for (DBDAttributeBinding column : model.getVisibleColumns()) { if (column.getMetaAttribute().getEntity() == entity && column.getAttributeName().equals(columnName)) { return column.getMetaAttribute(); } } return null; } @Nullable @Override public Object getAttributeValue(DBCAttributeMetaData attribute) { DBDAttributeBinding[] columns = model.getColumns(); for (int i = 0; i < columns.length; i++) { DBDAttributeBinding metaColumn = columns[i]; if (metaColumn.getMetaAttribute() == attribute) { return curRow[i]; } } log.warn("Unknown column value requested: " + attribute); return null; } @Nullable private GridPos getCellPos() { if (pos.row >= 0) { return new GridPos(pos.col, pos.row); } else { return null; } } } static class TableRowInfo { DBSEntity table; DBCEntityIdentifier id; List<GridPos> tableCells = new ArrayList<GridPos>(); TableRowInfo(DBSEntity table, DBCEntityIdentifier id) { this.table = table; this.id = id; } } private class ContentProvider implements IGridContentProvider { @Override public int getRowCount() { return (gridMode == GridMode.RECORD) ? model.getVisibleColumnCount() : model.getRowCount(); } @Override public int getColumnCount() { return (gridMode == GridMode.RECORD) ? 1: model.getVisibleColumnCount(); } @Override public Object getElement(@NotNull GridPos pos) { pos = translateVisualPos(pos); if (gridMode == GridMode.RECORD) { return model.getRowData(curRowNum)[pos.row]; } else { return model.getRowData(pos.row)[pos.col]; } } @NotNull @Override public String getElementText(@NotNull GridPos pos) { Object value = getElement(pos); DBDAttributeBinding column = model.getColumn(translateVisualPos(pos).col); return column.getValueHandler().getValueDisplayString(column.getAttribute(), value, DBDDisplayFormat.EDIT); } @Override public void updateColumn(@NotNull GridColumn column) { if (gridMode == GridMode.RECORD) { column.setSort(SWT.NONE); } else { column.setSort(SWT.DEFAULT); int index = column.getIndex(); DBDAttributeConstraint co = model.getDataFilter().getConstraint(model.getVisibleColumn(index)); if (co.getOrderPosition() > 0) { DBDAttributeBinding binding = co.getAttribute(); if (model.getVisibleColumns().indexOf(binding) == index) { column.setSort(co.isOrderDescending() ? SWT.UP : SWT.DOWN); } } column.setSortRenderer(new SortRenderer(column)); } } @Override public void dispose() { } @Override public void inputChanged(Viewer viewer, Object oldInput, Object newInput) { } } private class ContentLabelProvider implements IGridLabelProvider { @Nullable private Object getValue(int col, int row, boolean formatString) { Object value; DBDAttributeBinding column; int rowNum; int rowCount = model.getRowCount(); if (gridMode == GridMode.RECORD) { // Fill record rowNum = curRowNum; if (curRowNum >= rowCount || curRowNum < 0) { //log.warn("Bad current row number: " + curRowNum); return ""; } column = model.getVisibleColumn(row); Object[] values = model.getRowData(curRowNum); if (column.getAttributeIndex() >= values.length) { log.debug("Bad record row number: " + row); return null; } value = values[column.getAttributeIndex()]; } else { rowNum = row; if (row >= rowCount) { log.debug("Bad grid row number: " + row); return null; } if (col >= model.getVisibleColumnCount()) { log.debug("Bad grid column number: " + col); return null; } column = model.getVisibleColumn(col); value = model.getCellValue(row, column.getAttributeIndex()); } if (rowNum > 0 && rowNum == rowCount - 1 && (gridMode == GridMode.RECORD || spreadsheet.isRowVisible(rowNum)) && dataReceiver.isHasMoreData()) { readNextSegment(); } if (formatString) { return column.getValueHandler().getValueDisplayString( column.getMetaAttribute(), value, DBDDisplayFormat.UI); } else { return value; } } @Nullable @Override public Image getImage(int col, int row) { if (!showCelIcons) { return null; } DBDAttributeBinding attr; if (gridMode == GridMode.RECORD) { if (row >= model.getVisibleColumnCount()) { return null; } attr = model.getVisibleColumn(row); } else { if (col >= model.getVisibleColumnCount()) { return null; } attr = model.getVisibleColumn(col); } if ((attr.getValueHandler().getFeatures() & DBDValueHandler.FEATURE_SHOW_ICON) != 0) { return getTypeImage(attr.getMetaAttribute()); } else { return null; } } @Override public String getText(int col, int row) { return String.valueOf(getValue(col, row, true)); } @Nullable @Override public Color getForeground(int col, int row) { Object value = getValue(col, row, false); if (DBUtils.isNullValue(value)) { return foregroundNull; } else { return null; } } @Nullable @Override public Color getBackground(int col, int row) { if (gridMode == GridMode.RECORD) { col = row; row = curRowNum; } if (model.isRowAdded(row)) { return backgroundAdded; } if (model.isRowDeleted(row)) { return backgroundDeleted; } if (model.isDirty() && model.isCellModified( new GridPos(model.getVisibleColumn(col).getAttributeIndex(), row))) { return backgroundModified; } if (row % 2 == 0 && showOddRows) { return backgroundOdd; } return null; } } private class ColumnLabelProvider extends LabelProvider implements IFontProvider, ITooltipProvider { @Nullable @Override public Image getImage(Object element) { if (gridMode == GridMode.GRID) { int colNumber = ((Number)element).intValue(); return getTypeImage(model.getVisibleColumn(colNumber).getMetaAttribute()); } return null; } @Nullable @Override public String getText(Object element) { int colNumber = ((Number)element).intValue(); if (gridMode == GridMode.RECORD) { if (colNumber == 0) { return CoreMessages.controls_resultset_viewer_value; } else { log.warn("Bad column index: " + colNumber); return null; } } else { DBDAttributeBinding metaColumn = model.getVisibleColumn(colNumber); DBCAttributeMetaData attribute = metaColumn.getMetaAttribute(); if (CommonUtils.isEmpty(attribute.getLabel())) { return metaColumn.getAttributeName(); } else { return attribute.getLabel(); } /* return CommonUtils.isEmpty(metaColumn.getMetaData().getEntityName()) ? metaColumn.getMetaData().getName() : metaColumn.getMetaData().getEntityName() + "." + metaColumn.getMetaData().getName(); */ } } @Nullable @Override public Font getFont(Object element) { int colNumber = ((Number)element).intValue(); if (gridMode == GridMode.GRID) { DBDAttributeConstraint constraint = model.getDataFilter().getConstraint(model.getVisibleColumn(colNumber)); if (constraint != null && constraint.hasFilter()) { return boldFont; } } return null; } @Nullable @Override public String getTooltip(Object element) { int colNumber = ((Number)element).intValue(); if (gridMode == GridMode.GRID) { DBDAttributeBinding metaColumn = model.getVisibleColumn(colNumber); String name = metaColumn.getAttributeName(); String typeName = DBUtils.getFullTypeName(metaColumn.getMetaAttribute()); return name + ": " + typeName; } return null; } } private class RowLabelProvider extends LabelProvider { @Nullable @Override public Image getImage(Object element) { if (gridMode == GridMode.RECORD) { int rowNumber = ((Number) element).intValue(); if (rowNumber < 0) return null; return getTypeImage(model.getVisibleColumn(rowNumber).getMetaAttribute()); } return null; } @Nullable @Override public String getText(Object element) { int rowNumber = ((Number) element).intValue(); if (gridMode == GridMode.RECORD) { if (rowNumber < 0) return "Name"; return model.getVisibleColumn(rowNumber).getAttributeName(); } else { if (rowNumber < 0) return "#"; return String.valueOf(rowNumber + 1); } } } private class TopLeftRenderer extends DefaultRowHeaderRenderer { public TopLeftRenderer(LightGrid grid) { super(grid); } } private class ConfigAction extends Action implements IMenuCreator { public ConfigAction() { super(CoreMessages.controls_resultset_viewer_action_options, IAction.AS_DROP_DOWN_MENU); setImageDescriptor(DBIcon.CONFIGURATION.getImageDescriptor()); } @Override public IMenuCreator getMenuCreator() { return this; } @Override public void runWithEvent(Event event) { Menu menu = getMenu(getSpreadsheet()); if (menu != null && event.widget instanceof ToolItem) { Rectangle bounds = ((ToolItem) event.widget).getBounds(); Point point = ((ToolItem) event.widget).getParent().toDisplay(bounds.x, bounds.y + bounds.height); menu.setLocation(point.x, point.y); menu.setVisible(true); } } @Override public void dispose() { } @Override public Menu getMenu(Control parent) { MenuManager menuManager = new MenuManager(); menuManager.add(new ShowFiltersAction()); menuManager.add(new Separator()); menuManager.add(new VirtualKeyEditAction(true)); menuManager.add(new VirtualKeyEditAction(false)); menuManager.add(new DictionaryEditAction()); menuManager.add(new Separator()); menuManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_TOGGLE_MODE, CommandContributionItem.STYLE_CHECK)); menuManager.add(ActionUtils.makeCommandContribution(site, ResultSetCommandHandler.CMD_TOGGLE_PREVIEW, CommandContributionItem.STYLE_CHECK)); menuManager.add(new Separator()); menuManager.add(new Action("Preferences") { @Override public void run() { UIUtils.showPreferencesFor( getControl().getShell(), ResultSetViewer.this, PrefPageDatabaseGeneral.PAGE_ID); } }); return menuManager.createContextMenu(parent); } @Nullable @Override public Menu getMenu(Menu parent) { return null; } } private class ShowFiltersAction extends Action { public ShowFiltersAction() { super(CoreMessages.controls_resultset_viewer_action_order_filter, DBIcon.FILTER.getImageDescriptor()); } @Override public void run() { new ResultSetFilterDialog(ResultSetViewer.this).open(); } } private class ToggleServerSideOrderingAction extends Action { public ToggleServerSideOrderingAction() { super(CoreMessages.pref_page_database_resultsets_label_server_side_order); } @Override public int getStyle() { return AS_CHECK_BOX; } @Override public boolean isChecked() { return getPreferenceStore().getBoolean(DBeaverPreferences.RESULT_SET_ORDER_SERVER_SIDE); } @Override public void run() { IPreferenceStore preferenceStore = getPreferenceStore(); preferenceStore.setValue( DBeaverPreferences.RESULT_SET_ORDER_SERVER_SIDE, !preferenceStore.getBoolean(DBeaverPreferences.RESULT_SET_ORDER_SERVER_SIDE)); } } private enum FilterByColumnType { VALUE(DBIcon.FILTER_VALUE.getImageDescriptor()) { @Override String getValue(ResultSetViewer viewer, DBDAttributeBinding column, boolean useDefault, DBDDisplayFormat format) { Object value = viewer.model.getCellValue( viewer.getCurrentRow(), column.getAttributeIndex()); return column.getValueHandler().getValueDisplayString(column.getMetaAttribute(), value, format); } }, INPUT(DBIcon.FILTER_INPUT.getImageDescriptor()) { @Override String getValue(ResultSetViewer viewer, DBDAttributeBinding column, boolean useDefault, DBDDisplayFormat format) { if (useDefault) { return ".."; } else { return EditTextDialog.editText( viewer.getControl().getShell(), "Enter value", ""); } } }, CLIPBOARD(DBIcon.FILTER_CLIPBOARD.getImageDescriptor()) { @Override String getValue(ResultSetViewer viewer, DBDAttributeBinding column, boolean useDefault, DBDDisplayFormat format) { try { return column.getValueHandler().getValueDisplayString( column.getMetaAttribute(), viewer.getColumnValueFromClipboard(column), format); } catch (DBCException e) { log.debug("Error copying from clipboard", e); return null; } } }, NONE(DBIcon.FILTER_VALUE.getImageDescriptor()) { @Override String getValue(ResultSetViewer viewer, DBDAttributeBinding column, boolean useDefault, DBDDisplayFormat format) { return ""; } }; final ImageDescriptor icon; private FilterByColumnType(ImageDescriptor icon) { this.icon = icon; } @Nullable abstract String getValue(ResultSetViewer viewer, DBDAttributeBinding column, boolean useDefault, DBDDisplayFormat format); } private String translateFilterPattern(String pattern, FilterByColumnType type, DBDAttributeBinding column) { String value = CommonUtils.truncateString( CommonUtils.toString( type.getValue(this, column, true, DBDDisplayFormat.UI)), 30); return pattern.replace("?", value); } private class FilterByColumnAction extends Action { private final String pattern; private final FilterByColumnType type; private final DBDAttributeBinding column; public FilterByColumnAction(String pattern, FilterByColumnType type, DBDAttributeBinding column) { super(column.getAttributeName() + " " + translateFilterPattern(pattern, type, column), type.icon); this.pattern = pattern; this.type = type; this.column = column; } @Override public void run() { String value = type.getValue(ResultSetViewer.this, column, false, DBDDisplayFormat.NATIVE); if (value == null) { return; } String stringValue = pattern.replace("?", value); DBDDataFilter filter = model.getDataFilter(); filter.getConstraint(column).setCriteria(stringValue); updateFiltersText(); refresh(); } } private class FilterResetColumnAction extends Action { private final DBDAttributeBinding column; public FilterResetColumnAction(DBDAttributeBinding column) { super("Remove filter for '" + column.getAttributeName() + "'", DBIcon.REVERT.getImageDescriptor()); this.column = column; } @Override public void run() { model.getDataFilter().getConstraint(column).setCriteria(null); updateFiltersText(); refresh(); } } private class VirtualKeyEditAction extends Action { private boolean define; public VirtualKeyEditAction(boolean define) { super(define ? "Define virtual unique key" : "Clear virtual unique key"); this.define = define; } @Override public boolean isEnabled() { DBCEntityIdentifier identifier = getVirtualEntityIdentifier(); return identifier != null && (define || !CommonUtils.isEmpty(identifier.getAttributes())); } @Override public void run() { DBeaverUI.runUIJob("Edit virtual key", new DBRRunnableWithProgress() { @Override public void run(DBRProgressMonitor monitor) throws InvocationTargetException, InterruptedException { try { if (define) { editEntityIdentifier(monitor); } else { clearEntityIdentifier(monitor); } } catch (DBException e) { throw new InvocationTargetException(e); } } }); } } private class DictionaryEditAction extends Action { public DictionaryEditAction() { super("Define dictionary"); } @Override public void run() { } @Override public boolean isEnabled() { return false; } } private class ResultSetSelectionImpl implements ResultSetSelection { @Nullable @Override public GridPos getFirstElement() { Collection<GridPos> ssSelection = spreadsheet.getSelection(); return ssSelection.isEmpty() ? null : ssSelection.iterator().next(); } @Override public Iterator iterator() { return spreadsheet.getSelection().iterator(); } @Override public int size() { return spreadsheet.getSelection().size(); } @Override public Object[] toArray() { return spreadsheet.getSelection().toArray(); } @Override public List toList() { return new ArrayList<GridPos>(spreadsheet.getSelection()); } @Override public boolean isEmpty() { return spreadsheet.getSelection().isEmpty(); } @Override public ResultSetViewer getResultSetViewer() { return ResultSetViewer.this; } @Override public Collection<ResultSetRow> getSelectedRows() { List<ResultSetRow> rows = new ArrayList<ResultSetRow>(); if (gridMode == GridMode.RECORD) { if (curRowNum < 0 || curRowNum >= model.getRowCount()) { return Collections.emptyList(); } rows.add(new ResultSetRow(ResultSetViewer.this, model.getRowData(curRowNum))); } else { Collection<Integer> rowSelection = spreadsheet.getRowSelection(); for (Integer row : rowSelection) { rows.add(new ResultSetRow(ResultSetViewer.this, model.getRowData(row))); } } return rows; } @Override public boolean equals(Object obj) { return obj instanceof ResultSetSelectionImpl && super.equals(obj); } } /** * The column header sort arrow renderer. */ static class SortRenderer extends AbstractRenderer { private Image asterisk; private Image arrowUp; private Image arrowDown; private GridColumn column; private Cursor hoverCursor; SortRenderer(GridColumn column) { super(column.getParent()); this.column = column; this.asterisk = DBIcon.SORT_UNKNOWN.getImage(); this.arrowUp = DBIcon.SORT_DECREASE.getImage(); this.arrowDown = DBIcon.SORT_INCREASE.getImage(); this.hoverCursor = getDisplay().getSystemCursor(SWT.CURSOR_HAND); Rectangle imgBounds = arrowUp.getBounds(); setSize(imgBounds.width, imgBounds.height); } @Override public void paint(GC gc) { Rectangle bounds = getBounds(); switch (column.getSort()) { case SWT.DEFAULT: gc.drawImage(asterisk, bounds.x, bounds.y); break; case SWT.UP: gc.drawImage(arrowUp, bounds.x, bounds.y); break; case SWT.DOWN: gc.drawImage(arrowDown, bounds.x, bounds.y); break; } /* if (isSelected()) { gc.drawLine(bounds.x, bounds.y, bounds.x + 6, bounds.y); gc.drawLine(bounds.x + 1, bounds.y + 1, bounds.x + 5, bounds.y + 1); gc.drawLine(bounds.x + 2, bounds.y + 2, bounds.x + 4, bounds.y + 2); gc.drawPoint(bounds.x + 3, bounds.y + 3); } else { gc.drawPoint(bounds.x + 3, bounds.y); gc.drawLine(bounds.x + 2, bounds.y + 1, bounds.x + 4, bounds.y + 1); gc.drawLine(bounds.x + 1, bounds.y + 2, bounds.x + 5, bounds.y + 2); gc.drawLine(bounds.x, bounds.y + 3, bounds.x + 6, bounds.y + 3); } */ } @Override public Cursor getHoverCursor() { return hoverCursor; } } /* static class TopLeftRenderer extends AbstractRenderer { private Button cfgButton; public TopLeftRenderer(final ResultSetViewer resultSetViewer) { super(resultSetViewer.getSpreadsheet()); cfgButton = new Button(grid, SWT.FLAT | SWT.NO_FOCUS); cfgButton.setImage(DBIcon.FILTER.getImage()); cfgButton.addSelectionListener(new SelectionAdapter() { @Override public void widgetSelected(SelectionEvent e) { new ResultSetFilterDialog(resultSetViewer).open(); } }); ControlEditor controlEditor = new ControlEditor(grid); controlEditor.setEditor(cfgButton); //cfgButton.setText("..."); } @Override public void setBounds(Rectangle bounds) { Rectangle cfgBounds = new Rectangle(bounds.x + 1, bounds.y + 1, bounds.width - 2, bounds.height - 2); cfgButton.setBounds(bounds); super.setBounds(bounds); } @Override public void paint(GC gc) { //cfgButton.redraw(); //gc.drawImage(DBIcon.FILTER.getImage(), 0, 0); } } */ }
Update edit controls after data segment read
plugins/org.jkiss.dbeaver.core/src/org/jkiss/dbeaver/ui/controls/resultset/ResultSetViewer.java
Update edit controls after data segment read
Java
bsd-2-clause
268cde8727253d88ddc3a6e367e410e3f64bec3f
0
octavianiLocator/g3m,AeroGlass/g3m,octavianiLocator/g3m,octavianiLocator/g3m,AeroGlass/g3m,AeroGlass/g3m,octavianiLocator/g3m,octavianiLocator/g3m,AeroGlass/g3m,octavianiLocator/g3m,AeroGlass/g3m,AeroGlass/g3m
package org.glob3.mobile.generated; public class NonOverlappingMark { private float _springLengthInPixels; private Vector3D _cartesianPos; private Geodetic3D _geoPosition ; private float _dX; //Velocity vector (pixels per second) private float _dY; private float _fX; //Applied Force private float _fY; private MarkWidget _widget; private MarkWidget _anchorWidget; private final float _springK; private final float _maxSpringLength; private final float _minSpringLength; private final float _electricCharge; private final float _anchorElectricCharge; private final float _maxWidgetSpeedInPixelsPerSecond; private final float _resistanceFactor; private final float _minWidgetSpeedInPixelsPerSecond; public NonOverlappingMark(IImageBuilder imageBuilderWidget, IImageBuilder imageBuilderAnchor, Geodetic3D position, MarkWidgetTouchListener touchListener, float springLengthInPixels, float springK, float maxSpringLength, float minSpringLength, float electricCharge, float anchorElectricCharge, float maxWidgetSpeedInPixelsPerSecond, float minWidgetSpeedInPixelsPerSecond) { this(imageBuilderWidget, imageBuilderAnchor, position, touchListener, springLengthInPixels, springK, maxSpringLength, minSpringLength, electricCharge, anchorElectricCharge, maxWidgetSpeedInPixelsPerSecond, minWidgetSpeedInPixelsPerSecond, 0.95f); } public NonOverlappingMark(IImageBuilder imageBuilderWidget, IImageBuilder imageBuilderAnchor, Geodetic3D position, MarkWidgetTouchListener touchListener, float springLengthInPixels, float springK, float maxSpringLength, float minSpringLength, float electricCharge, float anchorElectricCharge, float maxWidgetSpeedInPixelsPerSecond) { this(imageBuilderWidget, imageBuilderAnchor, position, touchListener, springLengthInPixels, springK, maxSpringLength, minSpringLength, electricCharge, anchorElectricCharge, maxWidgetSpeedInPixelsPerSecond, 35.0f, 0.95f); } public NonOverlappingMark(IImageBuilder imageBuilderWidget, IImageBuilder imageBuilderAnchor, Geodetic3D position, MarkWidgetTouchListener touchListener, float springLengthInPixels, float springK, float maxSpringLength, float minSpringLength, float electricCharge, float anchorElectricCharge) { this(imageBuilderWidget, imageBuilderAnchor, position, touchListener, springLengthInPixels, springK, maxSpringLength, minSpringLength, electricCharge, anchorElectricCharge, 1000.0f, 35.0f, 0.95f); } public NonOverlappingMark(IImageBuilder imageBuilderWidget, IImageBuilder imageBuilderAnchor, Geodetic3D position, MarkWidgetTouchListener touchListener, float springLengthInPixels, float springK, float maxSpringLength, float minSpringLength, float electricCharge) { this(imageBuilderWidget, imageBuilderAnchor, position, touchListener, springLengthInPixels, springK, maxSpringLength, minSpringLength, electricCharge, 1500.0f, 1000.0f, 35.0f, 0.95f); } public NonOverlappingMark(IImageBuilder imageBuilderWidget, IImageBuilder imageBuilderAnchor, Geodetic3D position, MarkWidgetTouchListener touchListener, float springLengthInPixels, float springK, float maxSpringLength, float minSpringLength) { this(imageBuilderWidget, imageBuilderAnchor, position, touchListener, springLengthInPixels, springK, maxSpringLength, minSpringLength, 3000.0f, 1500.0f, 1000.0f, 35.0f, 0.95f); } public NonOverlappingMark(IImageBuilder imageBuilderWidget, IImageBuilder imageBuilderAnchor, Geodetic3D position, MarkWidgetTouchListener touchListener, float springLengthInPixels, float springK, float maxSpringLength) { this(imageBuilderWidget, imageBuilderAnchor, position, touchListener, springLengthInPixels, springK, maxSpringLength, 5.0f, 3000.0f, 1500.0f, 1000.0f, 35.0f, 0.95f); } public NonOverlappingMark(IImageBuilder imageBuilderWidget, IImageBuilder imageBuilderAnchor, Geodetic3D position, MarkWidgetTouchListener touchListener, float springLengthInPixels, float springK) { this(imageBuilderWidget, imageBuilderAnchor, position, touchListener, springLengthInPixels, springK, 100.0f, 5.0f, 3000.0f, 1500.0f, 1000.0f, 35.0f, 0.95f); } public NonOverlappingMark(IImageBuilder imageBuilderWidget, IImageBuilder imageBuilderAnchor, Geodetic3D position, MarkWidgetTouchListener touchListener, float springLengthInPixels) { this(imageBuilderWidget, imageBuilderAnchor, position, touchListener, springLengthInPixels, 1.0f, 100.0f, 5.0f, 3000.0f, 1500.0f, 1000.0f, 35.0f, 0.95f); } public NonOverlappingMark(IImageBuilder imageBuilderWidget, IImageBuilder imageBuilderAnchor, Geodetic3D position, MarkWidgetTouchListener touchListener) { this(imageBuilderWidget, imageBuilderAnchor, position, touchListener, 10.0f, 1.0f, 100.0f, 5.0f, 3000.0f, 1500.0f, 1000.0f, 35.0f, 0.95f); } public NonOverlappingMark(IImageBuilder imageBuilderWidget, IImageBuilder imageBuilderAnchor, Geodetic3D position) { this(imageBuilderWidget, imageBuilderAnchor, position, null, 10.0f, 1.0f, 100.0f, 5.0f, 3000.0f, 1500.0f, 1000.0f, 35.0f, 0.95f); } public NonOverlappingMark(IImageBuilder imageBuilderWidget, IImageBuilder imageBuilderAnchor, Geodetic3D position, MarkWidgetTouchListener touchListener, float springLengthInPixels, float springK, float maxSpringLength, float minSpringLength, float electricCharge, float anchorElectricCharge, float maxWidgetSpeedInPixelsPerSecond, float minWidgetSpeedInPixelsPerSecond, float resistanceFactor) { _geoPosition = new Geodetic3D(position); _springLengthInPixels = springLengthInPixels; _cartesianPos = null; _dX = 0F; _dY = 0F; _fX = 0F; _fY = 0F; _widget = new MarkWidget(imageBuilderWidget); _anchorWidget = new MarkWidget(imageBuilderAnchor); _springK = springK; _maxSpringLength = maxSpringLength; _minSpringLength = minSpringLength; _electricCharge = electricCharge; _maxWidgetSpeedInPixelsPerSecond = maxWidgetSpeedInPixelsPerSecond; _anchorElectricCharge = anchorElectricCharge; _resistanceFactor = resistanceFactor; _minWidgetSpeedInPixelsPerSecond = minWidgetSpeedInPixelsPerSecond; if (touchListener != null) { _widget.setTouchListener(touchListener); } } public void dispose() { if (_cartesianPos != null) _cartesianPos.dispose(); } public final Vector3D getCartesianPosition(Planet planet) { if (_cartesianPos == null) { _cartesianPos = new Vector3D(planet.toCartesian(_geoPosition)); } return _cartesianPos; } public final void computeAnchorScreenPos(Camera cam, Planet planet) { Vector2F sp = new Vector2F(cam.point2Pixel(getCartesianPosition(planet))); _anchorWidget.setScreenPos(sp._x, sp._y); if (_widget.getScreenPos().isNaN()) { _widget.setScreenPos(sp._x, sp._y + 0.01f); } } public final Vector2F getScreenPos() { return _widget.getScreenPos(); } public final Vector2F getAnchorScreenPos() { return _anchorWidget.getScreenPos(); } public final void render(G3MRenderContext rc, GLState glState) { if (_widget.isReady() && _anchorWidget.isReady()) { _widget.render(rc, glState); _anchorWidget.render(rc, glState); } else { _widget.init(rc, rc.getCurrentCamera().getViewPortWidth(), rc.getCurrentCamera().getViewPortHeight()); _anchorWidget.init(rc, rc.getCurrentCamera().getViewPortWidth(), rc.getCurrentCamera().getViewPortHeight()); } } public final void applyCoulombsLaw(NonOverlappingMark that) { Vector2F d = getScreenPos().sub(that.getScreenPos()); double distance = d.length() + 0.001; Vector2F direction = d.div((float)distance); float strength = (float)(this._electricCharge * that._electricCharge / (distance * distance)); Vector2F force = direction.times(strength); this.applyForce(force._x, force._y); that.applyForce(-force._x, -force._y); // var d = point1.p.subtract(point2.p); // var distance = d.magnitude() + 0.1; // avoid massive forces at small distances (and divide by zero) // var direction = d.normalise(); // // // apply force to each end point // point1.applyForce(direction.multiply(this.repulsion).divide(distance * distance * 0.5)); // point2.applyForce(direction.multiply(this.repulsion).divide(distance * distance * -0.5)); } public final void applyCoulombsLawFromAnchor(NonOverlappingMark that) { Vector2F dAnchor = getScreenPos().sub(that.getAnchorScreenPos()); double distanceAnchor = dAnchor.length() + 0.001; Vector2F directionAnchor = dAnchor.div((float)distanceAnchor); float strengthAnchor = (float)(this._electricCharge * that._anchorElectricCharge / (distanceAnchor * distanceAnchor)); this.applyForce(directionAnchor._x * strengthAnchor, directionAnchor._y * strengthAnchor); } public final void applyHookesLaw() { Vector2F d = getScreenPos().sub(getAnchorScreenPos()); double mod = d.length(); double displacement = _springLengthInPixels - mod; Vector2F direction = d.div((float)mod); float force = (float)(_springK * displacement); applyForce((float)(direction._x * force), (float)(direction._y * force)); // var d = spring.point2.p.subtract(spring.point1.p); // the direction of the spring // var displacement = spring.length - d.magnitude(); // var direction = d.normalise(); // // // apply force to each end point // spring.point1.applyForce(direction.multiply(spring.k * displacement * -0.5)); // spring.point2.applyForce(direction.multiply(spring.k * displacement * 0.5)); } public final void applyForce(float x, float y) { _fX += x; _fY += y; } public final void updatePositionWithCurrentForce(double elapsedMS, float viewportWidth, float viewportHeight) { Vector2D oldVelocity = new Vector2D(_dX, _dY); Vector2D force = new Vector2D(_fX, _fY); //Assuming Widget Mass = 1.0 float time = (float)(elapsedMS / 1000.0); Vector2D velocity = oldVelocity.add(force.times(time)).times(_resistanceFactor); //Resistance force applied as x0.85 //Force has been applied and must be reset _fX = 0F; _fY = 0F; //Clamping Velocity double velocityPPS = velocity.length(); if (velocityPPS > _maxWidgetSpeedInPixelsPerSecond) { _dX = (float)(velocity._x * (_maxWidgetSpeedInPixelsPerSecond / velocityPPS)); _dY = (float)(velocity._y * (_maxWidgetSpeedInPixelsPerSecond / velocityPPS)); } else { if (velocityPPS < _minWidgetSpeedInPixelsPerSecond) { _dX = 0.0F; _dY = 0.0F; } else { //Normal case _dX = (float)velocity._x; _dY = (float)velocity._y; } } //Update position Vector2F position = _widget.getScreenPos(); float newX = position._x + (_dX * time); float newY = position._y + (_dY * time); Vector2F anchorPos = _anchorWidget.getScreenPos(); Vector2F spring = new Vector2F(newX,newY).sub(anchorPos).clampLength(_minSpringLength, _maxSpringLength); Vector2F finalPos = anchorPos.add(spring); _widget.setScreenPos(finalPos._x, finalPos._y); _widget.clampPositionInsideScreen((int)viewportWidth, (int)viewportHeight, 5); // 5 pixels of margin } public final void onResizeViewportEvent(int width, int height) { _widget.onResizeViewportEvent(width, height); _anchorWidget.onResizeViewportEvent(width, height); } public final void resetWidgetPositionVelocityAndForce() { _widget.resetPosition(); _dX = 0F; _dY = 0F; _fX = 0F; _fY = 0F; } public final boolean onTouchEvent(float x, float y) { return _widget.onTouchEvent(x, y); } }
Commons/G3MSharedSDK/src/org/glob3/mobile/generated/NonOverlappingMark.java
package org.glob3.mobile.generated; public class NonOverlappingMark { private float _springLengthInPixels; private Vector3D _cartesianPos; private Geodetic3D _geoPosition ; private float _dX; //Velocity vector (pixels per second) private float _dY; private float _fX; //Applied Force private float _fY; private MarkWidget _widget; private MarkWidget _anchorWidget; private final float _springK; private final float _maxSpringLength; private final float _minSpringLength; private final float _electricCharge; private final float _anchorElectricCharge; private final float _maxWidgetSpeedInPixelsPerSecond; private final float _resistanceFactor; private final float _minWidgetSpeedInPixelsPerSecond; public NonOverlappingMark(IImageBuilder imageBuilderWidget, IImageBuilder imageBuilderAnchor, Geodetic3D position, MarkWidgetTouchListener touchListener, float springLengthInPixels, float springK, float maxSpringLength, float minSpringLength, float electricCharge, float anchorElectricCharge, float maxWidgetSpeedInPixelsPerSecond, float minWidgetSpeedInPixelsPerSecond) { this(imageBuilderWidget, imageBuilderAnchor, position, touchListener, springLengthInPixels, springK, maxSpringLength, minSpringLength, electricCharge, anchorElectricCharge, maxWidgetSpeedInPixelsPerSecond, minWidgetSpeedInPixelsPerSecond, 0.95f); } public NonOverlappingMark(IImageBuilder imageBuilderWidget, IImageBuilder imageBuilderAnchor, Geodetic3D position, MarkWidgetTouchListener touchListener, float springLengthInPixels, float springK, float maxSpringLength, float minSpringLength, float electricCharge, float anchorElectricCharge, float maxWidgetSpeedInPixelsPerSecond) { this(imageBuilderWidget, imageBuilderAnchor, position, touchListener, springLengthInPixels, springK, maxSpringLength, minSpringLength, electricCharge, anchorElectricCharge, maxWidgetSpeedInPixelsPerSecond, 35.0f, 0.95f); } public NonOverlappingMark(IImageBuilder imageBuilderWidget, IImageBuilder imageBuilderAnchor, Geodetic3D position, MarkWidgetTouchListener touchListener, float springLengthInPixels, float springK, float maxSpringLength, float minSpringLength, float electricCharge, float anchorElectricCharge) { this(imageBuilderWidget, imageBuilderAnchor, position, touchListener, springLengthInPixels, springK, maxSpringLength, minSpringLength, electricCharge, anchorElectricCharge, 1000.0f, 35.0f, 0.95f); } public NonOverlappingMark(IImageBuilder imageBuilderWidget, IImageBuilder imageBuilderAnchor, Geodetic3D position, MarkWidgetTouchListener touchListener, float springLengthInPixels, float springK, float maxSpringLength, float minSpringLength, float electricCharge) { this(imageBuilderWidget, imageBuilderAnchor, position, touchListener, springLengthInPixels, springK, maxSpringLength, minSpringLength, electricCharge, 1500.0f, 1000.0f, 35.0f, 0.95f); } public NonOverlappingMark(IImageBuilder imageBuilderWidget, IImageBuilder imageBuilderAnchor, Geodetic3D position, MarkWidgetTouchListener touchListener, float springLengthInPixels, float springK, float maxSpringLength, float minSpringLength) { this(imageBuilderWidget, imageBuilderAnchor, position, touchListener, springLengthInPixels, springK, maxSpringLength, minSpringLength, 3000.0f, 1500.0f, 1000.0f, 35.0f, 0.95f); } public NonOverlappingMark(IImageBuilder imageBuilderWidget, IImageBuilder imageBuilderAnchor, Geodetic3D position, MarkWidgetTouchListener touchListener, float springLengthInPixels, float springK, float maxSpringLength) { this(imageBuilderWidget, imageBuilderAnchor, position, touchListener, springLengthInPixels, springK, maxSpringLength, 5.0f, 3000.0f, 1500.0f, 1000.0f, 35.0f, 0.95f); } public NonOverlappingMark(IImageBuilder imageBuilderWidget, IImageBuilder imageBuilderAnchor, Geodetic3D position, MarkWidgetTouchListener touchListener, float springLengthInPixels, float springK) { this(imageBuilderWidget, imageBuilderAnchor, position, touchListener, springLengthInPixels, springK, 100.0f, 5.0f, 3000.0f, 1500.0f, 1000.0f, 35.0f, 0.95f); } public NonOverlappingMark(IImageBuilder imageBuilderWidget, IImageBuilder imageBuilderAnchor, Geodetic3D position, MarkWidgetTouchListener touchListener, float springLengthInPixels) { this(imageBuilderWidget, imageBuilderAnchor, position, touchListener, springLengthInPixels, 1.0f, 100.0f, 5.0f, 3000.0f, 1500.0f, 1000.0f, 35.0f, 0.95f); } public NonOverlappingMark(IImageBuilder imageBuilderWidget, IImageBuilder imageBuilderAnchor, Geodetic3D position, MarkWidgetTouchListener touchListener) { this(imageBuilderWidget, imageBuilderAnchor, position, touchListener, 10.0f, 1.0f, 100.0f, 5.0f, 3000.0f, 1500.0f, 1000.0f, 35.0f, 0.95f); } public NonOverlappingMark(IImageBuilder imageBuilderWidget, IImageBuilder imageBuilderAnchor, Geodetic3D position) { this(imageBuilderWidget, imageBuilderAnchor, position, null, 10.0f, 1.0f, 100.0f, 5.0f, 3000.0f, 1500.0f, 1000.0f, 35.0f, 0.95f); } public NonOverlappingMark(IImageBuilder imageBuilderWidget, IImageBuilder imageBuilderAnchor, Geodetic3D position, MarkWidgetTouchListener touchListener, float springLengthInPixels, float springK, float maxSpringLength, float minSpringLength, float electricCharge, float anchorElectricCharge, float maxWidgetSpeedInPixelsPerSecond, float minWidgetSpeedInPixelsPerSecond, float resistanceFactor) { _geoPosition = new Geodetic3D(position); _springLengthInPixels = springLengthInPixels; _cartesianPos = null; _dX = 0F; _dY = 0F; _fX = 0F; _fY = 0F; _widget = imageBuilderWidget; _anchorWidget = imageBuilderAnchor; _springK = springK; _maxSpringLength = maxSpringLength; _minSpringLength = minSpringLength; _electricCharge = electricCharge; _maxWidgetSpeedInPixelsPerSecond = maxWidgetSpeedInPixelsPerSecond; _anchorElectricCharge = anchorElectricCharge; _resistanceFactor = resistanceFactor; _minWidgetSpeedInPixelsPerSecond = minWidgetSpeedInPixelsPerSecond; if (touchListener != null) { _widget.setTouchListener(touchListener); } } public void dispose() { if (_cartesianPos != null) _cartesianPos.dispose(); } public final Vector3D getCartesianPosition(Planet planet) { if (_cartesianPos == null) { _cartesianPos = new Vector3D(planet.toCartesian(_geoPosition)); } return _cartesianPos; } public final void computeAnchorScreenPos(Camera cam, Planet planet) { Vector2F sp = new Vector2F(cam.point2Pixel(getCartesianPosition(planet))); _anchorWidget.setScreenPos(sp._x, sp._y); if (_widget.getScreenPos().isNaN()) { _widget.setScreenPos(sp._x, sp._y + 0.01f); } } public final Vector2F getScreenPos() { return _widget.getScreenPos(); } public final Vector2F getAnchorScreenPos() { return _anchorWidget.getScreenPos(); } public final void render(G3MRenderContext rc, GLState glState) { if (_widget.isReady() && _anchorWidget.isReady()) { _widget.render(rc, glState); _anchorWidget.render(rc, glState); } else { _widget.init(rc, rc.getCurrentCamera().getViewPortWidth(), rc.getCurrentCamera().getViewPortHeight()); _anchorWidget.init(rc, rc.getCurrentCamera().getViewPortWidth(), rc.getCurrentCamera().getViewPortHeight()); } } public final void applyCoulombsLaw(NonOverlappingMark that) { Vector2F d = getScreenPos().sub(that.getScreenPos()); double distance = d.length() + 0.001; Vector2F direction = d.div((float)distance); float strength = (float)(this._electricCharge * that._electricCharge / (distance * distance)); Vector2F force = direction.times(strength); this.applyForce(force._x, force._y); that.applyForce(-force._x, -force._y); // var d = point1.p.subtract(point2.p); // var distance = d.magnitude() + 0.1; // avoid massive forces at small distances (and divide by zero) // var direction = d.normalise(); // // // apply force to each end point // point1.applyForce(direction.multiply(this.repulsion).divide(distance * distance * 0.5)); // point2.applyForce(direction.multiply(this.repulsion).divide(distance * distance * -0.5)); } public final void applyCoulombsLawFromAnchor(NonOverlappingMark that) { Vector2F dAnchor = getScreenPos().sub(that.getAnchorScreenPos()); double distanceAnchor = dAnchor.length() + 0.001; Vector2F directionAnchor = dAnchor.div((float)distanceAnchor); float strengthAnchor = (float)(this._electricCharge * that._anchorElectricCharge / (distanceAnchor * distanceAnchor)); this.applyForce(directionAnchor._x * strengthAnchor, directionAnchor._y * strengthAnchor); } public final void applyHookesLaw() { Vector2F d = getScreenPos().sub(getAnchorScreenPos()); double mod = d.length(); double displacement = _springLengthInPixels - mod; Vector2F direction = d.div((float)mod); float force = (float)(_springK * displacement); applyForce((float)(direction._x * force), (float)(direction._y * force)); // var d = spring.point2.p.subtract(spring.point1.p); // the direction of the spring // var displacement = spring.length - d.magnitude(); // var direction = d.normalise(); // // // apply force to each end point // spring.point1.applyForce(direction.multiply(spring.k * displacement * -0.5)); // spring.point2.applyForce(direction.multiply(spring.k * displacement * 0.5)); } public final void applyForce(float x, float y) { _fX += x; _fY += y; } public final void updatePositionWithCurrentForce(double elapsedMS, float viewportWidth, float viewportHeight) { Vector2D oldVelocity = new Vector2D(_dX, _dY); Vector2D force = new Vector2D(_fX, _fY); //Assuming Widget Mass = 1.0 float time = (float)(elapsedMS / 1000.0); Vector2D velocity = oldVelocity.add(force.times(time)).times(_resistanceFactor); //Resistance force applied as x0.85 //Force has been applied and must be reset _fX = 0F; _fY = 0F; //Clamping Velocity double velocityPPS = velocity.length(); if (velocityPPS > _maxWidgetSpeedInPixelsPerSecond) { _dX = (float)(velocity._x * (_maxWidgetSpeedInPixelsPerSecond / velocityPPS)); _dY = (float)(velocity._y * (_maxWidgetSpeedInPixelsPerSecond / velocityPPS)); } else { if (velocityPPS < _minWidgetSpeedInPixelsPerSecond) { _dX = 0.0F; _dY = 0.0F; } else { //Normal case _dX = (float)velocity._x; _dY = (float)velocity._y; } } //Update position Vector2F position = _widget.getScreenPos(); float newX = position._x + (_dX * time); float newY = position._y + (_dY * time); Vector2F anchorPos = _anchorWidget.getScreenPos(); Vector2F spring = new Vector2F(newX,newY).sub(anchorPos).clampLength(_minSpringLength, _maxSpringLength); Vector2F finalPos = anchorPos.add(spring); _widget.setScreenPos(finalPos._x, finalPos._y); _widget.clampPositionInsideScreen((int)viewportWidth, (int)viewportHeight, 5); // 5 pixels of margin } public final void onResizeViewportEvent(int width, int height) { _widget.onResizeViewportEvent(width, height); _anchorWidget.onResizeViewportEvent(width, height); } public final void resetWidgetPositionVelocityAndForce() { _widget.resetPosition(); _dX = 0F; _dY = 0F; _fX = 0F; _fY = 0F; } public final boolean onTouchEvent(float x, float y) { return _widget.onTouchEvent(x, y); } }
Generated
Commons/G3MSharedSDK/src/org/glob3/mobile/generated/NonOverlappingMark.java
Generated
Java
bsd-2-clause
f7c59bf8ba6c6e189e246ac978173d5d9f23c653
0
KronosDesign/runelite,abelbriggs1/runelite,devinfrench/runelite,runelite/runelite,runelite/runelite,KronosDesign/runelite,abelbriggs1/runelite,devinfrench/runelite,Sethtroll/runelite,l2-/runelite,Sethtroll/runelite,Noremac201/runelite,abelbriggs1/runelite,runelite/runelite,Noremac201/runelite,l2-/runelite
/* * Copyright (c) 2017, Robin Weymans <[email protected]> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package net.runelite.client.plugins.hunter; import java.awt.BasicStroke; import java.awt.Color; import java.awt.Dimension; import java.awt.Graphics2D; import java.awt.Point; import java.awt.geom.Arc2D; import javax.inject.Inject; import net.runelite.api.Client; import net.runelite.api.widgets.Widget; import net.runelite.client.ui.overlay.Overlay; import net.runelite.client.ui.overlay.OverlayLayer; import net.runelite.client.ui.overlay.OverlayPosition; /** * Represents the overlay that shows timers on traps that are placed by the * player. */ public class TrapOverlay extends Overlay { /** * Size of the trap timer. */ private static final int TIMER_SIZE = 25; /** * Width of the border around the trap timer. */ private static final int TIMER_BORDER_WIDTH = 1; /** * The timer is low when only 25% is left. */ private static final double TIMER_LOW = 0.25; // When the timer is under a quarter left, if turns red. private final Client client; private final HunterPlugin plugin; private final HunterConfig config; private Color colorOpen, colorOpenBorder; private Color colorEmpty, colorEmptyBorder; private Color colorFull, colorFullBorder; private Color colorTrans, colorTransBorder; @Inject TrapOverlay(Client client, HunterPlugin plugin, HunterConfig config) { setPosition(OverlayPosition.DYNAMIC); setLayer(OverlayLayer.ABOVE_SCENE); this.plugin = plugin; this.config = config; this.client = client; } @Override public Dimension render(Graphics2D graphics, Point parent) { drawTraps(graphics); return null; } /** * Updates the timer colors. */ public void updateConfig() { colorEmptyBorder = config.getEmptyTrapColor(); colorEmpty = new Color(colorEmptyBorder.getRed(), colorEmptyBorder.getGreen(), colorEmptyBorder.getBlue(), 100); colorFullBorder = config.getFullTrapColor(); colorFull = new Color(colorFullBorder.getRed(), colorFullBorder.getGreen(), colorFullBorder.getBlue(), 100); colorOpenBorder = config.getOpenTrapColor(); colorOpen = new Color(colorOpenBorder.getRed(), colorOpenBorder.getGreen(), colorOpenBorder.getBlue(), 100); colorTransBorder = config.getTransTrapColor(); colorTrans = new Color(colorTransBorder.getRed(), colorTransBorder.getGreen(), colorTransBorder.getBlue(), 100); } /** * Iterates over all the traps that were placed by the local player, and * draws a circle or a timer on the trap, depending on the trap state. * * @param graphics */ private void drawTraps(Graphics2D graphics) { Widget viewport = client.getViewportWidget(); for (HunterTrap trap : plugin.getTraps()) { net.runelite.api.Point trapLoc = trap.getGameObject().getCanvasLocation(); if (viewport != null && trapLoc != null && viewport.contains(trapLoc)) { switch (trap.getState()) { case OPEN: drawTimerOnTrap(graphics, trap, colorOpen, colorOpenBorder, colorEmpty, colorOpenBorder); break; case EMPTY: drawTimerOnTrap(graphics, trap, colorEmpty, colorEmptyBorder, colorEmpty, colorEmptyBorder); break; case FULL: drawCircleOnTrap(graphics, trap, colorFull, colorFullBorder); break; case TRANSITION: drawCircleOnTrap(graphics, trap, colorTrans, colorTransBorder); break; } } } } /** * Draws a timer on a given trap. * * @param graphics * @param trap The trap on which the timer needs to be drawn * @param fill The fill color of the timer * @param border The border color of the timer * @param fillTimeLow The fill color of the timer when it is low * @param borderTimeLow The border color of the timer when it is low */ private void drawTimerOnTrap(Graphics2D graphics, HunterTrap trap, Color fill, Color border, Color fillTimeLow, Color borderTimeLow) { net.runelite.api.Point loc = trap.getGameObject().getCanvasLocation(); //Construct the arc Arc2D.Float arc = new Arc2D.Float(Arc2D.PIE); arc.setAngleStart(90); double timeLeft = 1 - trap.getTrapTimeRelative(); arc.setAngleExtent(timeLeft * 360); arc.setFrame(loc.getX() - TIMER_SIZE / 2, loc.getY() - TIMER_SIZE / 2, TIMER_SIZE, TIMER_SIZE); //Draw the inside of the arc graphics.setColor(timeLeft > TIMER_LOW ? fill : fillTimeLow); graphics.fill(arc); //Draw the outlines of the arc graphics.setStroke(new BasicStroke(TIMER_BORDER_WIDTH)); graphics.setColor(timeLeft > TIMER_LOW ? border : borderTimeLow); graphics.drawOval(loc.getX() - TIMER_SIZE / 2, loc.getY() - TIMER_SIZE / 2, TIMER_SIZE, TIMER_SIZE); } /** * Draws a timer on a given trap. * * @param graphics * @param trap The trap on which the timer needs to be drawn * @param fill The fill color of the timer * @param border The border color of the timer */ private void drawCircleOnTrap(Graphics2D graphics, HunterTrap trap, Color fill, Color border) { net.runelite.api.Point loc = trap.getGameObject().getCanvasLocation(); //Draw the inside of the arc graphics.setColor(fill); graphics.fillOval(loc.getX() - TIMER_SIZE / 2, loc.getY() - TIMER_SIZE / 2, TIMER_SIZE, TIMER_SIZE); //Draw the border of the cirlce graphics.setColor(border); graphics.setStroke(new BasicStroke(TIMER_BORDER_WIDTH)); graphics.drawOval(loc.getX() - TIMER_SIZE / 2, loc.getY() - TIMER_SIZE / 2, TIMER_SIZE, TIMER_SIZE); } }
runelite-client/src/main/java/net/runelite/client/plugins/hunter/TrapOverlay.java
/* * Copyright (c) 2017, Robin Weymans <[email protected]> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR * ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package net.runelite.client.plugins.hunter; import java.awt.BasicStroke; import java.awt.Color; import java.awt.Dimension; import java.awt.Graphics2D; import java.awt.Point; import java.awt.geom.Arc2D; import javax.inject.Inject; import net.runelite.api.Client; import net.runelite.api.widgets.Widget; import net.runelite.client.ui.overlay.Overlay; import net.runelite.client.ui.overlay.OverlayLayer; import net.runelite.client.ui.overlay.OverlayPosition; /** * Represents the overlay that shows timers on traps that are placed by the * player. */ public class TrapOverlay extends Overlay { /** * Size of the trap timer. */ private static final int TIMER_SIZE = 25; /** * Width of the border around the trap timer. */ private static final int TIMER_BORDER_WIDTH = 1; /** * The timer is low when only 25% is left. */ private static final double TIMER_LOW = 0.25; // When the timer is under a quarter left, if turns red. private final Client client; private final HunterPlugin plugin; private final HunterConfig config; private Color colorOpen, colorOpenBorder; private Color colorEmpty, colorEmptyBorder; private Color colorFull, colorFullBorder; private Color colorTrans, colorTransBorder; @Inject TrapOverlay(Client client, HunterPlugin plugin, HunterConfig config) { setPosition(OverlayPosition.DYNAMIC); setLayer(OverlayLayer.ABOVE_SCENE); this.plugin = plugin; this.config = config; this.client = client; } @Override public Dimension render(Graphics2D graphics, Point parent) { drawTraps(graphics); return null; } /** * Updates the timer colors. */ public void updateConfig() { colorEmptyBorder = config.getEmptyTrapColor(); colorEmpty = new Color(colorEmptyBorder.getRed(), colorEmptyBorder.getGreen(), colorEmptyBorder.getBlue(), 100); colorFullBorder = config.getFullTrapColor(); colorFull = new Color(colorFullBorder.getRed(), colorFullBorder.getGreen(), colorFullBorder.getBlue(), 100); colorOpenBorder = config.getOpenTrapColor(); colorOpen = new Color(colorOpenBorder.getRed(), colorOpenBorder.getGreen(), colorOpenBorder.getBlue(), 100); colorTransBorder = config.getTransTrapColor(); colorTrans = new Color(colorTransBorder.getRed(), colorTransBorder.getGreen(), colorTransBorder.getBlue(), 100); } /** * Iterates over all the traps that were placed by the local player, and * draws a circle or a timer on the trap, depending on the trap state. * * @param graphics */ private void drawTraps(Graphics2D graphics) { Widget viewport = client.getViewportWidget(); for (HunterTrap trap : plugin.getTraps()) { if (viewport != null && viewport.contains(trap.getGameObject().getCanvasLocation())) { switch (trap.getState()) { case OPEN: drawTimerOnTrap(graphics, trap, colorOpen, colorOpenBorder, colorEmpty, colorOpenBorder); break; case EMPTY: drawTimerOnTrap(graphics, trap, colorEmpty, colorEmptyBorder, colorEmpty, colorEmptyBorder); break; case FULL: drawCircleOnTrap(graphics, trap, colorFull, colorFullBorder); break; case TRANSITION: drawCircleOnTrap(graphics, trap, colorTrans, colorTransBorder); break; } } } } /** * Draws a timer on a given trap. * * @param graphics * @param trap The trap on which the timer needs to be drawn * @param fill The fill color of the timer * @param border The border color of the timer * @param fillTimeLow The fill color of the timer when it is low * @param borderTimeLow The border color of the timer when it is low */ private void drawTimerOnTrap(Graphics2D graphics, HunterTrap trap, Color fill, Color border, Color fillTimeLow, Color borderTimeLow) { net.runelite.api.Point loc = trap.getGameObject().getCanvasLocation(); //Construct the arc Arc2D.Float arc = new Arc2D.Float(Arc2D.PIE); arc.setAngleStart(90); double timeLeft = 1 - trap.getTrapTimeRelative(); arc.setAngleExtent(timeLeft * 360); arc.setFrame(loc.getX() - TIMER_SIZE / 2, loc.getY() - TIMER_SIZE / 2, TIMER_SIZE, TIMER_SIZE); //Draw the inside of the arc graphics.setColor(timeLeft > TIMER_LOW ? fill : fillTimeLow); graphics.fill(arc); //Draw the outlines of the arc graphics.setStroke(new BasicStroke(TIMER_BORDER_WIDTH)); graphics.setColor(timeLeft > TIMER_LOW ? border : borderTimeLow); graphics.drawOval(loc.getX() - TIMER_SIZE / 2, loc.getY() - TIMER_SIZE / 2, TIMER_SIZE, TIMER_SIZE); } /** * Draws a timer on a given trap. * * @param graphics * @param trap The trap on which the timer needs to be drawn * @param fill The fill color of the timer * @param border The border color of the timer */ private void drawCircleOnTrap(Graphics2D graphics, HunterTrap trap, Color fill, Color border) { net.runelite.api.Point loc = trap.getGameObject().getCanvasLocation(); //Draw the inside of the arc graphics.setColor(fill); graphics.fillOval(loc.getX() - TIMER_SIZE / 2, loc.getY() - TIMER_SIZE / 2, TIMER_SIZE, TIMER_SIZE); //Draw the border of the cirlce graphics.setColor(border); graphics.setStroke(new BasicStroke(TIMER_BORDER_WIDTH)); graphics.drawOval(loc.getX() - TIMER_SIZE / 2, loc.getY() - TIMER_SIZE / 2, TIMER_SIZE, TIMER_SIZE); } }
hunter plugin: Fix null pointer when going across a loading zone
runelite-client/src/main/java/net/runelite/client/plugins/hunter/TrapOverlay.java
hunter plugin: Fix null pointer when going across a loading zone
Java
mit
a884c94fa348f59cba7656ccff3b33916eb008c1
0
CMPUT301F17T14/gitrekt
package com.example.habitrack; import android.content.Context; import android.util.Log; import com.google.gson.Gson; import com.google.gson.reflect.TypeToken; import java.io.BufferedReader; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStreamReader; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.OutputStreamWriter; import java.lang.reflect.Type; import java.util.Calendar; import java.util.ArrayList; /** * HabitEventController * * Version 1.0 * * Created by sshussai on 10/21/17. */ public class HabitEventController { /** * This class is the main interface for the habit event entity. It can create a new habit event * and delete it, given a corresponding habit type * */ private Context hectx; private final String FILE_NAME = "habitEvents.sav"; private final String ID_FILE_NAME = "heid.sav"; public HabitEventController(Context ctx){ this.hectx = ctx; } public void createNewHabitEvent(Integer habitTypeID){ HabitTypeController htc = new HabitTypeController(hectx); HabitEvent he = new HabitEvent(HabitEventStateManager.getHEStateManager().getHabitEventID(), habitTypeID); // Save the new HE ID saveHEID(); he.setTitle(htc.getHabitTitle(habitTypeID)); HabitEventStateManager.getHEStateManager().storeHabitEvent(he); // Save event on elastic search ElasticSearchController.AddHabitEvent addHabitEvent = new ElasticSearchController.AddHabitEvent(); addHabitEvent.execute(he); // Save event locally saveToFile(); // Increment the completed event counter for the habit type htc.incrementHTCurrentCounter(habitTypeID); } public void createNewHabitEvent(Integer habitTypeID, String comment){ HabitTypeController htc = new HabitTypeController(hectx); HabitEvent he = new HabitEvent(HabitEventStateManager.getHEStateManager().getHabitEventID(), habitTypeID); he.setTitle(htc.getHabitTitle(habitTypeID)); he.setComment(comment); HabitEventStateManager.getHEStateManager().storeHabitEvent(he); // Save event on elastic search ElasticSearchController.AddHabitEvent addHabitEvent = new ElasticSearchController.AddHabitEvent(); addHabitEvent.execute(he); // Save event locally saveToFile(); } public ArrayList<HabitEvent> getAllHabitEvent(){ return HabitEventStateManager.getHEStateManager().getAllHabitEvents(); } public void updateRecentHabitEvents(){ HabitEventStateManager.getHEStateManager().updateRecentHabitEvents(); } /** * This function returns the list of recent events * @return */ public ArrayList<HabitEvent> getRecentHabitEvents(){ return HabitEventStateManager.getHEStateManager().getRecentHabitevents(); } /** * This function deletes all habit events */ public void deleteAllHabitTypes(){ HabitEventStateManager.getHEStateManager().removeRecentHabitEvents(); HabitEventStateManager.getHEStateManager().removeAllHabitEvents(); // Save event locally saveToFile(); } /** * this function deletes all the habit types scheduled for today */ public void deleteHabitTypesForToday(){ HabitTypeStateManager.getHTStateManager().removeHabitTypesForToday(); } public void deleteHabitEvent(Integer requestedID) { HabitEventStateManager.getHEStateManager().removeHabitEvent(requestedID); // Save event locally saveToFile(); } public HabitEvent getHabitEvent(Integer requestedID) { HabitEvent he = HabitEventStateManager.getHEStateManager().getHabitEvent(requestedID); return he; } public ArrayList<HabitEvent> getHabitEventElasticSearch(){ ArrayList<HabitEvent> he = new ArrayList<>(); ElasticSearchController.GetHabitEvent getHabitEvent = new ElasticSearchController.GetHabitEvent(); getHabitEvent.execute(""); try { he = getHabitEvent.get(); } catch (Exception e) { Log.i("Error","Failed to get the tweets from the async object"); } return he; } public void editHabitEventTitle(Integer requestedID, String newTitle){ HabitEvent he = this.getHabitEvent(requestedID); // If the habit event exists if(!he.getHabitEventID().equals(-1)){ he.setTitle(newTitle); // Save event locally saveToFile(); } } public void editHabitEventComment(Integer requestedID, String newComment){ HabitEvent he = this.getHabitEvent(requestedID); // If the habit event exists if(!he.getHabitEventID().equals(-1)){ he.setComment(newComment); // Save event locally saveToFile(); } } public void editHabitEventDate(Integer requestedID, Calendar newDate){ HabitEvent he = this.getHabitEvent(requestedID); // If the habit event exists if(!he.getHabitEventID().equals(-1)){ he.setDate(newDate); // Save event locally saveToFile(); } } public String getHabitEventTitle(Integer requestedID){ HabitEvent he = this.getHabitEvent(requestedID); // If the habit event exists if(!he.getHabitEventID().equals(-1)){ return he.getTitle(); } else { return ""; } } public String getHabitEventComment(Integer requestedID){ HabitEvent he = this.getHabitEvent(requestedID); // If the habit event exists if(!he.getHabitEventID().equals(-1)){ return he.getComment(); } else { return ""; } } public Calendar getHabitEventDate(Integer requestedID){ HabitEvent he = this.getHabitEvent(requestedID); Calendar cal; // If the habit event exists if(!he.getHabitEventID().equals(-1)){ cal = he.getDate(); } else { cal = Calendar.getInstance(); cal.set(Calendar.YEAR, -1); } return cal; } public Integer getHabitEventID(Integer requestedID){ HabitEvent he = this.getHabitEvent(requestedID); // If the habit event exists if(!he.getHabitEventID().equals(-1)){ return he.getHabitEventID(); } else { return -1; } } public Integer getCorrespondingHabitTypeID(Integer requestedID){ HabitEvent he = this.getHabitEvent(requestedID); // If the habit event exists if(!he.getHabitEventID().equals(-1)){ return he.getHabitTypeID(); } else { return -1; } } public void loadFromFile() { ArrayList<HabitEvent> heList = new ArrayList<HabitEvent>(); try { FileInputStream fis = hectx.openFileInput(FILE_NAME); BufferedReader in = new BufferedReader(new InputStreamReader(fis)); Gson gson = new Gson(); //Code taken from http://stackoverflow.com/questions/12384064/gson-convert-from-json-to-a-typed-arraylistt Sept.22,2016 Type listType = new TypeToken<ArrayList<HabitEvent>>(){}.getType(); heList = gson.fromJson(in, listType); } catch (FileNotFoundException e) { // TODO Auto-generated catch block heList = new ArrayList<HabitEvent>(); } catch (IOException e) { // TODO Auto-generated catch block throw new RuntimeException(); } HabitEventStateManager.getHEStateManager().setAllHabitEvents(heList); } public void saveToFile() { ArrayList<HabitEvent> heList = getAllHabitEvent(); try { FileOutputStream fos = hectx.openFileOutput(FILE_NAME,0); OutputStreamWriter writer = new OutputStreamWriter(fos); Gson gson = new Gson(); gson.toJson(heList, writer); writer.flush(); } catch (FileNotFoundException e) { // TODO Auto-generated catch block throw new RuntimeException(); } catch (IOException e) { // TODO Auto-generated catch block throw new RuntimeException(); } } public void saveHEID(){ Integer saveID = HabitEventStateManager.getHEStateManager().getIDToSave(); try { FileOutputStream fos = hectx.openFileOutput(ID_FILE_NAME,0); OutputStreamWriter writer = new OutputStreamWriter(fos); Gson gson = new Gson(); gson.toJson(saveID, writer); writer.flush(); } catch (FileNotFoundException e) { // TODO Auto-generated catch block throw new RuntimeException(); } catch (IOException e) { // TODO Auto-generated catch block throw new RuntimeException(); } } public void loadHEID() { Integer loadedID; try { FileInputStream fis = hectx.openFileInput(ID_FILE_NAME); BufferedReader in = new BufferedReader(new InputStreamReader(fis)); Gson gson = new Gson(); //Code taken from http://stackoverflow.com/questions/12384064/gson-convert-from-json-to-a-typed-arraylistt Sept.22,2016 Type intType = new TypeToken<Integer>(){}.getType(); loadedID = gson.fromJson(in, intType); } catch (FileNotFoundException e) { // TODO Auto-generated catch block loadedID = 0; } catch (IOException e) { // TODO Auto-generated catch block throw new RuntimeException(); } HabitEventStateManager.getHEStateManager().setID(loadedID); } }
HabiTrack/app/src/main/java/com/example/habitrack/HabitEventController.java
package com.example.habitrack; import android.content.Context; import android.util.Log; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.util.Calendar; import java.util.ArrayList; /** * HabitEventController * * Version 1.0 * * Created by sshussai on 10/21/17. */ public class HabitEventController { /** * This class is the main interface for the habit event entity. It can create a new habit event * and delete it, given a corresponding habit type * */ private Context hectx; private final String FILE_NAME = "habitEvents.sav"; private final String ID_FILE_NAME = "heid.sav"; public HabitEventController(Context ctx){ this.hectx = ctx; } public void createNewHabitEvent(Integer habitTypeID){ HabitTypeController htc = new HabitTypeController(hectx); HabitEvent he = new HabitEvent(HabitEventStateManager.getHEStateManager().getHabitEventID(), habitTypeID); he.setTitle(htc.getHabitTitle(habitTypeID)); HabitEventStateManager.getHEStateManager().storeHabitEvent(he); // Save event on elastic search ElasticSearchController.AddHabitEvent addHabitEvent = new ElasticSearchController.AddHabitEvent(); addHabitEvent.execute(he); // Save event locally saveToFile(); } public void createNewHabitEvent(Integer habitTypeID, String comment){ HabitTypeController htc = new HabitTypeController(hectx); HabitEvent he = new HabitEvent(HabitEventStateManager.getHEStateManager().getHabitEventID(), habitTypeID); he.setTitle(htc.getHabitTitle(habitTypeID)); he.setComment(comment); HabitEventStateManager.getHEStateManager().storeHabitEvent(he); // Save event on elastic search ElasticSearchController.AddHabitEvent addHabitEvent = new ElasticSearchController.AddHabitEvent(); addHabitEvent.execute(he); // Save event locally saveToFile(); } public ArrayList<HabitEvent> getAllHabitEvent(){ return HabitEventStateManager.getHEStateManager().getAllHabitEvents(); } public void updateRecentHabitEvents(){ HabitEventStateManager.getHEStateManager().updateRecentHabitEvents(); } /** * This function returns the list of recent events * @return */ public ArrayList<HabitEvent> getRecentHabitEvents(){ return HabitEventStateManager.getHEStateManager().getRecentHabitevents(); } /** * This function deletes all habit events */ public void deleteAllHabitTypes(){ HabitEventStateManager.getHEStateManager().removeRecentHabitEvents(); HabitEventStateManager.getHEStateManager().removeAllHabitEvents(); // Save event locally saveToFile(); } /** * this function deletes all the habit types scheduled for today */ public void deleteHabitTypesForToday(){ HabitTypeStateManager.getHTStateManager().removeHabitTypesForToday(); } public void deleteHabitEvent(Integer requestedID) { HabitEventStateManager.getHEStateManager().removeHabitEvent(requestedID); // Save event locally saveToFile(); } public HabitEvent getHabitEvent(Integer requestedID) { HabitEvent he = HabitEventStateManager.getHEStateManager().getHabitEvent(requestedID); return he; } public ArrayList<HabitEvent> getHabitEventElasticSearch(){ ArrayList<HabitEvent> he = new ArrayList<>(); ElasticSearchController.GetHabitEvent getHabitEvent = new ElasticSearchController.GetHabitEvent(); getHabitEvent.execute(""); try { he = getHabitEvent.get(); } catch (Exception e) { Log.i("Error","Failed to get the tweets from the async object"); } return he; } public void editHabitEventTitle(Integer requestedID, String newTitle){ HabitEvent he = this.getHabitEvent(requestedID); // If the habit event exists if(!he.getHabitEventID().equals(-1)){ he.setTitle(newTitle); // Save event locally saveToFile(); } } public void editHabitEventComment(Integer requestedID, String newComment){ HabitEvent he = this.getHabitEvent(requestedID); // If the habit event exists if(!he.getHabitEventID().equals(-1)){ he.setComment(newComment); // Save event locally saveToFile(); } } public void editHabitEventDate(Integer requestedID, Calendar newDate){ HabitEvent he = this.getHabitEvent(requestedID); // If the habit event exists if(!he.getHabitEventID().equals(-1)){ he.setDate(newDate); // Save event locally saveToFile(); } } public String getHabitEventTitle(Integer requestedID){ HabitEvent he = this.getHabitEvent(requestedID); // If the habit event exists if(!he.getHabitEventID().equals(-1)){ return he.getTitle(); } else { return ""; } } public String getHabitEventComment(Integer requestedID){ HabitEvent he = this.getHabitEvent(requestedID); // If the habit event exists if(!he.getHabitEventID().equals(-1)){ return he.getComment(); } else { return ""; } } public Calendar getHabitEventDate(Integer requestedID){ HabitEvent he = this.getHabitEvent(requestedID); Calendar cal; // If the habit event exists if(!he.getHabitEventID().equals(-1)){ cal = he.getDate(); } else { cal = Calendar.getInstance(); cal.set(Calendar.YEAR, -1); } return cal; } public Integer getHabitEventID(Integer requestedID){ HabitEvent he = this.getHabitEvent(requestedID); // If the habit event exists if(!he.getHabitEventID().equals(-1)){ return he.getHabitEventID(); } else { return -1; } } public Integer getCorrespondingHabitTypeID(Integer requestedID){ HabitEvent he = this.getHabitEvent(requestedID); // If the habit event exists if(!he.getHabitEventID().equals(-1)){ return he.getHabitTypeID(); } else { return -1; } } @SuppressWarnings("unchecked") public void loadFromFile() { ArrayList<HabitEvent> heList = new ArrayList<HabitEvent>(); try { FileInputStream fis = hectx.openFileInput(FILE_NAME); ObjectInputStream ois = new ObjectInputStream(fis); Object o = ois.readObject(); if (o instanceof ArrayList) { heList = (ArrayList<HabitEvent>) o; } else { Log.i("HabiTrack HE:Load", "Error casting"); } } catch (FileNotFoundException e) { e.printStackTrace(); Log.i("HabiTrack HE:Load", "File Not Found"); HabitEventStateManager.getHEStateManager().setAllHabitEvents(heList); } catch (IOException e) { e.printStackTrace(); Log.i("HabiTrack HE:Load", "IOException"); HabitEventStateManager.getHEStateManager().setAllHabitEvents(heList); } catch (ClassNotFoundException e) { e.printStackTrace(); Log.i("HabiTrack HE:Load", "ClassNotFound"); HabitEventStateManager.getHEStateManager().setAllHabitEvents(heList); } HabitEventStateManager.getHEStateManager().setAllHabitEvents(heList); } public void saveToFile() { ArrayList<HabitEvent> heList = getAllHabitEvent(); try { FileOutputStream fos = hectx.openFileOutput(FILE_NAME, 0); ObjectOutputStream oos = new ObjectOutputStream(fos); oos.writeObject(heList); fos.close(); } catch (FileNotFoundException e) { e.printStackTrace(); Log.i("HabiTrack HE:Save", "File Not Found"); } catch (IOException e) { e.printStackTrace(); Log.i("HabiTrack HE:Save", "IO Exception"); } } public void saveHEID(){ Integer saveID = HabitEventStateManager.getHEStateManager().getIDToSave(); try { FileOutputStream fos = hectx.openFileOutput(ID_FILE_NAME, 0); ObjectOutputStream oos = new ObjectOutputStream(fos); oos.writeObject(saveID); fos.close(); } catch (FileNotFoundException e) { e.printStackTrace(); Log.i("HabiTrack HE:SaveID", "File Not Found"); } catch (IOException e) { e.printStackTrace(); Log.i("HabiTrack HE:SaveID", "IO Exception"); } } @SuppressWarnings("unchecked") public void loadHEID() { //ArrayList<HabitType> htList = new ArrayList<HabitType>(); Integer loadedID = 0; try { FileInputStream fis = hectx.openFileInput(FILE_NAME); ObjectInputStream ois = new ObjectInputStream(fis); Object o = ois.readObject(); if (o instanceof ArrayList) { //htList = (ArrayList<HabitType>) o; loadedID = (Integer) o; } else { Log.i("HabiTrack HE:", "Error casting"); } } catch (FileNotFoundException e) { e.printStackTrace(); Log.i("HabiTrack HE:Load", "File Not Found"); } catch (IOException e) { e.printStackTrace(); Log.i("HabiTrack HE:Load", "IOException"); } catch (ClassNotFoundException e) { e.printStackTrace(); Log.i("HabiTrack HE:Load", "ClassNotFound"); } HabitEventStateManager.getHEStateManager().setID(loadedID); } }
change load/save methods for habit events to use gson. also increment counter for habittype upon event creation
HabiTrack/app/src/main/java/com/example/habitrack/HabitEventController.java
change load/save methods for habit events to use gson. also increment counter for habittype upon event creation
Java
mit
1ae742505a10e3cfad568d0096d066e8ecc606dd
0
freeuni-sdp/arkanoid-16
package ge.edu.freeuni.sdp.arkanoid.model; import java.util.ArrayList; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; /** * Created by nika on 4/2/16. */ public class DeathCountListener implements GobjDeathListener { private AtomicInteger _count; private ArrayList<LevelOverListener> _overListeners; public DeathCountListener(int count) { _count = new AtomicInteger(count); _overListeners = new ArrayList<>(); } public void setLevelOverListener(LevelOverListener listener) { _overListeners.add(listener); } public void setCount(int count) { _count.set(count); } public int getCount() { return _count.get(); } @Override public void gobjDied(Gobj obj, String reason) { int value = _count.addAndGet(-1); if (value == 0) notifyAllLevelOver(); } protected void notifyAllLevelOver() { _overListeners.forEach(LevelOverListener::levelOver); } }
src/main/java/ge/edu/freeuni/sdp/arkanoid/model/DeathCountListener.java
package ge.edu.freeuni.sdp.arkanoid.model; import java.util.ArrayList; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; /** * Created by nika on 4/2/16. */ public class DeathCountListener implements GobjDeathListener { private int _count; private ArrayList<LevelOverListener> _overListeners; private Lock _countLock; public DeathCountListener(int count) { _count = count; _overListeners = new ArrayList<>(); _countLock = new ReentrantLock(); } public void setLevelOverListener(LevelOverListener listener) { _overListeners.add(listener); } public void setCount(int count) { _countLock.lock(); _count = count; _countLock.unlock(); } public int getCount() { int res; _countLock.lock(); res = _count; _countLock.unlock(); return res; } @Override public void gobjDied(Gobj obj, String reason) { boolean notify; _countLock.lock(); _count--; notify = (_count == 0); _countLock.unlock(); if (notify) notifyAllLevelOver(); } protected void notifyAllLevelOver() { _overListeners.forEach(LevelOverListener::levelOver); } }
Change DeathCountListener to use AtomicInteger
src/main/java/ge/edu/freeuni/sdp/arkanoid/model/DeathCountListener.java
Change DeathCountListener to use AtomicInteger
Java
mit
1df5046895915d8518be9bd996f4e16f4ca59694
0
chipster/chipster,chipster/chipster,chipster/chipster,chipster/chipster,chipster/chipster,chipster/chipster,chipster/chipster,chipster/chipster,chipster/chipster,chipster/chipster
package fi.csc.chipster.tools.gbrowser; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import fi.csc.microarray.client.visualisation.methods.gbrowser.fileFormat.ColumnType; import fi.csc.microarray.client.visualisation.methods.gbrowser.fileFormat.ElandParser; import fi.csc.microarray.client.visualisation.methods.gbrowser.fileFormat.FileDefinition; import fi.csc.microarray.client.visualisation.methods.gbrowser.message.BpCoord; import fi.csc.microarray.client.visualisation.methods.gbrowser.message.Chromosome; import fi.csc.microarray.exception.MicroarrayException; public class TsvSorter { private int chrCol; private int bpCol; public void sort(File in, File out, int chrCol, int bpCol) throws Exception { this.chrCol = chrCol; this.bpCol = bpCol; externalSort(in, out); } private class Row extends BpCoord{ public String line; public Row(String line){ super(null, null); this.line = line; String[] splitted = line.split("\t"); String chrStr = splitted.length > chrCol ? splitted[chrCol] : ""; String bpStr = splitted.length > bpCol ? splitted[bpCol] : ""; chr = new Chromosome(chrStr.replace("chr", "").replace(".fa", "")); if(bpStr.equals("")){ bp = -1l; } else { bp = Long.parseLong(bpStr); } } } private void externalSort(File infile, File outfile) throws IOException, MicroarrayException { BufferedReader initReader = new BufferedReader(new FileReader(infile)); ArrayList<Row> rowBatch = new ArrayList<Row>(500000); boolean quit = false; int numFiles = 0; while (!quit) { //showProgress("Reading..."); // limit chunks to 200MB int size = 0; while (size < 200000000) { //while (size < 10000000) { String line = initReader.readLine(); if (line == null) { quit = true; break; } rowBatch.add(new Row(line)); size += line.length(); } //showProgress("Sorting..."); // Use Java's sort. Collections.sort(rowBatch); //showProgress("Writing..."); // write to disk FileWriter fw = new FileWriter(infile + "_chunk" + numFiles); BufferedWriter bw = new BufferedWriter(fw); for (int i = 0; i < rowBatch.size(); i++) { bw.append(rowBatch.get(i).line + "\n"); } bw.close(); numFiles++; rowBatch.clear(); } //showProgress("Merging..."); mergeFiles(infile.getAbsolutePath(), outfile, numFiles); //showProgress("DONE"); initReader.close(); } private void mergeFiles(String inputFilePath, File outputFilePath, int numChunkFiles) throws IOException, MicroarrayException { ArrayList<BufferedReader> mergefbr = new ArrayList<BufferedReader>(); ArrayList<Row> filerows = new ArrayList<Row>(); FileWriter fw = new FileWriter(outputFilePath); BufferedWriter bw = new BufferedWriter(fw); boolean someFileStillHasRows = false; for (int i = 0; i < numChunkFiles; i++) { mergefbr.add(new BufferedReader(new FileReader(inputFilePath + "_chunk" + i))); // get the first row String line = mergefbr.get(i).readLine(); if (line != null) { filerows.add(new Row(line)); someFileStillHasRows = true; } else { filerows.add(null); } } Row row; while (someFileStillHasRows) { Row min; int minIndex = 0; row = filerows.get(0); if (row != null) { min = row; minIndex = 0; } else { min = null; minIndex = -1; } // check which one is min for (int i = 1; i < filerows.size(); i++) { row = filerows.get(i); if (min != null) { if (row != null && (row.compareTo(min) < 0)) { minIndex = i; min = filerows.get(i); } } else { if (row != null) { min = row; minIndex = i; } } } if (minIndex < 0) { someFileStillHasRows = false; } else { // write to the sorted file bw.append(filerows.get(minIndex).line + "\n"); // get another row from the file that had the min String line = mergefbr.get(minIndex).readLine(); if (line != null) { filerows.set(minIndex, new Row(line)); } else { filerows.set(minIndex, null); } } // check if one still has rows for (int i = 0; i < filerows.size(); i++) { someFileStillHasRows = false; if (filerows.get(i) != null) { if (minIndex < 0) { throw new MicroarrayException("Error in sorting: " + "mindex lt 0 and found row not null" + filerows.get(i)); } someFileStillHasRows = true; break; } } // check the actual files one more time if (!someFileStillHasRows) { //write the last one not covered above for (int i = 0; i < filerows.size(); i++) { if (filerows.get(i) == null) { String line = mergefbr.get(i).readLine(); if (line != null) { someFileStillHasRows = true; filerows.set(i, new Row(line)); } } } } } // close all the files bw.close(); fw.close(); for (int i = 0; i < mergefbr.size(); i++) mergefbr.get(i).close(); // Delete all of the chunk files. for (int i = 0; i < numChunkFiles; i++) { File f = new File(inputFilePath + "_chunk" + i); f.delete(); } } public static void main(String[] args) throws Exception { FileDefinition def = new ElandParser().getFileDefinition(); new TsvSorter().sort(new File("infile.txt"), new File("infile.txt"), def.indexOf(ColumnType.CHROMOSOME), def.indexOf(ColumnType.BP_START)); } }
src/main/java/fi/csc/chipster/tools/gbrowser/TsvSorter.java
package fi.csc.chipster.tools.gbrowser; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileReader; import java.io.FileWriter; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import fi.csc.microarray.client.visualisation.methods.gbrowser.message.BpCoord; import fi.csc.microarray.client.visualisation.methods.gbrowser.message.Chromosome; import fi.csc.microarray.exception.MicroarrayException; public class TsvSorter { private int chrCol; private int bpCol; public void sort(File in, File out, int chrCol, int bpCol) throws Exception { this.chrCol = chrCol; this.bpCol = bpCol; externalSort(in, out); } private class Row extends BpCoord{ public String line; public Row(String line){ super(null, null); this.line = line; String[] splitted = line.split("\t"); String chrStr = splitted.length > chrCol ? splitted[chrCol] : ""; String bpStr = splitted.length > bpCol ? splitted[bpCol] : ""; chr = new Chromosome(chrStr.replace("chr", "").replace(".fa", "")); if(bpStr.equals("")){ bp = -1l; } else { bp = Long.parseLong(bpStr); } } } private void externalSort(File infile, File outfile) throws IOException, MicroarrayException { BufferedReader initReader = new BufferedReader(new FileReader(infile)); ArrayList<Row> rowBatch = new ArrayList<Row>(500000); boolean quit = false; int numFiles = 0; while (!quit) { //showProgress("Reading..."); // limit chunks to 200MB int size = 0; while (size < 200000000) { //while (size < 10000000) { String line = initReader.readLine(); if (line == null) { quit = true; break; } rowBatch.add(new Row(line)); size += line.length(); } //showProgress("Sorting..."); // Use Java's sort. Collections.sort(rowBatch); //showProgress("Writing..."); // write to disk FileWriter fw = new FileWriter(infile + "_chunk" + numFiles); BufferedWriter bw = new BufferedWriter(fw); for (int i = 0; i < rowBatch.size(); i++) { bw.append(rowBatch.get(i).line + "\n"); } bw.close(); numFiles++; rowBatch.clear(); } //showProgress("Merging..."); mergeFiles(infile.getAbsolutePath(), outfile, numFiles); //showProgress("DONE"); initReader.close(); } private void mergeFiles(String inputFilePath, File outputFilePath, int numChunkFiles) throws IOException, MicroarrayException { ArrayList<BufferedReader> mergefbr = new ArrayList<BufferedReader>(); ArrayList<Row> filerows = new ArrayList<Row>(); FileWriter fw = new FileWriter(outputFilePath); BufferedWriter bw = new BufferedWriter(fw); boolean someFileStillHasRows = false; for (int i = 0; i < numChunkFiles; i++) { mergefbr.add(new BufferedReader(new FileReader(inputFilePath + "_chunk" + i))); // get the first row String line = mergefbr.get(i).readLine(); if (line != null) { filerows.add(new Row(line)); someFileStillHasRows = true; } else { filerows.add(null); } } Row row; while (someFileStillHasRows) { Row min; int minIndex = 0; row = filerows.get(0); if (row != null) { min = row; minIndex = 0; } else { min = null; minIndex = -1; } // check which one is min for (int i = 1; i < filerows.size(); i++) { row = filerows.get(i); if (min != null) { if (row != null && (row.compareTo(min) < 0)) { minIndex = i; min = filerows.get(i); } } else { if (row != null) { min = row; minIndex = i; } } } if (minIndex < 0) { someFileStillHasRows = false; } else { // write to the sorted file bw.append(filerows.get(minIndex).line + "\n"); // get another row from the file that had the min String line = mergefbr.get(minIndex).readLine(); if (line != null) { filerows.set(minIndex, new Row(line)); } else { filerows.set(minIndex, null); } } // check if one still has rows for (int i = 0; i < filerows.size(); i++) { someFileStillHasRows = false; if (filerows.get(i) != null) { if (minIndex < 0) { throw new MicroarrayException("Error in sorting: " + "mindex lt 0 and found row not null" + filerows.get(i)); } someFileStillHasRows = true; break; } } // check the actual files one more time if (!someFileStillHasRows) { //write the last one not covered above for (int i = 0; i < filerows.size(); i++) { if (filerows.get(i) == null) { String line = mergefbr.get(i).readLine(); if (line != null) { someFileStillHasRows = true; filerows.set(i, new Row(line)); } } } } } // close all the files bw.close(); fw.close(); for (int i = 0; i < mergefbr.size(); i++) mergefbr.get(i).close(); // Delete all of the chunk files. for (int i = 0; i < numChunkFiles; i++) { File f = new File(inputFilePath + "_chunk" + i); f.delete(); } } }
main added for testing
src/main/java/fi/csc/chipster/tools/gbrowser/TsvSorter.java
main added for testing
Java
mit
0ba22035368ce4deb3d9908dff6ed71a41c680f4
0
regini/inSquare,regini/inSquare,regini/inSquare,regini/inSquare,regini/inSquare
package com.nsqre.insquare.Utilities.REST;/* Created by umbertosonnino on 10/3/16 */ import android.content.Context; import android.util.Log; import com.android.volley.Request; import com.android.volley.RequestQueue; import com.android.volley.Response; import com.android.volley.VolleyError; import com.android.volley.toolbox.StringRequest; import com.android.volley.toolbox.Volley; import com.google.gson.GsonBuilder; import com.nsqre.insquare.Square.Square; import com.nsqre.insquare.Square.SquareDeserializer; import java.util.Locale; /** * This class manages the HTTP requests made with Volley from all the Activities andr Fragments of the application */ public class VolleyManager { public interface VolleyResponseListener<E> { // Handler per le risposte // ======================= // Risposta ad una GET void responseGET(E object); // Risposta ad una POST void responsePOST(E object); // Risposta per la PATCH void responsePATCH(E object); // Risposta per la DELETE void responseDELETE(E object); } private static final String TAG = "VolleyManager"; private static VolleyManager instance = null; private static Locale locale; private static final String prefixURL = "http://recapp-insquare.rhcloud.com/"; public RequestQueue requestQueue; private VolleyManager(Context c) { Log.d(TAG, "VolleyManager: just instantiated the object privately!"); requestQueue = Volley.newRequestQueue(c.getApplicationContext()); } public static synchronized VolleyManager getInstance(Context c, Locale l) { if(instance == null) { locale = l; instance = new VolleyManager(c); } Log.d(TAG, "getInstance: returning VolleyManger"); return instance; } public static synchronized VolleyManager getInstance() { if(instance == null) { throw new IllegalStateException(VolleyManager.class.getSimpleName() + " is not initialized, call getInstance(context) first"); } return instance; } public void searchSquaresByName(String query, String userId, double lat, double lon, final VolleyResponseListener listener) { String reqURL = prefixURL + "squares?"; String name = query.replace(" ", "%20"); reqURL += "name=" + name; reqURL += "&lat=" + lat; reqURL += "&lon=" + lon; reqURL += "&userId=" + userId; Log.d(TAG, "searchSquaresByName: " + reqURL); StringRequest searchSquaresByNameRequest = new StringRequest(Request.Method.GET, reqURL, new Response.Listener<String>() { @Override public void onResponse(String response) { Log.d(TAG, "searchSquaresByName: " + response); GsonBuilder builder = new GsonBuilder(); builder.registerTypeAdapter(Square.class, new SquareDeserializer(locale)); Square[] squares = builder.create().fromJson(response, Square[].class); Log.d(TAG, "I found: " + squares.toString()); listener.responseGET(squares); } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { if (error.networkResponse != null) { Log.d(TAG, "onErrorResponse: " + error.toString()); listener.responseGET(null); } } } ); requestQueue.add(searchSquaresByNameRequest); } public void getClosestSquares(String distance, double lat, double lon, VolleyResponseListener listener) { String reqURL = prefixURL + "squares?"; reqURL += "distance=" + distance; reqURL += "&lat=" + lat; reqURL += "&lon=" + lon; Log.d(TAG, "getClosestSquares: " + reqURL); StringRequest closeSquareRequest = new StringRequest(Request.Method.GET, reqURL, new Response.Listener<String>() { @Override public void onResponse(String response) { // SquareDeserializer si occupa di costruire l'oggetto Square in maniera appropriata // new MapFragment.MapFiller().execute(response); } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { if (error.networkResponse != null) { Log.d(TAG, "onErrorResponse: " + error.networkResponse.statusCode); } } } ); requestQueue.add(closeSquareRequest); } public void postSquare(final String squareName, final String squareDescr, final String latitude, final String longitude, final String ownerId, final Locale loc, final VolleyResponseListener listener) { String volleyURL = prefixURL + "squares?"; volleyURL += "name=" + squareName; volleyURL += "&description=" + squareDescr; volleyURL += "&lat=" + latitude; volleyURL += "&lon=" + longitude; volleyURL += "&ownerId=" + ownerId; Log.d(TAG, "postSquare url: " + volleyURL); StringRequest postSquareRequest = new StringRequest(Request.Method.POST, volleyURL, new Response.Listener<String>() { @Override public void onResponse(String response) { Log.d(TAG, "postSquare response: " + response); GsonBuilder builder = new GsonBuilder(); builder.registerTypeAdapter(Square.class, new SquareDeserializer(loc)); Square s = builder.create().fromJson(response, Square.class); listener.responsePOST(s); } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { Log.d(TAG, "onErrorResponse: " + error.toString()); // Risposta in caso di errore e' null listener.responsePOST(null); } } ); requestQueue.add(postSquareRequest); } public void getOwnedSquares(boolean byOwner, String ownerId, final VolleyResponseListener listener) { String volleyURL = prefixURL + "squares?"; volleyURL += "byOwner=" + byOwner; volleyURL += "&ownerId=" + ownerId; Log.d(TAG, "getOwnedSquares url: " + volleyURL ); StringRequest getOwnedRequest = new StringRequest(Request.Method.GET, volleyURL, new Response.Listener<String>() { @Override public void onResponse(String response) { Log.d(TAG, "getOwnedSquares response: " + response); GsonBuilder builder = new GsonBuilder(); builder.registerTypeAdapter(Square.class, new SquareDeserializer(locale)); Square[] squares = builder.create().fromJson(response, Square[].class); Log.d(TAG, "I created: " + squares.toString()); listener.responseGET(squares); } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { Log.d(TAG, "onErrorResponse: " + error.toString()); listener.responseGET(null); } } ); requestQueue.add(getOwnedRequest); } public void getFavs( final String userId, final VolleyResponseListener listener) { String volleyURL = prefixURL + "favouritesquares/"; volleyURL += userId; Log.d(TAG, "getFavs url: " + volleyURL); StringRequest getFavsRequest = new StringRequest( Request.Method.GET, volleyURL, new Response.Listener<String>() { @Override public void onResponse(String response) { Log.d(TAG, "getFavs response: " + response); GsonBuilder builder = new GsonBuilder(); builder.registerTypeAdapter(Square.class, new SquareDeserializer(locale)); Square[] squares = builder.create().fromJson(response, Square[].class); for(Square s: squares) Log.d(TAG, "I obtained: " + s.toString()); listener.responseGET(squares); } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { Log.d(TAG, "onErrorResponse: " + error.toString()); listener.responseGET(null); } } ); requestQueue.add(getFavsRequest); } public void patchDescription( String name, String description, final String squareId, final String ownerId, final VolleyResponseListener listener) { String volleyURL = prefixURL + "squares?"; description = description.replace(" ", "%20"); name = name.replace(" ", "%20"); volleyURL += "name=" + name; volleyURL += "&description=" + description; volleyURL += "&squareId=" + squareId; volleyURL += "&ownerId=" + ownerId; Log.d(TAG, "patchDescr url: " + volleyURL); StringRequest patchDescriptionRequest = new StringRequest( Request.Method.PATCH, volleyURL, new Response.Listener<String>() { @Override public void onResponse(String response) { Log.d(TAG, "patchDescription response: " + response); if(response.toLowerCase().contains("non")) { listener.responsePATCH(false); }else { // Tutto e' andato bene listener.responsePATCH(true); } } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { Log.d(TAG, "onErrorResponse: " + error.toString()); listener.responsePATCH(false); } } ); requestQueue.add(patchDescriptionRequest); } public void deleteSquare( final String squareId, final String ownerId, final VolleyResponseListener listener) { String volleyURL = prefixURL + "squares?"; volleyURL += "&squareId=" + squareId; volleyURL += "&ownerId=" + ownerId; Log.d(TAG, "deleteSquare url: " + volleyURL); StringRequest deleteSquareRequest = new StringRequest( Request.Method.DELETE, volleyURL, new Response.Listener<String>() { @Override public void onResponse(String response) { Log.d(TAG, "deleteSquare response is " + response); if(response.toLowerCase().contains("error")) { listener.responseDELETE(false); } else { listener.responseDELETE(true); } } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { Log.d(TAG, "onErrorResponse: " + error.toString()); listener.responseDELETE(false); } }); requestQueue.add(deleteSquareRequest); } }
inSquareAndroid/app/src/main/java/com/nsqre/insquare/Utilities/REST/VolleyManager.java
package com.nsqre.insquare.Utilities.REST;/* Created by umbertosonnino on 10/3/16 */ import android.content.Context; import android.util.Log; import com.android.volley.Request; import com.android.volley.RequestQueue; import com.android.volley.Response; import com.android.volley.VolleyError; import com.android.volley.toolbox.StringRequest; import com.android.volley.toolbox.Volley; import com.google.gson.GsonBuilder; import com.nsqre.insquare.Square.Square; import com.nsqre.insquare.Square.SquareDeserializer; import java.util.Locale; /** * This class manages the HTTP requests made with Volley from all the Activities andr Fragments of the application */ public class VolleyManager { public interface VolleyResponseListener<E> { // Handler per le risposte // ======================= // Risposta ad una GET void responseGET(E object); // Risposta ad una POST void responsePOST(E object); // Risposta per la PATCH void responsePATCH(E object); // Risposta per la DELETE void responseDELETE(E object); } private static final String TAG = "VolleyManager"; private static VolleyManager instance = null; private static Locale locale; private static final String prefixURL = "http://recapp-insquare.rhcloud.com/"; public RequestQueue requestQueue; private VolleyManager(Context c) { Log.d(TAG, "VolleyManager: just instantiated the object privately!"); requestQueue = Volley.newRequestQueue(c.getApplicationContext()); } public static synchronized VolleyManager getInstance(Context c, Locale l) { if(instance == null) { locale = l; instance = new VolleyManager(c); } Log.d(TAG, "getInstance: returning VolleyManger"); return instance; } public static synchronized VolleyManager getInstance() { if(instance == null) { throw new IllegalStateException(VolleyManager.class.getSimpleName() + " is not initialized, call getInstance(context) first"); } return instance; } public void searchSquaresByName(String query, String userId, double lat, double lon, final VolleyResponseListener listener) { String reqURL = prefixURL + "squares?"; String name = query.replace(" ", "%20"); reqURL += "name=" + name; reqURL += "lat=" + name; reqURL += "name=" + name; Log.d(TAG, "searchSquaresByName: " + reqURL); StringRequest searchSquaresByNameRequest = new StringRequest(Request.Method.GET, reqURL, new Response.Listener<String>() { @Override public void onResponse(String response) { Log.d(TAG, "searchSquaresByName: " + response); GsonBuilder builder = new GsonBuilder(); builder.registerTypeAdapter(Square.class, new SquareDeserializer(locale)); Square[] squares = builder.create().fromJson(response, Square[].class); Log.d(TAG, "I found: " + squares.toString()); listener.responseGET(squares); } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { if (error.networkResponse != null) { Log.d(TAG, "onErrorResponse: " + error.toString()); listener.responseGET(null); } } } ); requestQueue.add(searchSquaresByNameRequest); } public void getClosestSquares(String distance, double lat, double lon, VolleyResponseListener listener) { String reqURL = prefixURL + "squares?"; reqURL += "distance=" + distance; reqURL += "&lat=" + lat; reqURL += "&lon=" + lon; Log.d(TAG, "getClosestSquares: " + reqURL); StringRequest closeSquareRequest = new StringRequest(Request.Method.GET, reqURL, new Response.Listener<String>() { @Override public void onResponse(String response) { // SquareDeserializer si occupa di costruire l'oggetto Square in maniera appropriata // new MapFragment.MapFiller().execute(response); } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { if (error.networkResponse != null) { Log.d(TAG, "onErrorResponse: " + error.networkResponse.statusCode); } } } ); requestQueue.add(closeSquareRequest); } public void postSquare(final String squareName, final String squareDescr, final String latitude, final String longitude, final String ownerId, final Locale loc, final VolleyResponseListener listener) { String volleyURL = prefixURL + "squares?"; volleyURL += "name=" + squareName; volleyURL += "&description=" + squareDescr; volleyURL += "&lat=" + latitude; volleyURL += "&lon=" + longitude; volleyURL += "&ownerId=" + ownerId; Log.d(TAG, "postSquare url: " + volleyURL); StringRequest postSquareRequest = new StringRequest(Request.Method.POST, volleyURL, new Response.Listener<String>() { @Override public void onResponse(String response) { Log.d(TAG, "postSquare response: " + response); GsonBuilder builder = new GsonBuilder(); builder.registerTypeAdapter(Square.class, new SquareDeserializer(loc)); Square s = builder.create().fromJson(response, Square.class); listener.responsePOST(s); } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { Log.d(TAG, "onErrorResponse: " + error.toString()); // Risposta in caso di errore e' null listener.responsePOST(null); } } ); requestQueue.add(postSquareRequest); } public void getOwnedSquares(boolean byOwner, String ownerId, final VolleyResponseListener listener) { String volleyURL = prefixURL + "squares?"; volleyURL += "byOwner=" + byOwner; volleyURL += "&ownerId=" + ownerId; Log.d(TAG, "getOwnedSquares url: " + volleyURL ); StringRequest getOwnedRequest = new StringRequest(Request.Method.GET, volleyURL, new Response.Listener<String>() { @Override public void onResponse(String response) { Log.d(TAG, "getOwnedSquares response: " + response); GsonBuilder builder = new GsonBuilder(); builder.registerTypeAdapter(Square.class, new SquareDeserializer(locale)); Square[] squares = builder.create().fromJson(response, Square[].class); Log.d(TAG, "I created: " + squares.toString()); listener.responseGET(squares); } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { Log.d(TAG, "onErrorResponse: " + error.toString()); listener.responseGET(null); } } ); requestQueue.add(getOwnedRequest); } public void getFavs( final String userId, final VolleyResponseListener listener) { String volleyURL = prefixURL + "favouritesquares/"; volleyURL += userId; Log.d(TAG, "getFavs url: " + volleyURL); StringRequest getFavsRequest = new StringRequest( Request.Method.GET, volleyURL, new Response.Listener<String>() { @Override public void onResponse(String response) { Log.d(TAG, "getFavs response: " + response); GsonBuilder builder = new GsonBuilder(); builder.registerTypeAdapter(Square.class, new SquareDeserializer(locale)); Square[] squares = builder.create().fromJson(response, Square[].class); for(Square s: squares) Log.d(TAG, "I obtained: " + s.toString()); listener.responseGET(squares); } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { Log.d(TAG, "onErrorResponse: " + error.toString()); listener.responseGET(null); } } ); requestQueue.add(getFavsRequest); } public void patchDescription( String name, String description, final String squareId, final String ownerId, final VolleyResponseListener listener) { String volleyURL = prefixURL + "squares?"; description = description.replace(" ", "%20"); name = name.replace(" ", "%20"); volleyURL += "name=" + name; volleyURL += "&description=" + description; volleyURL += "&squareId=" + squareId; volleyURL += "&ownerId=" + ownerId; Log.d(TAG, "patchDescr url: " + volleyURL); StringRequest patchDescriptionRequest = new StringRequest( Request.Method.PATCH, volleyURL, new Response.Listener<String>() { @Override public void onResponse(String response) { Log.d(TAG, "patchDescription response: " + response); if(response.toLowerCase().contains("non")) { listener.responsePATCH(false); }else { // Tutto e' andato bene listener.responsePATCH(true); } } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { Log.d(TAG, "onErrorResponse: " + error.toString()); listener.responsePATCH(false); } } ); requestQueue.add(patchDescriptionRequest); } public void deleteSquare( final String squareId, final String ownerId, final VolleyResponseListener listener) { String volleyURL = prefixURL + "squares?"; volleyURL += "&squareId=" + squareId; volleyURL += "&ownerId=" + ownerId; Log.d(TAG, "deleteSquare url: " + volleyURL); StringRequest deleteSquareRequest = new StringRequest( Request.Method.DELETE, volleyURL, new Response.Listener<String>() { @Override public void onResponse(String response) { Log.d(TAG, "deleteSquare response is " + response); if(response.toLowerCase().contains("error")) { listener.responseDELETE(false); } else { listener.responseDELETE(true); } } }, new Response.ErrorListener() { @Override public void onErrorResponse(VolleyError error) { Log.d(TAG, "onErrorResponse: " + error.toString()); listener.responseDELETE(false); } }); requestQueue.add(deleteSquareRequest); } }
Search query by lat lon and userId
inSquareAndroid/app/src/main/java/com/nsqre/insquare/Utilities/REST/VolleyManager.java
Search query by lat lon and userId
Java
mit
dd6de650fa7c2adebf647b67f1539d27af0ae16c
0
rgabbard-bbn/kbp-2014-event-arguments,BBN-E/tac-kbp-eal,BBN-E/tac-kbp-eal,isi-nlp/tac-kbp-eal,isi-nlp/tac-kbp-eal,rgabbard-bbn/kbp-2014-event-arguments
package com.bbn.kbp.events; import com.bbn.bue.common.Finishable; import com.bbn.bue.common.HasDocID; import com.bbn.bue.common.Inspector; import com.bbn.bue.common.IntIDSequence; import com.bbn.bue.common.StringUtils; import com.bbn.bue.common.TextGroupPackageImmutable; import com.bbn.bue.common.evaluation.AggregateBinaryFScoresInspector; import com.bbn.bue.common.evaluation.BinaryErrorLogger; import com.bbn.bue.common.evaluation.BinaryFScoreBootstrapStrategy; import com.bbn.bue.common.evaluation.BootstrapInspector; import com.bbn.bue.common.evaluation.EquivalenceBasedProvenancedAligner; import com.bbn.bue.common.evaluation.EvalPair; import com.bbn.bue.common.evaluation.InspectionNode; import com.bbn.bue.common.evaluation.InspectorTreeDSL; import com.bbn.bue.common.evaluation.InspectorTreeNode; import com.bbn.bue.common.evaluation.ProvenancedAlignment; import com.bbn.bue.common.evaluation.ScoringEventObserver; import com.bbn.bue.common.files.FileUtils; import com.bbn.bue.common.parameters.Parameters; import com.bbn.bue.common.symbols.Symbol; import com.bbn.bue.common.symbols.SymbolUtils; import com.bbn.kbp.events.ontology.EREToKBPEventOntologyMapper; import com.bbn.kbp.events.ontology.SimpleEventOntologyMapper; import com.bbn.kbp.events2014.CharOffsetSpan; import com.bbn.kbp.events2014.DocumentSystemOutput2015; import com.bbn.kbp.events2014.KBPRealis; import com.bbn.kbp.events2014.Response; import com.bbn.kbp.events2014.ResponseLinking; import com.bbn.kbp.events2014.ResponseSet; import com.bbn.kbp.events2014.SystemOutputLayout; import com.bbn.kbp.events2014.TACKBPEALException; import com.bbn.kbp.events2014.io.SystemOutputStore; import com.bbn.kbp.events2014.transformers.QuoteFilter; import com.bbn.kbp.linking.ExplicitFMeasureInfo; import com.bbn.kbp.linking.LinkF1; import com.bbn.nlp.corenlp.CoreNLPDocument; import com.bbn.nlp.corenlp.CoreNLPParseNode; import com.bbn.nlp.corenlp.CoreNLPXMLLoader; import com.bbn.nlp.corpora.ere.EREArgument; import com.bbn.nlp.corpora.ere.EREDocument; import com.bbn.nlp.corpora.ere.EREEvent; import com.bbn.nlp.corpora.ere.EREEventMention; import com.bbn.nlp.corpora.ere.ERELoader; import com.bbn.nlp.corpora.ere.ERESpan; import com.bbn.nlp.corpora.ere.LinkRealis; import com.bbn.nlp.events.HasEventType; import com.bbn.nlp.parsing.HeadFinders; import com.google.common.base.Charsets; import com.google.common.base.Function; import com.google.common.base.Functions; import com.google.common.base.Optional; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.HashMultiset; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSetMultimap; import com.google.common.collect.ImmutableSortedMap; import com.google.common.collect.Iterables; import com.google.common.collect.Multiset; import com.google.common.collect.Sets; import com.google.common.io.CharSink; import com.google.common.io.Files; import com.google.common.reflect.TypeToken; import com.google.inject.AbstractModule; import com.google.inject.Guice; import com.google.inject.Inject; import com.google.inject.Provides; import com.google.inject.TypeLiteral; import com.google.inject.multibindings.MapBinder; import org.immutables.func.Functional; import org.immutables.value.Value; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; import java.io.PrintWriter; import java.util.Map; import java.util.Random; import java.util.Set; import javax.annotation.Nullable; import static com.bbn.bue.common.evaluation.InspectorTreeDSL.inspect; import static com.bbn.bue.common.evaluation.InspectorTreeDSL.transformBoth; import static com.bbn.bue.common.evaluation.InspectorTreeDSL.transformLeft; import static com.bbn.bue.common.evaluation.InspectorTreeDSL.transformRight; import static com.bbn.bue.common.evaluation.InspectorTreeDSL.transformed; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import static com.google.common.base.Predicates.compose; import static com.google.common.base.Predicates.equalTo; import static com.google.common.base.Predicates.in; import static com.google.common.base.Predicates.not; import static com.google.common.collect.Iterables.concat; import static com.google.common.collect.Iterables.filter; import static com.google.common.collect.Iterables.getFirst; import static com.google.common.collect.Iterables.transform; /** * Scores KBP 2016 event argument output against an ERE gold standard. Scoring is in terms of * (Event Type, Event Role, Entity) tuples. This program is an experimental rough draft and has a * number of limitations: <ul> <li>We only handle arguments which are entity mentions; others are * ignored according to the ERE structure on the gold side and by filtering out a (currently * hardcoded) set of argument roles on the system side.</li> <i>We map system responses to entities * by looking for an entity which has a mention which shares the character offsets of the base * filler exactly either by itself or by its nominal head (given in ERE). In the future we may * implement more lenient alignment strategies.</i> <li> Currently system responses which fail to * align to any entity at all are discarded rather than penalized.</li> </ul> */ public final class ScoreKBPAgainstERE { private static final Logger log = LoggerFactory.getLogger(ScoreKBPAgainstERE.class); private final EREToKBPEventOntologyMapper ontologyMapper; private ScoreKBPAgainstERE() { throw new UnsupportedOperationException(); } // left over from pre-Guice version private final Parameters params; private final ImmutableMap<String, ScoringEventObserver<DocLevelEventArg, DocLevelEventArg>> scoringEventObservers; // we exclude text in quoted regions froms scoring private final QuoteFilter quoteFilter; @Inject ScoreKBPAgainstERE( final Parameters params, final Map<String, ScoringEventObserver<DocLevelEventArg, DocLevelEventArg>> scoringEventObservers, final EREToKBPEventOntologyMapper ontologyMapper, final QuoteFilter quoteFilter) { this.params = checkNotNull(params); // we use a sorted map because the binding of plugins may be non-deterministic this.scoringEventObservers = ImmutableSortedMap.copyOf(scoringEventObservers); this.ontologyMapper = checkNotNull(ontologyMapper); this.quoteFilter = checkNotNull(quoteFilter); } public void go() throws IOException { log.info(params.dump()); final ImmutableSet<Symbol> docIDsToScore = ImmutableSet.copyOf( FileUtils.loadSymbolList(params.getExistingFile("docIDsToScore"))); final ImmutableMap<Symbol, File> goldDocIDToFileMap = FileUtils.loadSymbolToFileMap( Files.asCharSource(params.getExistingFile("goldDocIDToFileMap"), Charsets.UTF_8)); final File outputDir = params.getCreatableDirectory("ereScoringOutput"); final SystemOutputLayout outputLayout = SystemOutputLayout.ParamParser.fromParamVal( params.getString("outputLayout")); final SystemOutputStore outputStore = outputLayout.open(params.getExistingDirectory("systemOutput")); final CoreNLPXMLLoader coreNLPXMLLoader = CoreNLPXMLLoader.builder(HeadFinders.<CoreNLPParseNode>getEnglishPTBHeadFinder()).build(); final boolean relaxUsingCORENLP = params.getBoolean("relaxUsingCoreNLP"); final ImmutableMap<Symbol, File> coreNLPProcessedRawDocs; if (relaxUsingCORENLP) { log.info("Relaxing scoring using CoreNLP"); coreNLPProcessedRawDocs = FileUtils.loadSymbolToFileMap( Files.asCharSource(params.getExistingFile("coreNLPDocIDMap"), Charsets.UTF_8)); } else { coreNLPProcessedRawDocs = ImmutableMap.of(); } log.info("Scoring over {} documents", docIDsToScore.size()); // on the gold side we take an ERE document as input final TypeToken<EREDocument> inputIsEREDoc = new TypeToken<EREDocument>() { }; // on the test side we take an AnswerKey, but we bundle it with the gold ERE document // for use in alignment later final TypeToken<EREDocAndResponses> inputIsEREDocAndAnswerKey = new TypeToken<EREDocAndResponses>() { }; final InspectionNode<EvalPair<EREDocument, EREDocAndResponses>> input = InspectorTreeDSL.pairedInput(inputIsEREDoc, inputIsEREDocAndAnswerKey); // these will extract the scoring tuples from the KBP system input and ERE docs, respectively // we create these here because we will call their .finish method()s // at the end to record some statistics about alignment failures, // so we need to keep references to them final ResponsesAndLinkingFromKBPExtractor responsesAndLinkingFromKBPExtractor = new ResponsesAndLinkingFromKBPExtractor(coreNLPProcessedRawDocs, coreNLPXMLLoader, relaxUsingCORENLP, ontologyMapper, Files.asCharSink(new File(outputDir, "alignmentFailures.txt"), Charsets.UTF_8)); final ResponsesAndLinkingFromEREExtractor responsesAndLinkingFromEREExtractor = new ResponsesAndLinkingFromEREExtractor(EREToKBPEventOntologyMapper.create2016Mapping(), quoteFilter); // this sets it up so that everything fed to input will be scored in various ways setupScoring(input, responsesAndLinkingFromKBPExtractor, responsesAndLinkingFromEREExtractor, scoringEventObservers.values(), outputDir); // we want globally unique IDs here final ERELoader loader = ERELoader.builder().prefixDocIDToAllIDs(true).build(); for (final Symbol docID : docIDsToScore) { final File ereFileName = goldDocIDToFileMap.get(docID); if (ereFileName == null) { throw new RuntimeException("Missing key file for " + docID); } final EREDocument ereDoc = loader.loadFrom(ereFileName); // the LDC provides certain ERE documents with "-kbp" in the name. The -kbp is used by them // internally for some form of tracking but doesn't appear to the world, so we remove it. if (!ereDoc.getDocId().replace("-kbp", "").equals(docID.asString().replace(".kbp", ""))) { log.warn("Fetched document ID {} does not equal stored {}", ereDoc.getDocId(), docID); } final Iterable<Response> responses = filter(outputStore.read(docID).arguments().responses(), bannedRolesFilter); final ResponseLinking linking = ((DocumentSystemOutput2015) outputStore.read(docID)).linking(); linking.copyWithFilteredResponses(in(ImmutableSet.copyOf(responses))); // feed this ERE doc/ KBP output pair to the scoring network input.inspect(EvalPair.of(ereDoc, new EREDocAndResponses(ereDoc, responses, linking))); } // trigger the scoring network to write its summary files input.finish(); // log alignment failures responsesAndLinkingFromKBPExtractor.finish(); responsesAndLinkingFromEREExtractor.finish(); } private static final ImmutableSet<Symbol> BANNED_ROLES = SymbolUtils.setFrom("Position", "Fine", "Sentence"); private static final ImmutableSet<Symbol> ROLES_2016 = SymbolUtils .setFrom("Agent", "Artifact", "Attacker", "Audience", "Beneficiary", "Crime", "Destination", "Entity", "Giver", "Instrument", "Money", "Origin", "Person", "Place", "Position", "Recipient", "Target", "Thing", "Time", "Victim"); private static final ImmutableSet<Symbol> ALLOWED_ROLES_2016 = Sets.difference(ROLES_2016, BANNED_ROLES).immutableCopy(); private static final ImmutableSet<Symbol> linkableRealis = SymbolUtils.setFrom("Other", "Actual"); private static final Predicate<Response> bannedRolesFilter = new Predicate<Response>() { @Override public boolean apply(@Nullable final Response response) { return ALLOWED_ROLES_2016.contains(response.role()); } }; private static Function<EvalPair<? extends Iterable<? extends DocLevelEventArg>, ? extends Iterable<? extends DocLevelEventArg>>, ProvenancedAlignment<DocLevelEventArg, DocLevelEventArg, DocLevelEventArg, DocLevelEventArg>> EXACT_MATCH_ALIGNER = EquivalenceBasedProvenancedAligner .forEquivalenceFunction(Functions.<DocLevelEventArg>identity()) .asFunction(); // this sets up a scoring network which is executed on every input private static void setupScoring( final InspectionNode<EvalPair<EREDocument, EREDocAndResponses>> input, final ResponsesAndLinkingFromKBPExtractor responsesAndLinkingFromKBPExtractor, final ResponsesAndLinkingFromEREExtractor responsesAndLinkingFromEREExtractor, Iterable<? extends ScoringEventObserver<DocLevelEventArg, DocLevelEventArg>> scoringEventObservers, final File outputDir) { final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>> inputAsResponsesAndLinking = transformRight(transformLeft(input, responsesAndLinkingFromEREExtractor), responsesAndLinkingFromKBPExtractor); final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>> filteredFor2016 = InspectorTreeDSL.transformBoth( inputAsResponsesAndLinking, ResponsesAndLinking.filterFunction(ARG_TYPE_IS_ALLOWED_FOR_2016)); final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>> filteredForLifeDie = transformed(filteredFor2016, RestrictLifeInjureToLifeDieEvents.INSTANCE); // set up for event argument scoring in 2015 style eventArgumentScoringSetup(filteredForLifeDie, scoringEventObservers, outputDir); // set up for linking scoring in 2015 style linkingScoringSetup(filteredForLifeDie, outputDir); } private static void eventArgumentScoringSetup( final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>> inputAsResponsesAndLinking, Iterable<? extends ScoringEventObserver<DocLevelEventArg, DocLevelEventArg>> scoringEventObservers, final File outputDir) { final InspectorTreeNode<EvalPair<ImmutableSet<DocLevelEventArg>, ImmutableSet<DocLevelEventArg>>> inputAsSetsOfScoringTuples = transformBoth(inputAsResponsesAndLinking, ResponsesAndLinkingFunctions.args()); final InspectorTreeNode<EvalPair<ImmutableSet<DocLevelEventArg>, ImmutableSet<DocLevelEventArg>>> inputAsSetsOfRealisNeutralizedTuples = transformBoth(inputAsResponsesAndLinking, NeutralizeRealis.INSTANCE); argScoringSetup(inputAsSetsOfScoringTuples, ImmutableList.<ScoringEventObserver<DocLevelEventArg, DocLevelEventArg>>of(), new File(outputDir, "withRealis")); // we apply scoring observers only to the realis neutralized version argScoringSetup(inputAsSetsOfRealisNeutralizedTuples, scoringEventObservers, new File(outputDir, "noRealis")); } private static void argScoringSetup( final InspectorTreeNode<EvalPair<ImmutableSet<DocLevelEventArg>, ImmutableSet<DocLevelEventArg>>> inputAsSetsOfScoringTuples, final Iterable<? extends ScoringEventObserver<DocLevelEventArg, DocLevelEventArg>> scoringEventObservers, final File outputDir) { // require exact match between the system arguments and the key responses final InspectorTreeNode<ProvenancedAlignment<DocLevelEventArg, DocLevelEventArg, DocLevelEventArg, DocLevelEventArg>> alignmentNode = transformed(inputAsSetsOfScoringTuples, EXACT_MATCH_ALIGNER); // overall F score final AggregateBinaryFScoresInspector<DocLevelEventArg, DocLevelEventArg> scoreAndWriteOverallFScore = AggregateBinaryFScoresInspector.createWithScoringObservers("aggregateF.txt", outputDir, scoringEventObservers); inspect(alignmentNode).with(scoreAndWriteOverallFScore); // "arg" score with weighted TP/FP final ArgumentScoringInspector argScorer = ArgumentScoringInspector.createOutputtingTo(outputDir); inspect(alignmentNode).with(argScorer); // log errors final BinaryErrorLogger<HasDocID, HasDocID> logWrongAnswers = BinaryErrorLogger .forStringifierAndOutputDir(Functions.<HasDocID>toStringFunction(), outputDir); inspect(alignmentNode).with(logWrongAnswers); final BinaryFScoreBootstrapStrategy perEventBootstrapStrategy = BinaryFScoreBootstrapStrategy.createBrokenDownBy("EventType", HasEventType.ExtractFunction.INSTANCE, outputDir); final BootstrapInspector breakdownScoresByEventTypeWithBootstrapping = BootstrapInspector.forStrategy(perEventBootstrapStrategy, 1000, new Random(0)); inspect(alignmentNode).with(breakdownScoresByEventTypeWithBootstrapping); } private static void linkingScoringSetup( final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>> inputAsResponsesAndLinking, final File outputDir) { final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>> filteredForRealis = transformBoth(inputAsResponsesAndLinking, ResponsesAndLinking.filterFunction(REALIS_ALLOWED_FOR_LINKING)); // withRealis { final InspectorTreeNode<EvalPair<DocLevelArgLinking, DocLevelArgLinking>> linkingNode = transformBoth(filteredForRealis, ResponsesAndLinkingFunctions.linking()); // we throw out any system responses not found in the key before scoring linking final InspectorTreeNode<EvalPair<DocLevelArgLinking, DocLevelArgLinking>> filteredNode = transformed(linkingNode, RestrictToLinking.INSTANCE); final LinkingInspector linkingInspector = LinkingInspector.createOutputtingTo(new File(outputDir, "withRealis")); inspect(filteredNode).with(linkingInspector); } // without realis { final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>> neutralizedRealis = transformBoth(filteredForRealis, transformArgs(LinkingRealisNeutralizer.INSTANCE)); final InspectorTreeNode<EvalPair<DocLevelArgLinking, DocLevelArgLinking>> linkingNode = transformBoth(neutralizedRealis, ResponsesAndLinkingFunctions.linking()); // we throw out any system responses not found in the key before scoring linking, after neutralizing realis final InspectorTreeNode<EvalPair<DocLevelArgLinking, DocLevelArgLinking>> filteredNode = transformed(linkingNode, RestrictToLinking.INSTANCE); final LinkingInspector linkingInspector = LinkingInspector.createOutputtingTo(new File(outputDir, "noRealis")); inspect(filteredNode).with(linkingInspector); } } private static final Predicate<_DocLevelEventArg> ARG_TYPE_IS_ALLOWED_FOR_2016 = compose(in(ALLOWED_ROLES_2016), DocLevelEventArgFunctions.eventArgumentType()); private static final Predicate<_DocLevelEventArg> REALIS_ALLOWED_FOR_LINKING = compose(in(linkableRealis), DocLevelEventArgFunctions.realis()); private enum RestrictLifeInjureToLifeDieEvents implements Function<EvalPair<ResponsesAndLinking, ResponsesAndLinking>, EvalPair<ResponsesAndLinking, ResponsesAndLinking>> { INSTANCE; final Symbol LifeDie = Symbol.from("Life.Die"); @Override public EvalPair<ResponsesAndLinking, ResponsesAndLinking> apply( final EvalPair<ResponsesAndLinking, ResponsesAndLinking> input) { // find all Life.Die event arguments final ImmutableSet<DocLevelEventArg> keyArgs = ImmutableSet.copyOf(filter(input.key().args(), Predicates.compose(equalTo(LifeDie), DocLevelEventArgFunctions.eventType()))); // get all possible candidate Life.Injure event arguments that could be derived from these Life.Die arguments final ImmutableSet<DocLevelEventArg> argsToIgnore = ImmutableSet.copyOf(transform(keyArgs, LifeDieToLifeInjure.INSTANCE)); // filter both the ERE and the system input to ignore these derived arguments. return EvalPair.of(input.key().filter(not(in(argsToIgnore))), input.test().filter(not(in(argsToIgnore)))); } } private enum LifeDieToLifeInjure implements Function<DocLevelEventArg, DocLevelEventArg> { INSTANCE { final Symbol LifeInjure = Symbol.from("Life.Injure"); @Nullable @Override public DocLevelEventArg apply(@Nullable final DocLevelEventArg docLevelEventArg) { checkNotNull(docLevelEventArg); checkArgument(docLevelEventArg.eventType() .equalTo(RestrictLifeInjureToLifeDieEvents.INSTANCE.LifeDie)); return docLevelEventArg.withEventType(LifeInjure); } } } private static Function<? super ResponsesAndLinking, ResponsesAndLinking> transformArgs( final Function<? super DocLevelEventArg, DocLevelEventArg> transformer) { return new Function<ResponsesAndLinking, ResponsesAndLinking>() { @Override public ResponsesAndLinking apply(final ResponsesAndLinking responsesAndLinking) { return responsesAndLinking.transform(transformer); } }; } private enum LinkingRealisNeutralizer implements Function<DocLevelEventArg, DocLevelEventArg> { INSTANCE; static final Symbol NEUTRALIZED = Symbol.from("neutralized"); @Override public DocLevelEventArg apply(final DocLevelEventArg docLevelEventArg) { return docLevelEventArg.withRealis(NEUTRALIZED); } } private enum NeutralizeRealis implements Function<ResponsesAndLinking, ImmutableSet<DocLevelEventArg>> { INSTANCE; static final Symbol NEUTRALIZED = Symbol.from("neutralized"); @Override public ImmutableSet<DocLevelEventArg> apply(final ResponsesAndLinking input) { final ImmutableSet.Builder<DocLevelEventArg> ret = ImmutableSet.builder(); for (final DocLevelEventArg arg : input.args()) { ret.add(arg.withRealis(NEUTRALIZED)); } return ret.build(); } } private enum RestrictToLinking implements Function<EvalPair<DocLevelArgLinking, DocLevelArgLinking>, EvalPair<DocLevelArgLinking, DocLevelArgLinking>> { INSTANCE; @Override public EvalPair<DocLevelArgLinking, DocLevelArgLinking> apply( final EvalPair<DocLevelArgLinking, DocLevelArgLinking> input) { final DocLevelArgLinking newTest = input.test().filterArguments(in(input.key().allArguments())); return EvalPair.of(input.key(), newTest); } } private static final class ArgumentScoringInspector implements Inspector<ProvenancedAlignment<DocLevelEventArg, DocLevelEventArg, DocLevelEventArg, DocLevelEventArg>> { // beta as defined by the EAL task guidelines. private static final double beta = 0.25; private final File outputDir; private double scoreAggregator = 0.0; private int aggregateTPs = 0; private int aggregateFPs = 0; private int aggregateFNs = 0; final ImmutableMap.Builder<Symbol, Integer> truePositives = ImmutableMap.builder(); final ImmutableMap.Builder<Symbol, Integer> falsePositives = ImmutableMap.builder(); final ImmutableMap.Builder<Symbol, Integer> falseNegatives = ImmutableMap.builder(); final ImmutableMap.Builder<Symbol, Double> scores = ImmutableMap.builder(); private ArgumentScoringInspector(final File outputDir) { this.outputDir = outputDir; } public static ArgumentScoringInspector createOutputtingTo(final File outputDir) { return new ArgumentScoringInspector(outputDir); } @Override public void inspect( final ProvenancedAlignment<DocLevelEventArg, DocLevelEventArg, DocLevelEventArg, DocLevelEventArg> evalPair) { // left is ERE, right is system output. final Iterable<DocLevelEventArg> args = concat(evalPair.allLeftItems(), evalPair.allRightItems()); if (Iterables.size(args) == 0) { log.warn("No output for eval pair {}", evalPair); return; } final Symbol docid = checkNotNull(getFirst(args, null)).docID(); log.info("Gathering arg scores for {}", docid); int docTPs = evalPair.leftAligned().size(); checkArgument(evalPair.leftAligned().equals(evalPair.rightAligned())); this.aggregateTPs += docTPs; int docFPs = evalPair.rightUnaligned().size(); this.aggregateFPs += docFPs; // scores are clipped at 0. double score = Math.max(docTPs - beta * docFPs, 0); int docFNs = evalPair.leftUnaligned().size(); aggregateFNs += docFNs; scoreAggregator += score; truePositives.put(docid, docTPs); falsePositives.put(docid, docFPs); falseNegatives.put(docid, docFNs); scores.put(docid, score); } @Override public void finish() throws IOException { final String scorePattern = "TP: %d, FP: %d, FN: %d, Score: %f\n"; // see guidelines section 7.3.1.1.4 for aggregating rules: // sum over per document contributions, divide by total number of TRFRs in the answer key // Math.max is to skip division by zero errors. final double overAllArgScore = 100 * scoreAggregator / Math.max(0.0 + aggregateFNs + aggregateTPs, 1.0); final String scoreString = String.format(scorePattern, aggregateTPs, aggregateFPs, aggregateFNs, overAllArgScore); Files.asCharSink(new File(outputDir, "argScores.txt"), Charsets.UTF_8).write(scoreString); final ImmutableMap<Symbol, Double> scores = this.scores.build(); final ImmutableMap<Symbol, Integer> falsePositives = this.falsePositives.build(); final ImmutableMap<Symbol, Integer> truePositives = this.truePositives.build(); final ImmutableMap<Symbol, Integer> falseNegatives = this.falseNegatives.build(); for (final Symbol docid : scores.keySet()) { final File docDir = new File(outputDir, docid.asString()); docDir.mkdirs(); final File docScore = new File(docDir, "argScores.txt"); // avoid dividing by zero final double normalizer = Math.max(truePositives.get(docid) + falseNegatives.get(docid), 1); // see guidelines referenced above // pretends that the corpus is a single document Files.asCharSink(docScore, Charsets.UTF_8).write(String .format(scorePattern, truePositives.get(docid), falsePositives.get(docid), falseNegatives.get(docid), 100 * scores.get(docid) / normalizer)); } } } private static final class LinkingInspector implements Inspector<EvalPair<DocLevelArgLinking, DocLevelArgLinking>> { private final File outputDir; private final ImmutableMap.Builder<Symbol, ExplicitFMeasureInfo> countsB = ImmutableMap.builder(); private final ImmutableMap.Builder<Symbol, Integer> predictedCountsB = ImmutableMap.builder(); private final ImmutableMap.Builder<Symbol, Integer> actualCountsB = ImmutableMap.builder(); private final ImmutableMap.Builder<Symbol, Integer> linkingArgsCountB = ImmutableMap.builder(); private LinkingInspector(final File outputDir) { this.outputDir = outputDir; } public static LinkingInspector createOutputtingTo(final File outputFile) { return new LinkingInspector(outputFile); } @Override public void inspect( final EvalPair<DocLevelArgLinking, DocLevelArgLinking> item) { checkArgument(ImmutableSet.copyOf(concat(item.key())).containsAll( ImmutableSet.copyOf(concat(item.test()))), "Must contain only answers in test set!"); if (!item.key().docID().equalTo(item.test().docID())) { log.warn("DocIDs do not match: {} vs {}", item.key().docID(), item.test().docID()); } final ExplicitFMeasureInfo counts = LinkF1.create().score(item.test(), item.key()); final ImmutableSet<DocLevelEventArg> args = ImmutableSet.copyOf(concat( transform(concat(item.test().eventFrames(), item.key().eventFrames()), ScoringEventFrameFunctions.arguments()))); final Symbol docid = item.key().docID(); predictedCountsB.put(docid, ImmutableSet.copyOf(concat(item.test().eventFrames())).size()); actualCountsB.put(docid, ImmutableSet.copyOf(concat(item.key().eventFrames())).size()); countsB.put(docid, counts); linkingArgsCountB.put(docid, args.size()); } @Override public void finish() throws IOException { // copies logic from com.bbn.kbp.events2014.scorer.bin.AggregateResultWriter.computeLinkScores() final ImmutableMap<Symbol, ExplicitFMeasureInfo> counts = countsB.build(); final ImmutableMap<Symbol, Integer> predictedCounts = predictedCountsB.build(); final ImmutableMap<Symbol, Integer> actualCounts = actualCountsB.build(); final ImmutableMap<Symbol, Integer> linkingArgsCounts = linkingArgsCountB.build(); double precision = 0; double recall = 0; double f1 = 0; double linkNormalizerSum = 0; checkNotNull(counts, "Inspect must be called before Finish!"); for (final Symbol docid : counts.keySet()) { final File docOutput = new File(outputDir, docid.asString()); docOutput.mkdirs(); final PrintWriter outputWriter = new PrintWriter(new File(docOutput, "linkingF.txt")); outputWriter.println(counts.get(docid).toString()); outputWriter.close(); precision += counts.get(docid).precision() * predictedCounts.get(docid); recall += counts.get(docid).recall() * actualCounts.get(docid); f1 += counts.get(docid).f1() * actualCounts.get(docid); linkNormalizerSum += linkingArgsCounts.get(docid); } // the normalizer sum can't actually be negative here, but this minimizes divergence with the source logic. double aggregateLinkScore = (linkNormalizerSum > 0.0) ? f1 / linkNormalizerSum : 0.0; double aggregateLinkPrecision = (linkNormalizerSum > 0.0) ? precision / linkNormalizerSum : 0.0; double aggregateLinkRecall = (linkNormalizerSum > 0.0) ? recall / linkNormalizerSum : 0.0; final ExplicitFMeasureInfo aggregate = new ExplicitFMeasureInfo(aggregateLinkPrecision, aggregateLinkRecall, aggregateLinkScore); final PrintWriter outputWriter = new PrintWriter(new File(outputDir, "linkingF.txt")); outputWriter.println(aggregate); outputWriter.close(); } } private enum ERERealisEnum { generic, other, actual, } private enum ArgumentRealis { Generic, Actual, Other } private static final class ResponsesAndLinkingFromEREExtractor implements Function<EREDocument, ResponsesAndLinking>, Finishable { // for tracking things from the answer key discarded due to not being entity mentions private final Multiset<String> allGoldArgs = HashMultiset.create(); private final Multiset<String> discarded = HashMultiset.create(); private final Set<Symbol> unknownEventTypes = Sets.newHashSet(); private final Set<Symbol> unknownEventSubtypes = Sets.newHashSet(); private final Set<Symbol> unknownRoles = Sets.newHashSet(); private final SimpleEventOntologyMapper mapper; private final QuoteFilter quoteFilter; private ResponsesAndLinkingFromEREExtractor(final SimpleEventOntologyMapper mapper, final QuoteFilter quoteFilter) { this.mapper = checkNotNull(mapper); this.quoteFilter = checkNotNull(quoteFilter); } private boolean inQuotedRegion(String docId, ERESpan span) { // the kbp replacement is a hack to handle dry run docids having additional tracking information on them sometimes. return quoteFilter.isInQuote(Symbol.from(docId.replaceAll("-kbp", "")), CharOffsetSpan.of(span.asCharOffsets())); } @Override public ResponsesAndLinking apply(final EREDocument doc) { final ImmutableSet.Builder<DocLevelEventArg> ret = ImmutableSet.builder(); // every event mention argument within a hopper is linked final DocLevelArgLinking.Builder linking = DocLevelArgLinking.builder() .docID(Symbol.from(doc.getDocId())); for (final EREEvent ereEvent : doc.getEvents()) { final ScoringEventFrame.Builder eventFrame = ScoringEventFrame.builder(); boolean addedArg = false; for (final EREEventMention ereEventMention : ereEvent.getEventMentions()) { // events from quoted regions are invalid if (!inQuotedRegion(doc.getDocId(), ereEventMention.getTrigger())) { for (final EREArgument ereArgument : ereEventMention.getArguments()) { if (!inQuotedRegion(doc.getDocId(), ereArgument.getExtent())) { // arguments from quoted regions are invalid final Symbol ereEventMentionType = Symbol.from(ereEventMention.getType()); final Symbol ereEventMentionSubtype = Symbol.from(ereEventMention.getSubtype()); final Symbol ereArgumentRole = Symbol.from(ereArgument.getRole()); final ArgumentRealis argumentRealis = getRealis(ereEventMention.getRealis(), ereArgument.getRealis().get()); boolean skip = false; if (!mapper.eventType(ereEventMentionType).isPresent()) { unknownEventTypes.add(ereEventMentionType); skip = true; } if (!mapper.eventRole(ereArgumentRole).isPresent()) { unknownRoles.add(ereArgumentRole); skip = true; } if (!mapper.eventSubtype(ereEventMentionSubtype).isPresent()) { unknownEventSubtypes.add(ereEventMentionSubtype); skip = true; } if (skip) { continue; } // type.subtype is Response format final String typeRoleKey = mapper.eventType(ereEventMentionType).get() + "." + mapper.eventSubtype(ereEventMentionSubtype).get() + "/" + mapper.eventRole(ereArgumentRole).get(); allGoldArgs.add(typeRoleKey); final DocLevelEventArg arg = DocLevelEventArg.builder().docID(Symbol.from(doc.getDocId())) .eventType(Symbol.from(mapper.eventType(ereEventMentionType).get() + "." + mapper.eventSubtype(ereEventMentionSubtype).get())) .eventArgumentType(mapper.eventRole(ereArgumentRole).get()) .corefID(ScoringUtils.extractScoringEntity(ereArgument, doc).globalID()) .realis(Symbol.from(argumentRealis.name())).build(); ret.add(arg); // ban generic responses from ERE linking. if (!arg.realis().asString().equalsIgnoreCase(ERERealisEnum.generic.name())) { eventFrame.addArguments(arg); addedArg = true; } else { log.debug("Dropping ERE arg {} from linking in {} due to generic realis", arg, ereEventMention); } } else { log.info("Ignoring ERE event mention argument {} as within a quoted region", ereArgument); } } } else { log.info("Ignoring ERE event mention {} as within a quoted region", ereEventMention); } if (addedArg) { linking.addEventFrames(eventFrame.build()); } } } return ResponsesAndLinking.of(ret.build(), linking.build()); } private ArgumentRealis getRealis(final String ERERealis, final LinkRealis linkRealis) { // generic event mention realis overrides everything if (ERERealis.equals(ERERealisEnum.generic.name())) { return ArgumentRealis.Generic; } else { // if the argument is realis if (linkRealis.equals(LinkRealis.REALIS)) { if (ERERealis.equals(ERERealisEnum.other.name())) { return ArgumentRealis.Other; } else if (ERERealis.equals(ERERealisEnum.actual.name())) { return ArgumentRealis.Actual; } else { throw new RuntimeException( "Unknown ERERealis of type " + linkRealis); } } else { // if it's irrealis, override Actual with Other, Other is preserved. Generic is handled above. return ArgumentRealis.Other; } } } @Override public void finish() throws IOException { log.info( "Of {} gold event arguments, {} were discarded as non-entities", allGoldArgs.size(), discarded.size()); for (final String errKey : discarded.elementSet()) { if (discarded.count(errKey) > 0) { log.info("Of {} gold {} arguments, {} discarded ", +allGoldArgs.count(errKey), errKey, discarded.count(errKey)); } } if (!unknownEventTypes.isEmpty()) { log.info("The following ERE event types were ignored as outside the ontology: {}", SymbolUtils.byStringOrdering().immutableSortedCopy(unknownEventTypes)); } if (!unknownEventSubtypes.isEmpty()) { log.info("The following ERE event subtypes were ignored as outside the ontology: {}", SymbolUtils.byStringOrdering().immutableSortedCopy(unknownEventSubtypes)); } if (!unknownRoles.isEmpty()) { log.info("The following ERE event argument roles were ignored as outside the ontology: {}", SymbolUtils.byStringOrdering().immutableSortedCopy(unknownRoles)); } } } private static final class ResponsesAndLinkingFromKBPExtractor implements Function<EREDocAndResponses, ResponsesAndLinking>, Finishable { // each system item which fails to align to any reference item gets put in its own // coreference class, numbered using this sequence private IntIDSequence alignmentFailureIDs = IntIDSequence.startingFrom(0); private ImmutableSetMultimap.Builder<String, String> mentionAlignmentFailuresB = ImmutableSetMultimap.builder(); private Multiset<String> numResponses = HashMultiset.create(); private final ImmutableMap<Symbol, File> ereMapping; private final CoreNLPXMLLoader coreNLPXMLLoader; private final boolean relaxUsingCORENLP; private final EREToKBPEventOntologyMapper ontologyMapper; private final CharSink alignmentFailuresSink; public ResponsesAndLinkingFromKBPExtractor(final Map<Symbol, File> ereMapping, final CoreNLPXMLLoader coreNLPXMLLoader, final boolean relaxUsingCORENLP, final EREToKBPEventOntologyMapper ontologyMapper, final CharSink alignmentFailuresSink) { this.ereMapping = ImmutableMap.copyOf(ereMapping); this.coreNLPXMLLoader = coreNLPXMLLoader; this.relaxUsingCORENLP = relaxUsingCORENLP; this.ontologyMapper = checkNotNull(ontologyMapper); this.alignmentFailuresSink = checkNotNull(alignmentFailuresSink); } public ResponsesAndLinking apply(final EREDocAndResponses input) { final ImmutableSet.Builder<DocLevelEventArg> ret = ImmutableSet.builder(); final Iterable<Response> responses = input.responses(); final EREDocument doc = input.ereDoc(); // Work around LDC document ID inconsistency; -kbp is used internally by the LDC as a form of // document tracking. Externally the difference does not matter so we just normalize the ID final Symbol ereID = Symbol.from(doc.getDocId().replace("-kbp", "")); final Optional<CoreNLPDocument> coreNLPDoc; final EREAligner ereAligner; try { coreNLPDoc = Optional.fromNullable(ereMapping.get(ereID)).isPresent() ? Optional .of(coreNLPXMLLoader.loadFrom(ereMapping.get(ereID))) : Optional.<CoreNLPDocument>absent(); checkState(coreNLPDoc.isPresent() || !relaxUsingCORENLP, "Must have CoreNLP document " + "if using Core NLP relaxation"); ereAligner = EREAligner.create(doc, coreNLPDoc, ontologyMapper); } catch (IOException e) { throw new RuntimeException(e); } final ImmutableMap.Builder<Response, DocLevelEventArg> responseToDocLevelArg = ImmutableMap.builder(); for (final Response response : responses) { final DocLevelEventArg res = resolveToERE(doc, ereAligner, response); ret.add(res); responseToDocLevelArg.put(response, res); } for (final Response response : input.linking().allResponses()) { if (response.realis().equals(KBPRealis.Generic)) { throw new TACKBPEALException("Generic Arguments are not allowed in linking"); } } return fromResponses(ImmutableSet.copyOf(input.responses()), responseToDocLevelArg.build(), input.linking()); } private DocLevelEventArg resolveToERE(final EREDocument doc, final EREAligner ereAligner, final Response response) { numResponses.add(errKey(response)); final Symbol realis = Symbol.from(response.realis().name()); final Optional<ScoringCorefID> alignedCorefIDOpt = ereAligner.argumentForResponse(response); if (!alignedCorefIDOpt.isPresent()) { log.info("Alignment failed for {}", response); mentionAlignmentFailuresB.put(errKey(response), response.toString()); } else if (alignedCorefIDOpt.get().scoringEntityType() .equals(ScoringEntityType.InsufficientEntityLevel)) { log.info("Insufficient entity level for {}", response); } // this increments the alignment failure ID regardless of success or failure, but // we don't care final ScoringCorefID alignedCorefID = alignedCorefIDOpt.or( // in case of alignment failure, we make a pseudo-entity from the CAS offsets // it will always be wrong, but will be consistent for the same extent appearing in // different event roles ScoringCorefID.of(ScoringEntityType.AlignmentFailure, response.canonicalArgument().charOffsetSpan().asCharOffsetRange().toString())); return DocLevelEventArg.builder().docID(Symbol.from(doc.getDocId())) .eventType(response.type()).eventArgumentType(response.role()) .corefID(alignedCorefID.globalID()).realis(realis).build(); } ResponsesAndLinking fromResponses(final ImmutableSet<Response> originalResponses, final ImmutableMap<Response, DocLevelEventArg> responseToDocLevelEventArg, final ResponseLinking responseLinking) { final DocLevelArgLinking.Builder linkingBuilder = DocLevelArgLinking.builder() .docID(responseLinking.docID()); for (final ResponseSet rs : responseLinking.responseSets()) { final ScoringEventFrame.Builder eventFrameBuilder = ScoringEventFrame.builder(); boolean addedArg = false; for (final Response response : rs) { if (responseToDocLevelEventArg.containsKey(response)) { eventFrameBuilder.addArguments(responseToDocLevelEventArg.get(response)); addedArg = true; } } if (addedArg) { linkingBuilder.addEventFrames(eventFrameBuilder.build()); } } return ResponsesAndLinking.of(responseToDocLevelEventArg.values(), linkingBuilder.build()); } public String errKey(Response r) { return r.type() + "/" + r.role(); } public void finish() throws IOException { final ImmutableSetMultimap<String, String> mentionAlignmentFailures = mentionAlignmentFailuresB.build(); log.info( "Of {} system responses, got {} mention alignment failures", numResponses.size(), mentionAlignmentFailures.size()); final StringBuilder msg = new StringBuilder(); for (final String errKey : numResponses.elementSet()) { final ImmutableSet<String> failuresForKey = mentionAlignmentFailures.get(errKey); if (failuresForKey != null) { msg.append("Of ").append(numResponses.count(errKey)).append(errKey) .append(" responses, ").append(failuresForKey.size()) .append(" mention alignment failures:\n") .append(StringUtils.unixNewlineJoiner().join(failuresForKey)).append("\n"); } } alignmentFailuresSink.write(msg.toString()); } } // code for running as a standalone executable public static void main(String[] argv) { // we wrap the main method in this way to // ensure a non-zero return value on failure try { trueMain(argv); } catch (Exception e) { e.printStackTrace(); System.exit(1); } } public static void trueMain(String[] argv) throws IOException { final Parameters params = Parameters.loadSerifStyle(new File(argv[0])); Guice.createInjector(new ScoreKBPAgainstERE.GuiceModule(params)) .getInstance(ScoreKBPAgainstERE.class).go(); } // sets up a plugin architecture for additional scoring observers public static final class GuiceModule extends AbstractModule { private final Parameters params; GuiceModule(final Parameters params) { this.params = checkNotNull(params); } @Override protected void configure() { bind(Parameters.class).toInstance(params); // declare that people can provide scoring observer plugins, even though none are // provided by default MapBinder.newMapBinder(binder(), TypeLiteral.get(String.class), new TypeLiteral<ScoringEventObserver<DocLevelEventArg, DocLevelEventArg>>() { }); try { bind(EREToKBPEventOntologyMapper.class) .toInstance(EREToKBPEventOntologyMapper.create2016Mapping()); } catch (IOException ioe) { throw new TACKBPEALException(ioe); } } @Provides QuoteFilter getQuoteFiler(Parameters params) throws IOException { return QuoteFilter.loadFrom(Files.asByteSource(params.getExistingFile("quoteFilter"))); } } } @Value.Immutable @Functional @TextGroupPackageImmutable abstract class _ResponsesAndLinking { @Value.Parameter public abstract ImmutableSet<DocLevelEventArg> args(); @Value.Parameter public abstract DocLevelArgLinking linking(); @Value.Check protected void check() { checkArgument(args().containsAll(ImmutableSet.copyOf(concat(linking())))); } public final ResponsesAndLinking filter(Predicate<? super DocLevelEventArg> predicate) { return ResponsesAndLinking.of( Iterables.filter(args(), predicate), linking().filterArguments(predicate)); } public final ResponsesAndLinking transform( final Function<? super DocLevelEventArg, DocLevelEventArg> transformer) { return ResponsesAndLinking .of(Iterables.transform(args(), transformer), linking().transformArguments(transformer)); } static final Function<ResponsesAndLinking, ResponsesAndLinking> filterFunction( final Predicate<? super DocLevelEventArg> predicate) { return new Function<ResponsesAndLinking, ResponsesAndLinking>() { @Override public ResponsesAndLinking apply(final ResponsesAndLinking input) { return input.filter(predicate); } }; } } final class EREDocAndResponses { private final EREDocument ereDoc; private final Iterable<Response> responses; private final ResponseLinking linking; public EREDocAndResponses(final EREDocument ereDoc, final Iterable<Response> responses, final ResponseLinking linking) { this.ereDoc = checkNotNull(ereDoc); this.responses = checkNotNull(responses); this.linking = checkNotNull(linking); } public EREDocument ereDoc() { return ereDoc; } public Iterable<Response> responses() { return responses; } public ResponseLinking linking() { return linking; } }
tac-kbp-eal-scorer/src/main/java/com/bbn/kbp/events/ScoreKBPAgainstERE.java
package com.bbn.kbp.events; import com.bbn.bue.common.Finishable; import com.bbn.bue.common.HasDocID; import com.bbn.bue.common.Inspector; import com.bbn.bue.common.IntIDSequence; import com.bbn.bue.common.StringUtils; import com.bbn.bue.common.TextGroupPackageImmutable; import com.bbn.bue.common.evaluation.AggregateBinaryFScoresInspector; import com.bbn.bue.common.evaluation.BinaryErrorLogger; import com.bbn.bue.common.evaluation.BinaryFScoreBootstrapStrategy; import com.bbn.bue.common.evaluation.BootstrapInspector; import com.bbn.bue.common.evaluation.EquivalenceBasedProvenancedAligner; import com.bbn.bue.common.evaluation.EvalPair; import com.bbn.bue.common.evaluation.InspectionNode; import com.bbn.bue.common.evaluation.InspectorTreeDSL; import com.bbn.bue.common.evaluation.InspectorTreeNode; import com.bbn.bue.common.evaluation.ProvenancedAlignment; import com.bbn.bue.common.evaluation.ScoringEventObserver; import com.bbn.bue.common.files.FileUtils; import com.bbn.bue.common.parameters.Parameters; import com.bbn.bue.common.symbols.Symbol; import com.bbn.bue.common.symbols.SymbolUtils; import com.bbn.kbp.events.ontology.EREToKBPEventOntologyMapper; import com.bbn.kbp.events.ontology.SimpleEventOntologyMapper; import com.bbn.kbp.events2014.CharOffsetSpan; import com.bbn.kbp.events2014.DocumentSystemOutput2015; import com.bbn.kbp.events2014.KBPRealis; import com.bbn.kbp.events2014.Response; import com.bbn.kbp.events2014.ResponseLinking; import com.bbn.kbp.events2014.ResponseSet; import com.bbn.kbp.events2014.SystemOutputLayout; import com.bbn.kbp.events2014.TACKBPEALException; import com.bbn.kbp.events2014.io.SystemOutputStore; import com.bbn.kbp.events2014.transformers.QuoteFilter; import com.bbn.kbp.linking.ExplicitFMeasureInfo; import com.bbn.kbp.linking.LinkF1; import com.bbn.nlp.corenlp.CoreNLPDocument; import com.bbn.nlp.corenlp.CoreNLPParseNode; import com.bbn.nlp.corenlp.CoreNLPXMLLoader; import com.bbn.nlp.corpora.ere.EREArgument; import com.bbn.nlp.corpora.ere.EREDocument; import com.bbn.nlp.corpora.ere.EREEvent; import com.bbn.nlp.corpora.ere.EREEventMention; import com.bbn.nlp.corpora.ere.ERELoader; import com.bbn.nlp.corpora.ere.ERESpan; import com.bbn.nlp.corpora.ere.LinkRealis; import com.bbn.nlp.events.HasEventType; import com.bbn.nlp.parsing.HeadFinders; import com.google.common.base.Charsets; import com.google.common.base.Function; import com.google.common.base.Functions; import com.google.common.base.Optional; import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.HashMultiset; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.ImmutableSetMultimap; import com.google.common.collect.ImmutableSortedMap; import com.google.common.collect.Iterables; import com.google.common.collect.Multiset; import com.google.common.collect.Sets; import com.google.common.io.CharSink; import com.google.common.io.Files; import com.google.common.reflect.TypeToken; import com.google.inject.AbstractModule; import com.google.inject.Guice; import com.google.inject.Inject; import com.google.inject.Provides; import com.google.inject.TypeLiteral; import com.google.inject.multibindings.MapBinder; import org.immutables.func.Functional; import org.immutables.value.Value; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.io.IOException; import java.io.PrintWriter; import java.util.Map; import java.util.Random; import java.util.Set; import javax.annotation.Nullable; import static com.bbn.bue.common.evaluation.InspectorTreeDSL.inspect; import static com.bbn.bue.common.evaluation.InspectorTreeDSL.transformBoth; import static com.bbn.bue.common.evaluation.InspectorTreeDSL.transformLeft; import static com.bbn.bue.common.evaluation.InspectorTreeDSL.transformRight; import static com.bbn.bue.common.evaluation.InspectorTreeDSL.transformed; import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; import static com.google.common.base.Preconditions.checkState; import static com.google.common.base.Predicates.compose; import static com.google.common.base.Predicates.equalTo; import static com.google.common.base.Predicates.in; import static com.google.common.base.Predicates.not; import static com.google.common.collect.Iterables.concat; import static com.google.common.collect.Iterables.filter; import static com.google.common.collect.Iterables.getFirst; import static com.google.common.collect.Iterables.transform; /** * Scores KBP 2016 event argument output against an ERE gold standard. Scoring is in terms of * (Event Type, Event Role, Entity) tuples. This program is an experimental rough draft and has a * number of limitations: <ul> <li>We only handle arguments which are entity mentions; others are * ignored according to the ERE structure on the gold side and by filtering out a (currently * hardcoded) set of argument roles on the system side.</li> <i>We map system responses to entities * by looking for an entity which has a mention which shares the character offsets of the base * filler exactly either by itself or by its nominal head (given in ERE). In the future we may * implement more lenient alignment strategies.</i> <li> Currently system responses which fail to * align to any entity at all are discarded rather than penalized.</li> </ul> */ public final class ScoreKBPAgainstERE { private static final Logger log = LoggerFactory.getLogger(ScoreKBPAgainstERE.class); private final EREToKBPEventOntologyMapper ontologyMapper; private ScoreKBPAgainstERE() { throw new UnsupportedOperationException(); } // left over from pre-Guice version private final Parameters params; private final ImmutableMap<String, ScoringEventObserver<DocLevelEventArg, DocLevelEventArg>> scoringEventObservers; // we exclude text in quoted regions froms scoring private final QuoteFilter quoteFilter; @Inject ScoreKBPAgainstERE( final Parameters params, final Map<String, ScoringEventObserver<DocLevelEventArg, DocLevelEventArg>> scoringEventObservers, final EREToKBPEventOntologyMapper ontologyMapper, final QuoteFilter quoteFilter) { this.params = checkNotNull(params); // we use a sorted map because the binding of plugins may be non-deterministic this.scoringEventObservers = ImmutableSortedMap.copyOf(scoringEventObservers); this.ontologyMapper = checkNotNull(ontologyMapper); this.quoteFilter = checkNotNull(quoteFilter); } public void go() throws IOException { log.info(params.dump()); final ImmutableSet<Symbol> docIDsToScore = ImmutableSet.copyOf( FileUtils.loadSymbolList(params.getExistingFile("docIDsToScore"))); final ImmutableMap<Symbol, File> goldDocIDToFileMap = FileUtils.loadSymbolToFileMap( Files.asCharSource(params.getExistingFile("goldDocIDToFileMap"), Charsets.UTF_8)); final File outputDir = params.getCreatableDirectory("ereScoringOutput"); final SystemOutputLayout outputLayout = SystemOutputLayout.ParamParser.fromParamVal( params.getString("outputLayout")); final SystemOutputStore outputStore = outputLayout.open(params.getExistingDirectory("systemOutput")); final CoreNLPXMLLoader coreNLPXMLLoader = CoreNLPXMLLoader.builder(HeadFinders.<CoreNLPParseNode>getEnglishPTBHeadFinder()).build(); final boolean relaxUsingCORENLP = params.getBoolean("relaxUsingCoreNLP"); final ImmutableMap<Symbol, File> coreNLPProcessedRawDocs; if (relaxUsingCORENLP) { log.info("Relaxing scoring using CoreNLP"); coreNLPProcessedRawDocs = FileUtils.loadSymbolToFileMap( Files.asCharSource(params.getExistingFile("coreNLPDocIDMap"), Charsets.UTF_8)); } else { coreNLPProcessedRawDocs = ImmutableMap.of(); } log.info("Scoring over {} documents", docIDsToScore.size()); // on the gold side we take an ERE document as input final TypeToken<EREDocument> inputIsEREDoc = new TypeToken<EREDocument>() { }; // on the test side we take an AnswerKey, but we bundle it with the gold ERE document // for use in alignment later final TypeToken<EREDocAndResponses> inputIsEREDocAndAnswerKey = new TypeToken<EREDocAndResponses>() { }; final InspectionNode<EvalPair<EREDocument, EREDocAndResponses>> input = InspectorTreeDSL.pairedInput(inputIsEREDoc, inputIsEREDocAndAnswerKey); // these will extract the scoring tuples from the KBP system input and ERE docs, respectively // we create these here because we will call their .finish method()s // at the end to record some statistics about alignment failures, // so we need to keep references to them final ResponsesAndLinkingFromKBPExtractor responsesAndLinkingFromKBPExtractor = new ResponsesAndLinkingFromKBPExtractor(coreNLPProcessedRawDocs, coreNLPXMLLoader, relaxUsingCORENLP, ontologyMapper, Files.asCharSink(new File(outputDir, "alignmentFailures.txt"), Charsets.UTF_8)); final ResponsesAndLinkingFromEREExtractor responsesAndLinkingFromEREExtractor = new ResponsesAndLinkingFromEREExtractor(EREToKBPEventOntologyMapper.create2016Mapping(), quoteFilter); // this sets it up so that everything fed to input will be scored in various ways setupScoring(input, responsesAndLinkingFromKBPExtractor, responsesAndLinkingFromEREExtractor, scoringEventObservers.values(), outputDir); // we want globally unique IDs here final ERELoader loader = ERELoader.builder().prefixDocIDToAllIDs(true).build(); for (final Symbol docID : docIDsToScore) { final File ereFileName = goldDocIDToFileMap.get(docID); if (ereFileName == null) { throw new RuntimeException("Missing key file for " + docID); } final EREDocument ereDoc = loader.loadFrom(ereFileName); // the LDC provides certain ERE documents with "-kbp" in the name. The -kbp is used by them // internally for some form of tracking but doesn't appear to the world, so we remove it. if (!ereDoc.getDocId().replace("-kbp", "").equals(docID.asString().replace(".kbp", ""))) { log.warn("Fetched document ID {} does not equal stored {}", ereDoc.getDocId(), docID); } final Iterable<Response> responses = filter(outputStore.read(docID).arguments().responses(), bannedRolesFilter); final ResponseLinking linking = ((DocumentSystemOutput2015) outputStore.read(docID)).linking(); linking.copyWithFilteredResponses(in(ImmutableSet.copyOf(responses))); // feed this ERE doc/ KBP output pair to the scoring network input.inspect(EvalPair.of(ereDoc, new EREDocAndResponses(ereDoc, responses, linking))); } // trigger the scoring network to write its summary files input.finish(); // log alignment failures responsesAndLinkingFromKBPExtractor.finish(); responsesAndLinkingFromEREExtractor.finish(); } private static final ImmutableSet<Symbol> BANNED_ROLES = SymbolUtils.setFrom("Time", "Crime", "Position", "Fine", "Sentence"); private static final ImmutableSet<Symbol> ROLES_2016 = SymbolUtils .setFrom("Agent", "Artifact", "Attacker", "Audience", "Beneficiary", "Crime", "Destination", "Entity", "Giver", "Instrument", "Money", "Origin", "Person", "Place", "Position", "Recipient", "Target", "Thing", "Time", "Victim"); private static final ImmutableSet<Symbol> ALLOWED_ROLES_2016 = Sets.difference(ROLES_2016, BANNED_ROLES).immutableCopy(); private static final ImmutableSet<Symbol> linkableRealis = SymbolUtils.setFrom("Other", "Actual"); private static final Predicate<Response> bannedRolesFilter = new Predicate<Response>() { @Override public boolean apply(@Nullable final Response response) { return ALLOWED_ROLES_2016.contains(response.role()); } }; private static Function<EvalPair<? extends Iterable<? extends DocLevelEventArg>, ? extends Iterable<? extends DocLevelEventArg>>, ProvenancedAlignment<DocLevelEventArg, DocLevelEventArg, DocLevelEventArg, DocLevelEventArg>> EXACT_MATCH_ALIGNER = EquivalenceBasedProvenancedAligner .forEquivalenceFunction(Functions.<DocLevelEventArg>identity()) .asFunction(); // this sets up a scoring network which is executed on every input private static void setupScoring( final InspectionNode<EvalPair<EREDocument, EREDocAndResponses>> input, final ResponsesAndLinkingFromKBPExtractor responsesAndLinkingFromKBPExtractor, final ResponsesAndLinkingFromEREExtractor responsesAndLinkingFromEREExtractor, Iterable<? extends ScoringEventObserver<DocLevelEventArg, DocLevelEventArg>> scoringEventObservers, final File outputDir) { final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>> inputAsResponsesAndLinking = transformRight(transformLeft(input, responsesAndLinkingFromEREExtractor), responsesAndLinkingFromKBPExtractor); final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>> filteredFor2016 = InspectorTreeDSL.transformBoth( inputAsResponsesAndLinking, ResponsesAndLinking.filterFunction(ARG_TYPE_IS_ALLOWED_FOR_2016)); final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>> filteredForLifeDie = transformed(filteredFor2016, RestrictLifeInjureToLifeDieEvents.INSTANCE); // set up for event argument scoring in 2015 style eventArgumentScoringSetup(filteredForLifeDie, scoringEventObservers, outputDir); // set up for linking scoring in 2015 style linkingScoringSetup(filteredForLifeDie, outputDir); } private static void eventArgumentScoringSetup( final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>> inputAsResponsesAndLinking, Iterable<? extends ScoringEventObserver<DocLevelEventArg, DocLevelEventArg>> scoringEventObservers, final File outputDir) { final InspectorTreeNode<EvalPair<ImmutableSet<DocLevelEventArg>, ImmutableSet<DocLevelEventArg>>> inputAsSetsOfScoringTuples = transformBoth(inputAsResponsesAndLinking, ResponsesAndLinkingFunctions.args()); final InspectorTreeNode<EvalPair<ImmutableSet<DocLevelEventArg>, ImmutableSet<DocLevelEventArg>>> inputAsSetsOfRealisNeutralizedTuples = transformBoth(inputAsResponsesAndLinking, NeutralizeRealis.INSTANCE); argScoringSetup(inputAsSetsOfScoringTuples, ImmutableList.<ScoringEventObserver<DocLevelEventArg, DocLevelEventArg>>of(), new File(outputDir, "withRealis")); // we apply scoring observers only to the realis neutralized version argScoringSetup(inputAsSetsOfRealisNeutralizedTuples, scoringEventObservers, new File(outputDir, "noRealis")); } private static void argScoringSetup( final InspectorTreeNode<EvalPair<ImmutableSet<DocLevelEventArg>, ImmutableSet<DocLevelEventArg>>> inputAsSetsOfScoringTuples, final Iterable<? extends ScoringEventObserver<DocLevelEventArg, DocLevelEventArg>> scoringEventObservers, final File outputDir) { // require exact match between the system arguments and the key responses final InspectorTreeNode<ProvenancedAlignment<DocLevelEventArg, DocLevelEventArg, DocLevelEventArg, DocLevelEventArg>> alignmentNode = transformed(inputAsSetsOfScoringTuples, EXACT_MATCH_ALIGNER); // overall F score final AggregateBinaryFScoresInspector<DocLevelEventArg, DocLevelEventArg> scoreAndWriteOverallFScore = AggregateBinaryFScoresInspector.createWithScoringObservers("aggregateF.txt", outputDir, scoringEventObservers); inspect(alignmentNode).with(scoreAndWriteOverallFScore); // "arg" score with weighted TP/FP final ArgumentScoringInspector argScorer = ArgumentScoringInspector.createOutputtingTo(outputDir); inspect(alignmentNode).with(argScorer); // log errors final BinaryErrorLogger<HasDocID, HasDocID> logWrongAnswers = BinaryErrorLogger .forStringifierAndOutputDir(Functions.<HasDocID>toStringFunction(), outputDir); inspect(alignmentNode).with(logWrongAnswers); final BinaryFScoreBootstrapStrategy perEventBootstrapStrategy = BinaryFScoreBootstrapStrategy.createBrokenDownBy("EventType", HasEventType.ExtractFunction.INSTANCE, outputDir); final BootstrapInspector breakdownScoresByEventTypeWithBootstrapping = BootstrapInspector.forStrategy(perEventBootstrapStrategy, 1000, new Random(0)); inspect(alignmentNode).with(breakdownScoresByEventTypeWithBootstrapping); } private static void linkingScoringSetup( final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>> inputAsResponsesAndLinking, final File outputDir) { final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>> filteredForRealis = transformBoth(inputAsResponsesAndLinking, ResponsesAndLinking.filterFunction(REALIS_ALLOWED_FOR_LINKING)); // withRealis { final InspectorTreeNode<EvalPair<DocLevelArgLinking, DocLevelArgLinking>> linkingNode = transformBoth(filteredForRealis, ResponsesAndLinkingFunctions.linking()); // we throw out any system responses not found in the key before scoring linking final InspectorTreeNode<EvalPair<DocLevelArgLinking, DocLevelArgLinking>> filteredNode = transformed(linkingNode, RestrictToLinking.INSTANCE); final LinkingInspector linkingInspector = LinkingInspector.createOutputtingTo(new File(outputDir, "withRealis")); inspect(filteredNode).with(linkingInspector); } // without realis { final InspectorTreeNode<EvalPair<ResponsesAndLinking, ResponsesAndLinking>> neutralizedRealis = transformBoth(filteredForRealis, transformArgs(LinkingRealisNeutralizer.INSTANCE)); final InspectorTreeNode<EvalPair<DocLevelArgLinking, DocLevelArgLinking>> linkingNode = transformBoth(neutralizedRealis, ResponsesAndLinkingFunctions.linking()); // we throw out any system responses not found in the key before scoring linking, after neutralizing realis final InspectorTreeNode<EvalPair<DocLevelArgLinking, DocLevelArgLinking>> filteredNode = transformed(linkingNode, RestrictToLinking.INSTANCE); final LinkingInspector linkingInspector = LinkingInspector.createOutputtingTo(new File(outputDir, "noRealis")); inspect(filteredNode).with(linkingInspector); } } private static final Predicate<_DocLevelEventArg> ARG_TYPE_IS_ALLOWED_FOR_2016 = compose(in(ALLOWED_ROLES_2016), DocLevelEventArgFunctions.eventArgumentType()); private static final Predicate<_DocLevelEventArg> REALIS_ALLOWED_FOR_LINKING = compose(in(linkableRealis), DocLevelEventArgFunctions.realis()); private enum RestrictLifeInjureToLifeDieEvents implements Function<EvalPair<ResponsesAndLinking, ResponsesAndLinking>, EvalPair<ResponsesAndLinking, ResponsesAndLinking>> { INSTANCE; final Symbol LifeDie = Symbol.from("Life.Die"); @Override public EvalPair<ResponsesAndLinking, ResponsesAndLinking> apply( final EvalPair<ResponsesAndLinking, ResponsesAndLinking> input) { // find all Life.Die event arguments final ImmutableSet<DocLevelEventArg> keyArgs = ImmutableSet.copyOf(filter(input.key().args(), Predicates.compose(equalTo(LifeDie), DocLevelEventArgFunctions.eventType()))); // get all possible candidate Life.Injure event arguments that could be derived from these Life.Die arguments final ImmutableSet<DocLevelEventArg> argsToIgnore = ImmutableSet.copyOf(transform(keyArgs, LifeDieToLifeInjure.INSTANCE)); // filter both the ERE and the system input to ignore these derived arguments. return EvalPair.of(input.key().filter(not(in(argsToIgnore))), input.test().filter(not(in(argsToIgnore)))); } } private enum LifeDieToLifeInjure implements Function<DocLevelEventArg, DocLevelEventArg> { INSTANCE { final Symbol LifeInjure = Symbol.from("Life.Injure"); @Nullable @Override public DocLevelEventArg apply(@Nullable final DocLevelEventArg docLevelEventArg) { checkNotNull(docLevelEventArg); checkArgument(docLevelEventArg.eventType() .equalTo(RestrictLifeInjureToLifeDieEvents.INSTANCE.LifeDie)); return docLevelEventArg.withEventType(LifeInjure); } } } private static Function<? super ResponsesAndLinking, ResponsesAndLinking> transformArgs( final Function<? super DocLevelEventArg, DocLevelEventArg> transformer) { return new Function<ResponsesAndLinking, ResponsesAndLinking>() { @Override public ResponsesAndLinking apply(final ResponsesAndLinking responsesAndLinking) { return responsesAndLinking.transform(transformer); } }; } private enum LinkingRealisNeutralizer implements Function<DocLevelEventArg, DocLevelEventArg> { INSTANCE; static final Symbol NEUTRALIZED = Symbol.from("neutralized"); @Override public DocLevelEventArg apply(final DocLevelEventArg docLevelEventArg) { return docLevelEventArg.withRealis(NEUTRALIZED); } } private enum NeutralizeRealis implements Function<ResponsesAndLinking, ImmutableSet<DocLevelEventArg>> { INSTANCE; static final Symbol NEUTRALIZED = Symbol.from("neutralized"); @Override public ImmutableSet<DocLevelEventArg> apply(final ResponsesAndLinking input) { final ImmutableSet.Builder<DocLevelEventArg> ret = ImmutableSet.builder(); for (final DocLevelEventArg arg : input.args()) { ret.add(arg.withRealis(NEUTRALIZED)); } return ret.build(); } } private enum RestrictToLinking implements Function<EvalPair<DocLevelArgLinking, DocLevelArgLinking>, EvalPair<DocLevelArgLinking, DocLevelArgLinking>> { INSTANCE; @Override public EvalPair<DocLevelArgLinking, DocLevelArgLinking> apply( final EvalPair<DocLevelArgLinking, DocLevelArgLinking> input) { final DocLevelArgLinking newTest = input.test().filterArguments(in(input.key().allArguments())); return EvalPair.of(input.key(), newTest); } } private static final class ArgumentScoringInspector implements Inspector<ProvenancedAlignment<DocLevelEventArg, DocLevelEventArg, DocLevelEventArg, DocLevelEventArg>> { // beta as defined by the EAL task guidelines. private static final double beta = 0.25; private final File outputDir; private double scoreAggregator = 0.0; private int aggregateTPs = 0; private int aggregateFPs = 0; private int aggregateFNs = 0; final ImmutableMap.Builder<Symbol, Integer> truePositives = ImmutableMap.builder(); final ImmutableMap.Builder<Symbol, Integer> falsePositives = ImmutableMap.builder(); final ImmutableMap.Builder<Symbol, Integer> falseNegatives = ImmutableMap.builder(); final ImmutableMap.Builder<Symbol, Double> scores = ImmutableMap.builder(); private ArgumentScoringInspector(final File outputDir) { this.outputDir = outputDir; } public static ArgumentScoringInspector createOutputtingTo(final File outputDir) { return new ArgumentScoringInspector(outputDir); } @Override public void inspect( final ProvenancedAlignment<DocLevelEventArg, DocLevelEventArg, DocLevelEventArg, DocLevelEventArg> evalPair) { // left is ERE, right is system output. final Iterable<DocLevelEventArg> args = concat(evalPair.allLeftItems(), evalPair.allRightItems()); if (Iterables.size(args) == 0) { log.warn("No output for eval pair {}", evalPair); return; } final Symbol docid = checkNotNull(getFirst(args, null)).docID(); log.info("Gathering arg scores for {}", docid); int docTPs = evalPair.leftAligned().size(); checkArgument(evalPair.leftAligned().equals(evalPair.rightAligned())); this.aggregateTPs += docTPs; int docFPs = evalPair.rightUnaligned().size(); this.aggregateFPs += docFPs; // scores are clipped at 0. double score = Math.max(docTPs - beta * docFPs, 0); int docFNs = evalPair.leftUnaligned().size(); aggregateFNs += docFNs; scoreAggregator += score; truePositives.put(docid, docTPs); falsePositives.put(docid, docFPs); falseNegatives.put(docid, docFNs); scores.put(docid, score); } @Override public void finish() throws IOException { final String scorePattern = "TP: %d, FP: %d, FN: %d, Score: %f\n"; // see guidelines section 7.3.1.1.4 for aggregating rules: // sum over per document contributions, divide by total number of TRFRs in the answer key // Math.max is to skip division by zero errors. final double overAllArgScore = 100 * scoreAggregator / Math.max(0.0 + aggregateFNs + aggregateTPs, 1.0); final String scoreString = String.format(scorePattern, aggregateTPs, aggregateFPs, aggregateFNs, overAllArgScore); Files.asCharSink(new File(outputDir, "argScores.txt"), Charsets.UTF_8).write(scoreString); final ImmutableMap<Symbol, Double> scores = this.scores.build(); final ImmutableMap<Symbol, Integer> falsePositives = this.falsePositives.build(); final ImmutableMap<Symbol, Integer> truePositives = this.truePositives.build(); final ImmutableMap<Symbol, Integer> falseNegatives = this.falseNegatives.build(); for (final Symbol docid : scores.keySet()) { final File docDir = new File(outputDir, docid.asString()); docDir.mkdirs(); final File docScore = new File(docDir, "argScores.txt"); // avoid dividing by zero final double normalizer = Math.max(truePositives.get(docid) + falseNegatives.get(docid), 1); // see guidelines referenced above // pretends that the corpus is a single document Files.asCharSink(docScore, Charsets.UTF_8).write(String .format(scorePattern, truePositives.get(docid), falsePositives.get(docid), falseNegatives.get(docid), 100 * scores.get(docid) / normalizer)); } } } private static final class LinkingInspector implements Inspector<EvalPair<DocLevelArgLinking, DocLevelArgLinking>> { private final File outputDir; private final ImmutableMap.Builder<Symbol, ExplicitFMeasureInfo> countsB = ImmutableMap.builder(); private final ImmutableMap.Builder<Symbol, Integer> predictedCountsB = ImmutableMap.builder(); private final ImmutableMap.Builder<Symbol, Integer> actualCountsB = ImmutableMap.builder(); private final ImmutableMap.Builder<Symbol, Integer> linkingArgsCountB = ImmutableMap.builder(); private LinkingInspector(final File outputDir) { this.outputDir = outputDir; } public static LinkingInspector createOutputtingTo(final File outputFile) { return new LinkingInspector(outputFile); } @Override public void inspect( final EvalPair<DocLevelArgLinking, DocLevelArgLinking> item) { checkArgument(ImmutableSet.copyOf(concat(item.key())).containsAll( ImmutableSet.copyOf(concat(item.test()))), "Must contain only answers in test set!"); if (!item.key().docID().equalTo(item.test().docID())) { log.warn("DocIDs do not match: {} vs {}", item.key().docID(), item.test().docID()); } final ExplicitFMeasureInfo counts = LinkF1.create().score(item.test(), item.key()); final ImmutableSet<DocLevelEventArg> args = ImmutableSet.copyOf(concat( transform(concat(item.test().eventFrames(), item.key().eventFrames()), ScoringEventFrameFunctions.arguments()))); final Symbol docid = item.key().docID(); predictedCountsB.put(docid, ImmutableSet.copyOf(concat(item.test().eventFrames())).size()); actualCountsB.put(docid, ImmutableSet.copyOf(concat(item.key().eventFrames())).size()); countsB.put(docid, counts); linkingArgsCountB.put(docid, args.size()); } @Override public void finish() throws IOException { // copies logic from com.bbn.kbp.events2014.scorer.bin.AggregateResultWriter.computeLinkScores() final ImmutableMap<Symbol, ExplicitFMeasureInfo> counts = countsB.build(); final ImmutableMap<Symbol, Integer> predictedCounts = predictedCountsB.build(); final ImmutableMap<Symbol, Integer> actualCounts = actualCountsB.build(); final ImmutableMap<Symbol, Integer> linkingArgsCounts = linkingArgsCountB.build(); double precision = 0; double recall = 0; double f1 = 0; double linkNormalizerSum = 0; checkNotNull(counts, "Inspect must be called before Finish!"); for (final Symbol docid : counts.keySet()) { final File docOutput = new File(outputDir, docid.asString()); docOutput.mkdirs(); final PrintWriter outputWriter = new PrintWriter(new File(docOutput, "linkingF.txt")); outputWriter.println(counts.get(docid).toString()); outputWriter.close(); precision += counts.get(docid).precision() * predictedCounts.get(docid); recall += counts.get(docid).recall() * actualCounts.get(docid); f1 += counts.get(docid).f1() * actualCounts.get(docid); linkNormalizerSum += linkingArgsCounts.get(docid); } // the normalizer sum can't actually be negative here, but this minimizes divergence with the source logic. double aggregateLinkScore = (linkNormalizerSum > 0.0) ? f1 / linkNormalizerSum : 0.0; double aggregateLinkPrecision = (linkNormalizerSum > 0.0) ? precision / linkNormalizerSum : 0.0; double aggregateLinkRecall = (linkNormalizerSum > 0.0) ? recall / linkNormalizerSum : 0.0; final ExplicitFMeasureInfo aggregate = new ExplicitFMeasureInfo(aggregateLinkPrecision, aggregateLinkRecall, aggregateLinkScore); final PrintWriter outputWriter = new PrintWriter(new File(outputDir, "linkingF.txt")); outputWriter.println(aggregate); outputWriter.close(); } } private enum ERERealisEnum { generic, other, actual, } private enum ArgumentRealis { Generic, Actual, Other } private static final class ResponsesAndLinkingFromEREExtractor implements Function<EREDocument, ResponsesAndLinking>, Finishable { // for tracking things from the answer key discarded due to not being entity mentions private final Multiset<String> allGoldArgs = HashMultiset.create(); private final Multiset<String> discarded = HashMultiset.create(); private final Set<Symbol> unknownEventTypes = Sets.newHashSet(); private final Set<Symbol> unknownEventSubtypes = Sets.newHashSet(); private final Set<Symbol> unknownRoles = Sets.newHashSet(); private final SimpleEventOntologyMapper mapper; private final QuoteFilter quoteFilter; private ResponsesAndLinkingFromEREExtractor(final SimpleEventOntologyMapper mapper, final QuoteFilter quoteFilter) { this.mapper = checkNotNull(mapper); this.quoteFilter = checkNotNull(quoteFilter); } private boolean inQuotedRegion(String docId, ERESpan span) { // the kbp replacement is a hack to handle dry run docids having additional tracking information on them sometimes. return quoteFilter.isInQuote(Symbol.from(docId.replaceAll("-kbp", "")), CharOffsetSpan.of(span.asCharOffsets())); } @Override public ResponsesAndLinking apply(final EREDocument doc) { final ImmutableSet.Builder<DocLevelEventArg> ret = ImmutableSet.builder(); // every event mention argument within a hopper is linked final DocLevelArgLinking.Builder linking = DocLevelArgLinking.builder() .docID(Symbol.from(doc.getDocId())); for (final EREEvent ereEvent : doc.getEvents()) { final ScoringEventFrame.Builder eventFrame = ScoringEventFrame.builder(); boolean addedArg = false; for (final EREEventMention ereEventMention : ereEvent.getEventMentions()) { // events from quoted regions are invalid if (!inQuotedRegion(doc.getDocId(), ereEventMention.getTrigger())) { for (final EREArgument ereArgument : ereEventMention.getArguments()) { if (!inQuotedRegion(doc.getDocId(), ereArgument.getExtent())) { // arguments from quoted regions are invalid final Symbol ereEventMentionType = Symbol.from(ereEventMention.getType()); final Symbol ereEventMentionSubtype = Symbol.from(ereEventMention.getSubtype()); final Symbol ereArgumentRole = Symbol.from(ereArgument.getRole()); final ArgumentRealis argumentRealis = getRealis(ereEventMention.getRealis(), ereArgument.getRealis().get()); boolean skip = false; if (!mapper.eventType(ereEventMentionType).isPresent()) { unknownEventTypes.add(ereEventMentionType); skip = true; } if (!mapper.eventRole(ereArgumentRole).isPresent()) { unknownRoles.add(ereArgumentRole); skip = true; } if (!mapper.eventSubtype(ereEventMentionSubtype).isPresent()) { unknownEventSubtypes.add(ereEventMentionSubtype); skip = true; } if (skip) { continue; } // type.subtype is Response format final String typeRoleKey = mapper.eventType(ereEventMentionType).get() + "." + mapper.eventSubtype(ereEventMentionSubtype).get() + "/" + mapper.eventRole(ereArgumentRole).get(); allGoldArgs.add(typeRoleKey); final DocLevelEventArg arg = DocLevelEventArg.builder().docID(Symbol.from(doc.getDocId())) .eventType(Symbol.from(mapper.eventType(ereEventMentionType).get() + "." + mapper.eventSubtype(ereEventMentionSubtype).get())) .eventArgumentType(mapper.eventRole(ereArgumentRole).get()) .corefID(ScoringUtils.extractScoringEntity(ereArgument, doc).globalID()) .realis(Symbol.from(argumentRealis.name())).build(); ret.add(arg); // ban generic responses from ERE linking. if (!arg.realis().asString().equalsIgnoreCase(ERERealisEnum.generic.name())) { eventFrame.addArguments(arg); addedArg = true; } else { log.debug("Dropping ERE arg {} from linking in {} due to generic realis", arg, ereEventMention); } } else { log.info("Ignoring ERE event mention argument {} as within a quoted region", ereArgument); } } } else { log.info("Ignoring ERE event mention {} as within a quoted region", ereEventMention); } if (addedArg) { linking.addEventFrames(eventFrame.build()); } } } return ResponsesAndLinking.of(ret.build(), linking.build()); } private ArgumentRealis getRealis(final String ERERealis, final LinkRealis linkRealis) { // generic event mention realis overrides everything if (ERERealis.equals(ERERealisEnum.generic.name())) { return ArgumentRealis.Generic; } else { // if the argument is realis if (linkRealis.equals(LinkRealis.REALIS)) { if (ERERealis.equals(ERERealisEnum.other.name())) { return ArgumentRealis.Other; } else if (ERERealis.equals(ERERealisEnum.actual.name())) { return ArgumentRealis.Actual; } else { throw new RuntimeException( "Unknown ERERealis of type " + linkRealis); } } else { // if it's irrealis, override Actual with Other, Other is preserved. Generic is handled above. return ArgumentRealis.Other; } } } @Override public void finish() throws IOException { log.info( "Of {} gold event arguments, {} were discarded as non-entities", allGoldArgs.size(), discarded.size()); for (final String errKey : discarded.elementSet()) { if (discarded.count(errKey) > 0) { log.info("Of {} gold {} arguments, {} discarded ", +allGoldArgs.count(errKey), errKey, discarded.count(errKey)); } } if (!unknownEventTypes.isEmpty()) { log.info("The following ERE event types were ignored as outside the ontology: {}", SymbolUtils.byStringOrdering().immutableSortedCopy(unknownEventTypes)); } if (!unknownEventSubtypes.isEmpty()) { log.info("The following ERE event subtypes were ignored as outside the ontology: {}", SymbolUtils.byStringOrdering().immutableSortedCopy(unknownEventSubtypes)); } if (!unknownRoles.isEmpty()) { log.info("The following ERE event argument roles were ignored as outside the ontology: {}", SymbolUtils.byStringOrdering().immutableSortedCopy(unknownRoles)); } } } private static final class ResponsesAndLinkingFromKBPExtractor implements Function<EREDocAndResponses, ResponsesAndLinking>, Finishable { // each system item which fails to align to any reference item gets put in its own // coreference class, numbered using this sequence private IntIDSequence alignmentFailureIDs = IntIDSequence.startingFrom(0); private ImmutableSetMultimap.Builder<String, String> mentionAlignmentFailuresB = ImmutableSetMultimap.builder(); private Multiset<String> numResponses = HashMultiset.create(); private final ImmutableMap<Symbol, File> ereMapping; private final CoreNLPXMLLoader coreNLPXMLLoader; private final boolean relaxUsingCORENLP; private final EREToKBPEventOntologyMapper ontologyMapper; private final CharSink alignmentFailuresSink; public ResponsesAndLinkingFromKBPExtractor(final Map<Symbol, File> ereMapping, final CoreNLPXMLLoader coreNLPXMLLoader, final boolean relaxUsingCORENLP, final EREToKBPEventOntologyMapper ontologyMapper, final CharSink alignmentFailuresSink) { this.ereMapping = ImmutableMap.copyOf(ereMapping); this.coreNLPXMLLoader = coreNLPXMLLoader; this.relaxUsingCORENLP = relaxUsingCORENLP; this.ontologyMapper = checkNotNull(ontologyMapper); this.alignmentFailuresSink = checkNotNull(alignmentFailuresSink); } public ResponsesAndLinking apply(final EREDocAndResponses input) { final ImmutableSet.Builder<DocLevelEventArg> ret = ImmutableSet.builder(); final Iterable<Response> responses = input.responses(); final EREDocument doc = input.ereDoc(); // Work around LDC document ID inconsistency; -kbp is used internally by the LDC as a form of // document tracking. Externally the difference does not matter so we just normalize the ID final Symbol ereID = Symbol.from(doc.getDocId().replace("-kbp", "")); final Optional<CoreNLPDocument> coreNLPDoc; final EREAligner ereAligner; try { coreNLPDoc = Optional.fromNullable(ereMapping.get(ereID)).isPresent() ? Optional .of(coreNLPXMLLoader.loadFrom(ereMapping.get(ereID))) : Optional.<CoreNLPDocument>absent(); checkState(coreNLPDoc.isPresent() || !relaxUsingCORENLP, "Must have CoreNLP document " + "if using Core NLP relaxation"); ereAligner = EREAligner.create(doc, coreNLPDoc, ontologyMapper); } catch (IOException e) { throw new RuntimeException(e); } final ImmutableMap.Builder<Response, DocLevelEventArg> responseToDocLevelArg = ImmutableMap.builder(); for (final Response response : responses) { final DocLevelEventArg res = resolveToERE(doc, ereAligner, response); ret.add(res); responseToDocLevelArg.put(response, res); } for (final Response response : input.linking().allResponses()) { if (response.realis().equals(KBPRealis.Generic)) { throw new TACKBPEALException("Generic Arguments are not allowed in linking"); } } return fromResponses(ImmutableSet.copyOf(input.responses()), responseToDocLevelArg.build(), input.linking()); } private DocLevelEventArg resolveToERE(final EREDocument doc, final EREAligner ereAligner, final Response response) { numResponses.add(errKey(response)); final Symbol realis = Symbol.from(response.realis().name()); final Optional<ScoringCorefID> alignedCorefIDOpt = ereAligner.argumentForResponse(response); if (!alignedCorefIDOpt.isPresent()) { log.info("Alignment failed for {}", response); mentionAlignmentFailuresB.put(errKey(response), response.toString()); } else if (alignedCorefIDOpt.get().scoringEntityType() .equals(ScoringEntityType.InsufficientEntityLevel)) { log.info("Insufficient entity level for {}", response); } // this increments the alignment failure ID regardless of success or failure, but // we don't care final ScoringCorefID alignedCorefID = alignedCorefIDOpt.or( // in case of alignment failure, we make a pseudo-entity from the CAS offsets // it will always be wrong, but will be consistent for the same extent appearing in // different event roles ScoringCorefID.of(ScoringEntityType.AlignmentFailure, response.canonicalArgument().charOffsetSpan().asCharOffsetRange().toString())); return DocLevelEventArg.builder().docID(Symbol.from(doc.getDocId())) .eventType(response.type()).eventArgumentType(response.role()) .corefID(alignedCorefID.globalID()).realis(realis).build(); } ResponsesAndLinking fromResponses(final ImmutableSet<Response> originalResponses, final ImmutableMap<Response, DocLevelEventArg> responseToDocLevelEventArg, final ResponseLinking responseLinking) { final DocLevelArgLinking.Builder linkingBuilder = DocLevelArgLinking.builder() .docID(responseLinking.docID()); for (final ResponseSet rs : responseLinking.responseSets()) { final ScoringEventFrame.Builder eventFrameBuilder = ScoringEventFrame.builder(); boolean addedArg = false; for (final Response response : rs) { if (responseToDocLevelEventArg.containsKey(response)) { eventFrameBuilder.addArguments(responseToDocLevelEventArg.get(response)); addedArg = true; } } if (addedArg) { linkingBuilder.addEventFrames(eventFrameBuilder.build()); } } return ResponsesAndLinking.of(responseToDocLevelEventArg.values(), linkingBuilder.build()); } public String errKey(Response r) { return r.type() + "/" + r.role(); } public void finish() throws IOException { final ImmutableSetMultimap<String, String> mentionAlignmentFailures = mentionAlignmentFailuresB.build(); log.info( "Of {} system responses, got {} mention alignment failures", numResponses.size(), mentionAlignmentFailures.size()); final StringBuilder msg = new StringBuilder(); for (final String errKey : numResponses.elementSet()) { final ImmutableSet<String> failuresForKey = mentionAlignmentFailures.get(errKey); if (failuresForKey != null) { msg.append("Of ").append(numResponses.count(errKey)).append(errKey) .append(" responses, ").append(failuresForKey.size()) .append(" mention alignment failures:\n") .append(StringUtils.unixNewlineJoiner().join(failuresForKey)).append("\n"); } } alignmentFailuresSink.write(msg.toString()); } } // code for running as a standalone executable public static void main(String[] argv) { // we wrap the main method in this way to // ensure a non-zero return value on failure try { trueMain(argv); } catch (Exception e) { e.printStackTrace(); System.exit(1); } } public static void trueMain(String[] argv) throws IOException { final Parameters params = Parameters.loadSerifStyle(new File(argv[0])); Guice.createInjector(new ScoreKBPAgainstERE.GuiceModule(params)) .getInstance(ScoreKBPAgainstERE.class).go(); } // sets up a plugin architecture for additional scoring observers public static final class GuiceModule extends AbstractModule { private final Parameters params; GuiceModule(final Parameters params) { this.params = checkNotNull(params); } @Override protected void configure() { bind(Parameters.class).toInstance(params); // declare that people can provide scoring observer plugins, even though none are // provided by default MapBinder.newMapBinder(binder(), TypeLiteral.get(String.class), new TypeLiteral<ScoringEventObserver<DocLevelEventArg, DocLevelEventArg>>() { }); try { bind(EREToKBPEventOntologyMapper.class) .toInstance(EREToKBPEventOntologyMapper.create2016Mapping()); } catch (IOException ioe) { throw new TACKBPEALException(ioe); } } @Provides QuoteFilter getQuoteFiler(Parameters params) throws IOException { return QuoteFilter.loadFrom(Files.asByteSource(params.getExistingFile("quoteFilter"))); } } } @Value.Immutable @Functional @TextGroupPackageImmutable abstract class _ResponsesAndLinking { @Value.Parameter public abstract ImmutableSet<DocLevelEventArg> args(); @Value.Parameter public abstract DocLevelArgLinking linking(); @Value.Check protected void check() { checkArgument(args().containsAll(ImmutableSet.copyOf(concat(linking())))); } public final ResponsesAndLinking filter(Predicate<? super DocLevelEventArg> predicate) { return ResponsesAndLinking.of( Iterables.filter(args(), predicate), linking().filterArguments(predicate)); } public final ResponsesAndLinking transform( final Function<? super DocLevelEventArg, DocLevelEventArg> transformer) { return ResponsesAndLinking .of(Iterables.transform(args(), transformer), linking().transformArguments(transformer)); } static final Function<ResponsesAndLinking, ResponsesAndLinking> filterFunction( final Predicate<? super DocLevelEventArg> predicate) { return new Function<ResponsesAndLinking, ResponsesAndLinking>() { @Override public ResponsesAndLinking apply(final ResponsesAndLinking input) { return input.filter(predicate); } }; } } final class EREDocAndResponses { private final EREDocument ereDoc; private final Iterable<Response> responses; private final ResponseLinking linking; public EREDocAndResponses(final EREDocument ereDoc, final Iterable<Response> responses, final ResponseLinking linking) { this.ereDoc = checkNotNull(ereDoc); this.responses = checkNotNull(responses); this.linking = checkNotNull(linking); } public EREDocument ereDoc() { return ereDoc; } public Iterable<Response> responses() { return responses; } public ResponseLinking linking() { return linking; } }
Restore scoring of Crime and Time These were in the guidelines, but were erroneously omitted
tac-kbp-eal-scorer/src/main/java/com/bbn/kbp/events/ScoreKBPAgainstERE.java
Restore scoring of Crime and Time
Java
mit
9316bef6f4646b4f2016f6489af5e42e71b58859
0
wizzardo/Tools,wizzardo/Tools
package org.bordl.xml; /** * @author: moxa * Date: 12/24/12 */ import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * @author: moxa * Date: 12/23/12 */ public class Node { protected Map<String, String> attributes; protected List<Node> children; protected String name; protected Node parent; private static final Set<String> selfClosedTags = new HashSet<String>(); static { selfClosedTags.add("area"); selfClosedTags.add("base"); selfClosedTags.add("br"); selfClosedTags.add("col"); selfClosedTags.add("command"); selfClosedTags.add("embed"); selfClosedTags.add("hr"); selfClosedTags.add("img"); selfClosedTags.add("input"); selfClosedTags.add("keygen"); selfClosedTags.add("link"); selfClosedTags.add("meta"); selfClosedTags.add("param"); selfClosedTags.add("source"); selfClosedTags.add("track"); selfClosedTags.add("wbr"); selfClosedTags.add("!doctype"); } public Node(String name) { this.name = name; } protected Node() { } public void attribute(String attributeName, String value) { if (attributes == null) attributes = new LinkedHashMap<String, String>(); attributes.put(attributeName, value); } public void attr(String attributeName, String value) { if (attributes == null) attributes = new LinkedHashMap<String, String>(); attributes.put(attributeName, value); } public String attribute(String attributeName) { if (attributes == null) return null; return attributes.get(attributeName); } public String attr(String attributeName) { if (attributes == null) return null; return attributes.get(attributeName); } public Set<String> attributesNames() { return attributes.keySet(); } public Map<String, String> attributes() { if (attributes == null) { attributes = new LinkedHashMap<String, String>(); } return attributes; } public List<Node> children() { if (children == null) { children = new ArrayList<Node>(); } return children; } public Node get(int i) { return children().get(i); } public Node first() { return children().get(0); } public Node last() { return children().get(children.size() - 1); } public boolean isEmpty() { return children == null || children.isEmpty(); } public int level() { int l = 0; Node p = this; while ((p = p.parent) != null) { l++; } return l; } public String offset() { return offset(" "); } public String offset(String step) { StringBuilder sb = new StringBuilder(); int level = level(); for (int i = 0; i < level; i++) { sb.append(step); } return sb.toString(); } public void add(Node node) { if (children == null) children = new ArrayList<Node>(); children.add(node); node.parent = this; } public Node parent() { return parent; } public String toString() { return "node " + name + " attributes: " + attributes + " children: " + children; } public String name() { return name; } public void name(String name) { this.name = name; } public Node get(String path) { if (children == null || path == null) return null; int l = path.indexOf('/'); if (l == -1) { l = path.indexOf("["); XPathExpression exp = null; if (l != -1) { exp = new XPathExpression(path.substring(l, path.length())); path = path.substring(0, l); } for (Node node : children) { if (node.name != null && node.name.equals(path) && (exp == null || exp.check(node))) return node; } } else { String tag = path.substring(0, l); path = path.substring(l + 1); for (Node node : children) { if (node.name != null && node.name.equals(tag)) { Node r = node.get(path); if (r != null) return r; } } } return null; } public List<Node> getAll(String path) { List<Node> list = new ArrayList<Node>(); if (children == null || path == null) return list; int l = path.indexOf('/'); if (l == -1) { l = path.indexOf("["); XPathExpression exp = null; if (l != -1) { exp = new XPathExpression(path.substring(l, path.length())); path = path.substring(0, l); } for (Node node : children) { if (node.name != null && node.name.equals(path) && (exp == null || exp.check(node))) list.add(node); } } else { String tag = path.substring(0, l); path = path.substring(l + 1); for (Node node : children) { if (node.name != null && node.name.equals(tag)) { List<Node> r = node.getAll(path); if (r != null) list.addAll(r); } } } return list; } public Node find(String path) { if (children == null || path == null) return null; for (Node node : children) { if (node.name != null && node.name.equals(path)) return node; Node r = node.find(path); if (r != null) return r; } return null; } public List<Node> findAll(String path) { List<Node> list = new ArrayList<Node>(); if (children == null || path == null) return list; for (Node node : children) { if (node.name != null && node.name.equals(path)) list.add(node); List<Node> r = node.findAll(path); list.addAll(r); } return list; } public String text() { return text(true); } public String textOwn() { return text(false); } protected String text(boolean recursive) { if (children == null || children.isEmpty()) { return null; } StringBuilder sb = new StringBuilder(); for (Node node : children) { if (sb.length() > 0) sb.append(' '); String inner; if (recursive) inner = node.text(recursive); else inner = node.ownText(); if (inner != null) sb.append(inner); } return sb.toString(); } protected String ownText() { return null; } public boolean hasAttr(String attr) { if (attributes == null) return false; return attributes.containsKey(attr); } public int size() { return children == null ? 0 : children.size(); } private static class XPathExpression { private String command, result; private static Pattern pattern = Pattern.compile("\\[([^\\[\\]]+)\\]"); private XPathExpression next; private XPathExpression(String command, String result) { this.command = command; this.result = result; } public XPathExpression(String s) { Matcher m = pattern.matcher(s); while (m.find()) { String[] arr = m.group(1).split("=", 2); String command = arr[0].trim(); String result = arr[1].trim(); if ((result.startsWith("\"") && result.endsWith("\"")) || (result.startsWith("'") && result.endsWith("'"))) { result = result.substring(1, result.length() - 1); } if (this.command == null) { this.command = command; this.result = result; } if (next == null) { next = new XPathExpression(command, result); } else { XPathExpression next = this.next; while (next.next != null) { next = next.next; } next.next = new XPathExpression(command, result); } } } public boolean check(Node n) { boolean b = false; if (command.equals("text()")) { b = result.equals(n.text()); } else if (command.startsWith("@")) { b = result.equals(n.attr(command.substring(1))); } return b && (next == null || next.check(n)); } } public static class TextNode extends Node { protected String text; public TextNode(String text) { this.text = text; } public String toString() { return "textNode: " + text; } protected String text(boolean recursive) { return text; } protected String ownText() { return text; } } public static class CommentNode extends TextNode { public CommentNode(String text) { super(text); } public String toString() { return "commentNode: " + text; } protected String text(boolean recursive) { return "<!-- " + text + " -->"; } protected String ownText() { return "<!-- " + text + " -->"; } } public static Node parse(String s) { return parse(s, false); } public static Node parse(String s, boolean html) { // check first char s = s.trim(); Node xml = new Node(); if (s.startsWith("<?xml ")) { parse(s.toCharArray(), s.indexOf("?>") + 2, xml, html); } else if (html) { int i = 0; Node document = new Node("document"); while ((i = parse(s.toCharArray(), i, xml, html) + 1) < s.length()) { if (xml.name == null && xml.children.size() == 1) xml = xml.children.get(0); document.add(xml); xml = new Node(); } if (xml.name == null && xml.children.size() == 1) xml = xml.children.get(0); document.add(xml); return document; } else { parse(s.toCharArray(), 0, xml, html); } return xml; } public static Node parse(File f) throws IOException { return parse(f, false); } public static Node parse(File f, boolean html) throws IOException { ByteArrayOutputStream bout = new ByteArrayOutputStream(); FileInputStream in = new FileInputStream(f); int r; byte[] b = new byte[10240]; while ((r = in.read(b)) != -1) { bout.write(b, 0, r); } return parse(new String(bout.toByteArray()), html); } private static int parse(char[] s, int from, Node xml, boolean html) { int i = from; StringBuilder sb = new StringBuilder(); boolean inString = false; boolean name = false; boolean end = false; boolean attribute = false; String attributeName = null; boolean checkClose = false; boolean comment = false; boolean inTag = false; outer: while (i < s.length) { switch (s[i]) { case '"': { if (comment) { sb.append(s[i]); break; } boolean switchInString = (i == 0 || s[i - 1] != '\\'); if (!switchInString && inString) { sb.append('"'); } if (switchInString) { inString = !inString; } if (!inString) { xml.attribute(attributeName, sb.toString()); attributeName = null; sb.setLength(0); attribute = false; } break; } case '<': { if (comment) { sb.append(s[i]); break; } if (sb.length() > 0) { xml.add(new TextNode(sb.toString())); sb.setLength(0); } if (xml.name() != null) checkClose = true; name = true; inTag = true; break; } case ' ': { if (comment) { sb.append(s[i]); break; } if (name) { name = false; if (!end) { xml.name(sb.toString()); sb.setLength(0); attribute = true; } } else if (attribute) { attributeName = sb.toString().trim(); if (attributeName.length() > 0) { xml.attribute(attributeName, null); } sb.setLength(0); attribute = false; } if (!inString && inTag) { attribute = true; } else if (sb.length() != 0) { sb.append(' '); } break; } case '=': { if (comment) { sb.append(s[i]); break; } if (attribute) { attributeName = sb.toString().trim(); sb.setLength(0); attribute = false; } else if (inString) { sb.append('='); } break; } case '>': { if (attribute) { attributeName = sb.toString().trim(); sb.setLength(0); } attribute = false; if (comment) { if (sb.charAt(sb.length() - 1) == '-' && sb.charAt(sb.length() - 2) == '-') { xml.add(new CommentNode(sb.substring(2, sb.length() - 2).trim())); sb.setLength(0); comment = false; } else { sb.append('>'); } break; } inTag = false; if (name) { name = false; if (!end) { xml.name(sb.toString()); sb.setLength(0); } else { if (xml.name() == null) { xml.name(sb.toString()); sb.setLength(0); } else if (!sb.toString().equals(xml.name())) throw new IllegalStateException("illegal close tag: " + sb.toString() + ". close tag must be: " + xml.name()); } } if (end) { break outer; } else if (html && selfClosedTags.contains(xml.name().toLowerCase())) { break outer; } break; } case '/': { if (comment) { sb.append(s[i]); break; } if (inString || !inTag) { sb.append('/'); break; } if (attribute) { attributeName = sb.toString().trim(); sb.setLength(0); attribute = false; } end = true; checkClose = false; break; } case '\n': { if (sb.length() != 0) { sb.append('\n'); } break; } default: { if (checkClose && !end) { if (s[i] == '!') { comment = true; inTag = false; } else { Node child = new Node(); i = parse(s, i - 1, child, html); xml.add(child); } checkClose = false; name = false; } else sb.append(s[i]); break; } } i++; } if (attributeName != null && attributeName.length() > 0) { xml.attribute(attributeName, null); } String t; if (sb.length() > 0 && !(t = sb.toString()).equals(xml.name)) { xml.add(new TextNode(t)); sb.setLength(0); } return i; } }
src/org/bordl/xml/Node.java
package org.bordl.xml; /** * @author: moxa * Date: 12/24/12 */ import java.io.ByteArrayOutputStream; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; /** * @author: moxa * Date: 12/23/12 */ public class Node { protected Map<String, String> attributes; protected List<Node> children; protected String name; protected Node parent; private static final Set<String> selfClosedTags = new HashSet<String>(); static { selfClosedTags.add("area"); selfClosedTags.add("base"); selfClosedTags.add("br"); selfClosedTags.add("col"); selfClosedTags.add("command"); selfClosedTags.add("embed"); selfClosedTags.add("hr"); selfClosedTags.add("img"); selfClosedTags.add("input"); selfClosedTags.add("keygen"); selfClosedTags.add("link"); selfClosedTags.add("meta"); selfClosedTags.add("param"); selfClosedTags.add("source"); selfClosedTags.add("track"); selfClosedTags.add("wbr"); selfClosedTags.add("!doctype"); } public Node(String name) { this.name = name; } protected Node() { } public void attribute(String attributeName, String value) { if (attributes == null) attributes = new LinkedHashMap<String, String>(); attributes.put(attributeName, value); } public void attr(String attributeName, String value) { if (attributes == null) attributes = new LinkedHashMap<String, String>(); attributes.put(attributeName, value); } public String attribute(String attributeName) { if (attributes == null) return null; return attributes.get(attributeName); } public String attr(String attributeName) { if (attributes == null) return null; return attributes.get(attributeName); } public Set<String> attributesNames() { return attributes.keySet(); } public Map<String, String> attributes() { if (attributes == null) { attributes = new LinkedHashMap<String, String>(); } return attributes; } public List<Node> children() { if (children == null) { children = new ArrayList<Node>(); } return children; } public Node get(int i) { return children().get(i); } public Node first() { return children().get(0); } public Node last() { return children().get(children.size() - 1); } public boolean isEmpty() { return children == null || children.isEmpty(); } public int level() { int l = 0; Node p = this; while ((p = p.parent) != null) { l++; } return l; } public String offset() { return offset(" "); } public String offset(String step) { StringBuilder sb = new StringBuilder(); int level = level(); for (int i = 0; i < level; i++) { sb.append(step); } return sb.toString(); } public void add(Node node) { if (children == null) children = new ArrayList<Node>(); children.add(node); node.parent = this; } public Node parent() { return parent; } public String toString() { return "node " + name + " attributes: " + attributes + " children: " + children; } public String name() { return name; } public void name(String name) { this.name = name; } public Node get(String path) { if (children == null || path == null) return null; int l = path.indexOf('/'); if (l == -1) { l = path.indexOf("["); XPathExpression exp = null; if (l != -1) { exp = new XPathExpression(path.substring(l, path.length())); path = path.substring(0, l); } for (Node node : children) { if (node.name != null && node.name.equals(path) && (exp == null || exp.check(node))) return node; } } else { String tag = path.substring(0, l); path = path.substring(l + 1); for (Node node : children) { if (node.name != null && node.name.equals(tag)) { Node r = node.get(path); if (r != null) return r; } } } return null; } public List<Node> getAll(String path) { List<Node> list = new ArrayList<Node>(); if (children == null || path == null) return list; int l = path.indexOf('/'); if (l == -1) { l = path.indexOf("["); XPathExpression exp = null; if (l != -1) { exp = new XPathExpression(path.substring(l, path.length())); path = path.substring(0, l); } for (Node node : children) { if (node.name != null && node.name.equals(path) && (exp == null || exp.check(node))) list.add(node); } } else { String tag = path.substring(0, l); path = path.substring(l + 1); for (Node node : children) { if (node.name != null && node.name.equals(tag)) { List<Node> r = node.getAll(path); if (r != null) list.addAll(r); } } } return list; } public Node find(String path) { if (children == null || path == null) return null; for (Node node : children) { if (node.name != null && node.name.equals(path)) return node; Node r = node.find(path); if (r != null) return r; } return null; } public List<Node> findAll(String path) { List<Node> list = new ArrayList<Node>(); if (children == null || path == null) return list; for (Node node : children) { if (node.name != null && node.name.equals(path)) list.add(node); List<Node> r = node.findAll(path); list.addAll(r); } return list; } public String text() { return text(true); } public String textOwn() { return text(false); } protected String text(boolean recursive) { if (children == null || children.isEmpty()) { return null; } StringBuilder sb = new StringBuilder(); for (Node node : children) { if (sb.length() > 0) sb.append(' '); String inner; if (recursive) inner = node.text(recursive); else inner = node.ownText(); if (inner != null) sb.append(inner); } return sb.toString(); } protected String ownText() { return null; } public boolean hasAttr(String attr) { if (attributes == null) return false; return attributes.containsKey(attr); } public int size() { return children == null ? 0 : children.size(); } private static class XPathExpression { private String command, result; private static Pattern pattern = Pattern.compile("\\[([^\\[\\]]+)\\]"); private XPathExpression next; private XPathExpression(String command, String result) { this.command = command; this.result = result; } public XPathExpression(String s) { Matcher m = pattern.matcher(s); while (m.find()) { String[] arr = m.group(1).split("=", 2); String command = arr[0].trim(); String result = arr[1].trim(); if ((result.startsWith("\"") && result.endsWith("\"")) || (result.startsWith("'") && result.endsWith("'"))) { result = result.substring(1, result.length() - 1); } if (this.command == null) { this.command = command; this.result = result; } if (next == null) { next = new XPathExpression(command, result); } else { XPathExpression next = this.next; while (next.next != null) { next = next.next; } next.next = new XPathExpression(command, result); } } } public boolean check(Node n) { boolean b = false; if (command.equals("text()")) { b = result.equals(n.text()); } else if (command.startsWith("@")) { b = result.equals(n.attr(command.substring(1))); } return b && (next == null || next.check(n)); } } public static class TextNode extends Node { protected String text; public TextNode(String text) { this.text = text; } public String toString() { return "textNode: " + text; } protected String text(boolean recursive) { return text; } protected String ownText() { return text; } } public static class CommentNode extends TextNode { public CommentNode(String text) { super(text); } public String toString() { return "commentNode: " + text; } protected String text(boolean recursive) { return "<!-- " + text + " -->"; } protected String ownText() { return "<!-- " + text + " -->"; } } public static Node parse(String s) { return parse(s, false); } public static Node parse(String s, boolean html) { // check first char s = s.trim(); Node xml = new Node(); if (s.startsWith("<?xml ")) { parse(s.toCharArray(), s.indexOf("?>") + 2, xml, html); } else if (html) { int i = 0; Node document = new Node("document"); while ((i = parse(s.toCharArray(), i, xml, html) + 1) < s.length()) { if (xml.name == null && xml.children.size() == 1) xml = xml.children.get(0); document.add(xml); xml = new Node(); } if (xml.name == null && xml.children.size() == 1) xml = xml.children.get(0); document.add(xml); return document; } else { parse(s.toCharArray(), 0, xml, html); } return xml; } public static Node parse(File f) throws IOException { return parse(f, false); } public static Node parse(File f, boolean html) throws IOException { ByteArrayOutputStream bout = new ByteArrayOutputStream(); FileInputStream in = new FileInputStream(f); int r; byte[] b = new byte[10240]; while ((r = in.read(b)) != -1) { bout.write(b, 0, r); } return parse(new String(bout.toByteArray()), html); } private static int parse(char[] s, int from, Node xml, boolean html) { int i = from; StringBuilder sb = new StringBuilder(); boolean inString = false; boolean name = false; boolean end = false; boolean attribute = false; String attributeName = null; boolean checkClose = false; boolean comment = false; boolean inTag = false; outer: while (i < s.length) { switch (s[i]) { case '"': { if (comment) { sb.append(s[i]); break; } boolean switchInString = (i == 0 || s[i - 1] != '\\'); if (!switchInString && inString) { sb.append('"'); } if (switchInString) { inString = !inString; } if (!inString) { xml.attribute(attributeName, sb.toString()); attributeName = null; sb.setLength(0); attribute = false; } break; } case '<': { if (comment) { sb.append(s[i]); break; } if (sb.length() > 0) { xml.add(new TextNode(sb.toString())); sb.setLength(0); } if (xml.name() != null) checkClose = true; name = true; inTag = true; break; } case ' ': { if (comment) { sb.append(s[i]); break; } if (name) { name = false; if (!end) { xml.name(sb.toString()); sb.setLength(0); attribute = true; } } else if (attribute) { attributeName = sb.toString().trim(); if (attributeName.length() > 0) { xml.attribute(attributeName, null); } sb.setLength(0); attribute = false; } if (!inString && inTag) { attribute = true; } else if (sb.length() != 0) { sb.append(' '); } break; } case '=': { if (comment) { sb.append(s[i]); break; } if (attribute) { attributeName = sb.toString().trim(); sb.setLength(0); attribute = false; } else if (inString) { sb.append('='); } break; } case '>': { if (attribute) { attributeName = sb.toString().trim(); sb.setLength(0); } attribute = false; if (comment) { if (sb.charAt(sb.length() - 1) == '-' && sb.charAt(sb.length() - 2) == '-') { xml.add(new CommentNode(sb.substring(2, sb.length() - 2).trim())); sb.setLength(0); comment = false; } else { sb.append('>'); } break; } inTag = false; if (name) { name = false; if (!end) { xml.name(sb.toString()); sb.setLength(0); } else { if (xml.name() == null) { xml.name(sb.toString()); sb.setLength(0); } else if (!sb.toString().equals(xml.name())) throw new IllegalStateException("illegal close tag: " + sb.toString() + ". close tag must be: " + xml.name()); } } if (end) { break outer; } else if (html && selfClosedTags.contains(xml.name().toLowerCase())) { break outer; } break; } case '/': { if (comment) { sb.append(s[i]); break; } if (inString) { sb.append('/'); break; } if (attribute) { attributeName = sb.toString().trim(); sb.setLength(0); attribute = false; } end = true; checkClose = false; break; } case '\n': { if (sb.length() != 0) { sb.append('\n'); } break; } default: { if (checkClose && !end) { if (s[i] == '!') { comment = true; inTag = false; } else { Node child = new Node(); i = parse(s, i - 1, child, html); xml.add(child); } checkClose = false; name = false; } else sb.append(s[i]); break; } } i++; } if (attributeName != null && attributeName.length() > 0) { xml.attribute(attributeName, null); } String t; if (sb.length() > 0 && !(t = sb.toString()).equals(xml.name)) { xml.add(new TextNode(t)); sb.setLength(0); } return i; } }
small fix
src/org/bordl/xml/Node.java
small fix
Java
mit
9a1496d36359ca0de4fed9feff453e19e34c23c6
0
aterai/java-swing-tips,aterai/java-swing-tips,aterai/java-swing-tips,aterai/java-swing-tips
package example; //-*- mode:java; encoding:utf-8 -*- // vim:set fileencoding=utf-8: //@homepage@ import java.awt.*; import java.awt.event.*; import java.awt.image.*; import java.util.Objects; import javax.swing.*; import javax.swing.table.*; public final class MainPanel extends JPanel { private MainPanel() { super(new BorderLayout()); ListModel<IconItem> list = makeIconList(); TableModel model = makeIconTableModel(list); JTable table = new IconTable(model, list); JPanel p = new JPanel(new GridBagLayout()); p.add(table, new GridBagConstraints()); p.setBackground(Color.WHITE); add(p); setPreferredSize(new Dimension(320, 240)); } private static ListModel<IconItem> makeIconList() { DefaultListModel<IconItem> list = new DefaultListModel<>(); list.addElement(new IconItem("wi0009")); list.addElement(new IconItem("wi0054")); list.addElement(new IconItem("wi0062")); list.addElement(new IconItem("wi0063")); list.addElement(new IconItem("wi0064")); list.addElement(new IconItem("wi0096")); list.addElement(new IconItem("wi0111")); list.addElement(new IconItem("wi0122")); list.addElement(new IconItem("wi0124")); return list; } private static <E extends IconItem> TableModel makeIconTableModel(ListModel<E> list) { Object[][] data = { {list.getElementAt(0), list.getElementAt(1), list.getElementAt(2)}, {list.getElementAt(3), list.getElementAt(4), list.getElementAt(5)}, {list.getElementAt(6), list.getElementAt(7), list.getElementAt(8)} }; return new DefaultTableModel(data, null) { @Override public boolean isCellEditable(int row, int column) { return false; } @Override public int getColumnCount() { return 3; } }; } public static void main(String... args) { EventQueue.invokeLater(new Runnable() { @Override public void run() { createAndShowGUI(); } }); } public static void createAndShowGUI() { try { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | UnsupportedLookAndFeelException ex) { ex.printStackTrace(); } JFrame frame = new JFrame("@title@"); frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE); frame.getContentPane().add(new MainPanel()); frame.pack(); frame.setResizable(false); frame.setLocationRelativeTo(null); frame.setVisible(true); } } class IconItem { public final ImageIcon large; public final ImageIcon small; protected IconItem(String str) { large = new ImageIcon(getClass().getResource(str + "-48.png")); small = new ImageIcon(getClass().getResource(str + "-24.png")); } } class IconTableCellRenderer extends DefaultTableCellRenderer { @Override public void updateUI() { super.updateUI(); setHorizontalAlignment(SwingConstants.CENTER); } @Override public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) { setIcon(((IconItem) value).large); return this; } } class IconTable extends JTable { protected static final int XOFF = 4; protected final JList<IconItem> editor; protected final JComponent glassPane = new JComponent() { @Override public void setVisible(boolean flag) { super.setVisible(flag); setFocusTraversalPolicyProvider(flag); setFocusCycleRoot(flag); } @Override protected void paintComponent(Graphics g) { g.setColor(new Color(0x64FFFFFF, true)); g.fillRect(0, 0, getWidth(), getHeight()); BufferedImage bufimg = new BufferedImage(getWidth(), getHeight(), BufferedImage.TYPE_INT_ARGB); Graphics2D g2 = bufimg.createGraphics(); g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); g2.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, .15f)); g2.setPaint(Color.BLACK); Rectangle r = editor.getBounds(); for (int i = 0; i < XOFF; i++) { g2.fillRoundRect(r.x - i, r.y + XOFF, r.width + i + i, r.height - XOFF + i, 5, 5); } g2.dispose(); g.drawImage(bufimg, 0, 0, null); } }; protected IconTable(TableModel model, ListModel<IconItem> list) { super(model); setDefaultRenderer(Object.class, new IconTableCellRenderer()); setSelectionMode(ListSelectionModel.SINGLE_SELECTION); initCellSize(50); addMouseListener(new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { startEditing(); } }); editor = new EditorFromList<>(list); editor.getInputMap(JComponent.WHEN_FOCUSED).put(KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), "cancel-editing"); editor.getActionMap().put("cancel-editing", new AbstractAction() { @Override public void actionPerformed(ActionEvent e) { cancelEditing(); } }); // editor.addKeyListener(new KeyAdapter() { // @Override public void keyPressed(KeyEvent e) { // if (e.getKeyCode() == KeyEvent.VK_ESCAPE) { // cancelEditing(); // } // } // }); editor.addMouseListener(new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { Point p = e.getPoint(); IconItem item = editor.getModel().getElementAt(editor.locationToIndex(p)); setValueAt(item, getSelectedRow(), getSelectedColumn()); cancelEditing(); } }); glassPane.addMouseListener(new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { // Point pt = e.getPoint(); // if (!editor.getBounds().contains(pt)) { // cancelEditing(); // } cancelEditing(); } }); glassPane.setFocusTraversalPolicy(new DefaultFocusTraversalPolicy() { @Override public boolean accept(Component c) { return Objects.equals(c, editor); } }); glassPane.add(editor); glassPane.setVisible(false); } private void initCellSize(int size) { setRowHeight(size); JTableHeader tableHeader = getTableHeader(); tableHeader.setResizingAllowed(false); tableHeader.setReorderingAllowed(false); TableColumnModel m = getColumnModel(); for (int i = 0; i < m.getColumnCount(); i++) { TableColumn col = m.getColumn(i); col.setMinWidth(size); col.setMaxWidth(size); } setBorder(BorderFactory.createLineBorder(Color.BLACK)); } public void startEditing() { getRootPane().setGlassPane(glassPane); Dimension d = editor.getPreferredSize(); editor.setSize(d); int sr = getSelectedRow(); int sc = getSelectedColumn(); Rectangle r = getCellRect(sr, sc, true); Point p = SwingUtilities.convertPoint(this, r.getLocation(), glassPane); p.translate((r.width - d.width) / 2, (r.height - d.height) / 2); editor.setLocation(p); glassPane.setVisible(true); editor.setSelectedValue(getValueAt(sr, sc), true); editor.requestFocusInWindow(); } protected void cancelEditing() { glassPane.setVisible(false); } } class EditorFromList<E extends IconItem> extends JList<E> { private static final int INS = 2; private final Dimension dim; protected transient RollOverListener handler; protected int rollOverRowIndex = -1; protected EditorFromList(ListModel<E> model) { super(model); ImageIcon icon = model.getElementAt(0).small; int iw = INS + icon.getIconWidth(); int ih = INS + icon.getIconHeight(); dim = new Dimension(iw * 3 + INS, ih * 3 + INS); setFixedCellWidth(iw); setFixedCellHeight(ih); } @Override public Dimension getPreferredSize() { return dim; } @Override public void updateUI() { removeMouseMotionListener(handler); removeMouseListener(handler); super.updateUI(); handler = new RollOverListener(); addMouseMotionListener(handler); addMouseListener(handler); setBorder(BorderFactory.createLineBorder(Color.BLACK)); setLayoutOrientation(JList.HORIZONTAL_WRAP); setVisibleRowCount(0); setCellRenderer(new ListCellRenderer<IconItem>() { private final JLabel label = new JLabel(); private final Color selctedColor = new Color(0xC8C8FF); @Override public Component getListCellRendererComponent(JList<? extends IconItem> list, IconItem value, int index, boolean isSelected, boolean cellHasFocus) { label.setOpaque(true); label.setHorizontalAlignment(SwingConstants.CENTER); if (index == rollOverRowIndex) { label.setBackground(getSelectionBackground()); } else if (isSelected) { label.setBackground(selctedColor); } else { label.setBackground(getBackground()); } label.setIcon(value.small); return label; } }); } private class RollOverListener extends MouseAdapter { @Override public void mouseExited(MouseEvent e) { rollOverRowIndex = -1; repaint(); } @Override public void mouseMoved(MouseEvent e) { int row = locationToIndex(e.getPoint()); if (row != rollOverRowIndex) { rollOverRowIndex = row; repaint(); } } } }
LargeCellEditor/src/java/example/MainPanel.java
package example; //-*- mode:java; encoding:utf-8 -*- // vim:set fileencoding=utf-8: //@homepage@ import java.awt.*; import java.awt.event.*; import java.awt.image.*; import java.util.Objects; import javax.swing.*; import javax.swing.table.*; public final class MainPanel extends JPanel { public MainPanel() { super(new BorderLayout()); ListModel<IconItem> list = makeIconList(); TableModel model = makeIconTableModel(list); JTable table = new IconTable(model, list); JPanel p = new JPanel(new GridBagLayout()); p.add(table, new GridBagConstraints()); p.setBackground(Color.WHITE); add(p); setPreferredSize(new Dimension(320, 240)); } private ListModel<IconItem> makeIconList() { DefaultListModel<IconItem> list = new DefaultListModel<>(); list.addElement(new IconItem("wi0009")); list.addElement(new IconItem("wi0054")); list.addElement(new IconItem("wi0062")); list.addElement(new IconItem("wi0063")); list.addElement(new IconItem("wi0064")); list.addElement(new IconItem("wi0096")); list.addElement(new IconItem("wi0111")); list.addElement(new IconItem("wi0122")); list.addElement(new IconItem("wi0124")); return list; } private static TableModel makeIconTableModel(ListModel<?> list) { Object[][] data = { {list.getElementAt(0), list.getElementAt(1), list.getElementAt(2)}, {list.getElementAt(3), list.getElementAt(4), list.getElementAt(5)}, {list.getElementAt(6), list.getElementAt(7), list.getElementAt(8)} }; return new DefaultTableModel(data, null) { @Override public boolean isCellEditable(int row, int column) { return false; } @Override public int getColumnCount() { return 3; } }; } public static void main(String... args) { EventQueue.invokeLater(new Runnable() { @Override public void run() { createAndShowGUI(); } }); } public static void createAndShowGUI() { try { UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | UnsupportedLookAndFeelException ex) { ex.printStackTrace(); } JFrame frame = new JFrame("@title@"); frame.setDefaultCloseOperation(WindowConstants.EXIT_ON_CLOSE); frame.getContentPane().add(new MainPanel()); frame.pack(); frame.setResizable(false); frame.setLocationRelativeTo(null); frame.setVisible(true); } } class IconItem { public final ImageIcon large; public final ImageIcon small; protected IconItem(String str) { large = new ImageIcon(getClass().getResource(str + "-48.png")); small = new ImageIcon(getClass().getResource(str + "-24.png")); } } class IconTableCellRenderer extends DefaultTableCellRenderer { @Override public void updateUI() { super.updateUI(); setHorizontalAlignment(SwingConstants.CENTER); } @Override public Component getTableCellRendererComponent(JTable table, Object value, boolean isSelected, boolean hasFocus, int row, int column) { setIcon(((IconItem) value).large); return this; } } class IconTable extends JTable { protected static final int XOFF = 4; protected final JList<IconItem> editor; protected final JComponent glassPane = new JComponent() { @Override public void setVisible(boolean flag) { super.setVisible(flag); setFocusTraversalPolicyProvider(flag); setFocusCycleRoot(flag); } @Override protected void paintComponent(Graphics g) { g.setColor(new Color(0x64FFFFFF, true)); g.fillRect(0, 0, getWidth(), getHeight()); BufferedImage bufimg = new BufferedImage(getWidth(), getHeight(), BufferedImage.TYPE_INT_ARGB); Graphics2D g2 = bufimg.createGraphics(); g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); g2.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, .15f)); g2.setPaint(Color.BLACK); Rectangle r = editor.getBounds(); for (int i = 0; i < XOFF; i++) { g2.fillRoundRect(r.x - i, r.y + XOFF, r.width + i + i, r.height - XOFF + i, 5, 5); } g2.dispose(); g.drawImage(bufimg, 0, 0, null); } }; protected IconTable(TableModel model, ListModel<IconItem> list) { super(model); setDefaultRenderer(Object.class, new IconTableCellRenderer()); setSelectionMode(ListSelectionModel.SINGLE_SELECTION); initCellSize(50); addMouseListener(new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { startEditing(); } }); editor = new EditorFromList<>(list); editor.getInputMap(JComponent.WHEN_FOCUSED).put(KeyStroke.getKeyStroke(KeyEvent.VK_ESCAPE, 0), "cancel-editing"); editor.getActionMap().put("cancel-editing", new AbstractAction() { @Override public void actionPerformed(ActionEvent e) { cancelEditing(); } }); // editor.addKeyListener(new KeyAdapter() { // @Override public void keyPressed(KeyEvent e) { // if (e.getKeyCode() == KeyEvent.VK_ESCAPE) { // cancelEditing(); // } // } // }); editor.addMouseListener(new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { Point p = e.getPoint(); IconItem item = editor.getModel().getElementAt(editor.locationToIndex(p)); setValueAt(item, getSelectedRow(), getSelectedColumn()); cancelEditing(); } }); glassPane.addMouseListener(new MouseAdapter() { @Override public void mouseClicked(MouseEvent e) { //Point pt = e.getPoint(); //if (!editor.getBounds().contains(pt)) { // cancelEditing(); //} cancelEditing(); } }); glassPane.setFocusTraversalPolicy(new DefaultFocusTraversalPolicy() { @Override public boolean accept(Component c) { return Objects.equals(c, editor); } }); glassPane.add(editor); glassPane.setVisible(false); } private void initCellSize(int size) { setRowHeight(size); JTableHeader tableHeader = getTableHeader(); tableHeader.setResizingAllowed(false); tableHeader.setReorderingAllowed(false); TableColumnModel m = getColumnModel(); for (int i = 0; i < m.getColumnCount(); i++) { TableColumn col = m.getColumn(i); col.setMinWidth(size); col.setMaxWidth(size); } setBorder(BorderFactory.createLineBorder(Color.BLACK)); } public void startEditing() { getRootPane().setGlassPane(glassPane); Dimension d = editor.getPreferredSize(); editor.setSize(d); int sr = getSelectedRow(); int sc = getSelectedColumn(); Rectangle r = getCellRect(sr, sc, true); Point p = SwingUtilities.convertPoint(this, r.getLocation(), glassPane); p.translate((r.width - d.width) / 2, (r.height - d.height) / 2); editor.setLocation(p); glassPane.setVisible(true); editor.setSelectedValue(getValueAt(sr, sc), true); editor.requestFocusInWindow(); } protected void cancelEditing() { glassPane.setVisible(false); } } class EditorFromList<E extends IconItem> extends JList<E> { private static final int INS = 2; private final Dimension dim; protected transient RollOverListener handler; protected int rollOverRowIndex = -1; protected EditorFromList(ListModel<E> model) { super(model); ImageIcon icon = model.getElementAt(0).small; int iw = INS + icon.getIconWidth(); int ih = INS + icon.getIconHeight(); dim = new Dimension(iw * 3 + INS, ih * 3 + INS); setFixedCellWidth(iw); setFixedCellHeight(ih); } @Override public Dimension getPreferredSize() { return dim; } @Override public void updateUI() { removeMouseMotionListener(handler); removeMouseListener(handler); super.updateUI(); handler = new RollOverListener(); addMouseMotionListener(handler); addMouseListener(handler); setBorder(BorderFactory.createLineBorder(Color.BLACK)); setLayoutOrientation(JList.HORIZONTAL_WRAP); setVisibleRowCount(0); setCellRenderer(new ListCellRenderer<IconItem>() { private final JLabel label = new JLabel(); private final Color selctedColor = new Color(0xC8C8FF); @Override public Component getListCellRendererComponent(JList<? extends IconItem> list, IconItem value, int index, boolean isSelected, boolean cellHasFocus) { label.setOpaque(true); label.setHorizontalAlignment(SwingConstants.CENTER); if (index == rollOverRowIndex) { label.setBackground(getSelectionBackground()); } else if (isSelected) { label.setBackground(selctedColor); } else { label.setBackground(getBackground()); } label.setIcon(value.small); return label; } }); } private class RollOverListener extends MouseAdapter { @Override public void mouseExited(MouseEvent e) { rollOverRowIndex = -1; repaint(); } @Override public void mouseMoved(MouseEvent e) { int row = locationToIndex(e.getPoint()); if (row != rollOverRowIndex) { rollOverRowIndex = row; repaint(); } } } }
refactor: change the final class constructor to private
LargeCellEditor/src/java/example/MainPanel.java
refactor: change the final class constructor to private
Java
epl-1.0
1e12238272b5df681e8d200387bbc05d539ac87b
0
usethesource/rascal-value
package io.usethesource.vallang; import java.io.IOException; import java.io.StringReader; import java.lang.reflect.Method; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Map; import java.util.Random; import java.util.function.BiFunction; import java.util.stream.Collectors; import java.util.stream.Stream; import org.checkerframework.checker.nullness.qual.Nullable; import org.junit.jupiter.api.extension.ExtensionContext; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.ArgumentsProvider; import io.usethesource.vallang.exceptions.FactParseError; import io.usethesource.vallang.exceptions.FactTypeUseException; import io.usethesource.vallang.io.StandardTextReader; import io.usethesource.vallang.random.RandomValueGenerator; import io.usethesource.vallang.type.Type; import io.usethesource.vallang.type.TypeFactory; import io.usethesource.vallang.type.TypeStore; /** * This value provider generates automatically/randomly values for test parameters of type: * IValueFactory * TypeFactory * TypeStore * IValue * IList * ISet * IMap * IInteger * IReal * INumber * IRational * INode * IConstructor * ITuple * ISourceLocation * Type * * If the class under test has a static field called "store" of type TypeStore, then this * typestore will be passed to all parameters of type TypeStore instead of a fresh/empty TypeStore. * * If a parameter of a method under test is annotated with @ExpectedType("type") like so: * \@ParameterizedTest \@ArgumentsSource(ValueProvider.class) * public void myTest(\@ExpectedType("set[int]") ISet set) ... * * , then the ValueProvider will generate only instances which have as run-time type a * sub-type of the specified expected type. * */ public class ValueProvider implements ArgumentsProvider { private static final TypeFactory tf = TypeFactory.getInstance(); private static final RandomValueGenerator gen; private static final @Nullable String seedProperty; private static final long seed; private static final Random rnd; static { seedProperty = System.getProperty("vallang.test.seed"); if (seedProperty != null) { System.err.println("Current random seed is computed from -Dvallang.test.seed=" + seedProperty); seed = hashSeed(seedProperty); rnd = new Random(seed); } else { seed = new Random().nextLong(); rnd = new Random(seed); } System.err.println("Current random seed is: " + seed); gen = new RandomValueGenerator(rnd); } /** * We use this to accidentally generate arguments which are the same as the previous * once in a while: */ private IValue previous = null; /** * Every vallang test is run using all implementations of IValueFactory. */ private static final IValueFactory[] factories = { io.usethesource.vallang.impl.reference.ValueFactory.getInstance(), io.usethesource.vallang.impl.persistent.ValueFactory.getInstance() }; /** * This trivial class helps with streaming generated test inputs, and some other stuff. */ private static class Tuple<A,B> { public A a; public B b; public Tuple(A a, B b) { this.a = a; this.b = b; } public static <C,D> Tuple<C,D> of(C c, D d) { return new Tuple<>(c, d); } } /** * Maps Java class literals of sub-types of IValue to the corresponding function which will * generate a (random) instance of a type that all instances of such Java classes could have. * Only composite types will actually be random. */ private static final Map<Class<? extends IValue>, BiFunction<TypeStore, ExpectedType, Type>> types = Stream.<Tuple<Class<? extends IValue>, BiFunction<TypeStore, ExpectedType, Type>>>of( Tuple.of(IInteger.class, (ts, n) -> tf.integerType()), Tuple.of(IBool.class, (ts, n) -> tf.boolType()), Tuple.of(IReal.class, (ts, n) -> tf.realType()), Tuple.of(IRational.class, (ts, n) -> tf.rationalType()), Tuple.of(INumber.class, (ts, n) -> tf.numberType()), Tuple.of(IString.class, (ts, n) -> tf.stringType()), Tuple.of(ISourceLocation.class, (ts, n) -> tf.sourceLocationType()), Tuple.of(IValue.class, (ts, n) -> tf.valueType()), Tuple.of(INode.class, (ts, n) -> tf.nodeType()), Tuple.of(IList.class, (ts, n) -> tf.listType(tf.randomType(ts))), Tuple.of(ISet.class, (ts, n) -> tf.setType(tf.randomType(ts))), Tuple.of(ITuple.class, (ts, n) -> tf.tupleType(tf.randomType(ts), tf.randomType(ts))), Tuple.of(IMap.class, (ts, n) -> tf.mapType(tf.randomType(ts), tf.randomType(ts))), Tuple.of(IConstructor.class, (ts, n) -> randomADT(ts, n)) ).collect(Collectors.toMap(t -> t.a, t -> t.b)); @Override public Stream<? extends Arguments> provideArguments(ExtensionContext context) { Method method = context.getTestMethod().get(); /* * If only factories and typestores are arguments, we generate as many tests as we have * value factory implementations (2). For the IValue argument we generate 100 tests and for * every additional IValue argument we multiply the number of tests by 10. */ long valueArity = Arrays.stream(method.getParameterTypes()).filter(x -> IValue.class.isAssignableFrom(x) || Type.class.isAssignableFrom(x)).count() - Arrays.stream(method.getParameters()).filter(x -> x.getAnnotation(GivenValue.class) != null).count(); int numberOfTests = Math.max(1, 100 * (int) Math.pow(10, valueArity - 1)); return Stream.of( factories[0], factories[1] ).flatMap(vf -> // all parameters share the same factory generateTypeStore(context).flatMap(ts -> Stream.iterate(arguments(method, vf, ts), p -> arguments(method, vf, ts)).limit(numberOfTests) ) ); } private static Type randomADT(TypeStore ts, ExpectedType n) { if (n != null) { Type result = readType(ts, n); if (result != null) { return result; } } Collection<Type> allADTs = ts.getAbstractDataTypes(); if (!allADTs.isEmpty()) { return allADTs.stream().skip(new Random().nextInt(allADTs.size())).findFirst().get(); } // note the side-effect in the type store! Type x = tf.abstractDataType(ts, "X"); tf.constructor(ts, x, "x"); return x; } /** * Generate the random argument for a single test method * @param method the declaration of the method under test * @param vf the valuefactory to use when generating values, also passed to parameters of type IValueFactory * @param ts the TypeStore to request ADTs from, randomly, also passed to parameters of type TypeStore * @return an Arguments instance for streaming into JUnits MethodSource interface. */ private Arguments arguments(Method method, IValueFactory vf, TypeStore ts) { previous = null; // never reuse arguments from a previous instance ArgumentsSeed argSeed = method.getAnnotation(ArgumentsSeed.class); if (argSeed != null) { gen.setSeed(argSeed.value()); } else { gen.setSeed(seed); } ArgumentsMaxDepth depth = method.getAnnotation(ArgumentsMaxDepth.class); ArgumentsMaxWidth width = method.getAnnotation(ArgumentsMaxWidth.class); return Arguments.of( Arrays.stream(method.getParameters()).map( cl -> argument( vf, ts, cl.getType(), cl.getAnnotation(ExpectedType.class), cl.getAnnotation(GivenValue.class), depth != null ? depth.value() : 5, width != null ? width.value() : 10 )).toArray().clone() ); } private static long hashSeed(String string) { long h = 1125899906842597L; // prime int len = string.length(); for (int i = 0; i < len; i++) { h = 31*h + string.charAt(i); } return h; } /** * Generate an argument to a vallang test function. `cls` can be any sub-type of IValue, * or TypeStore or IValueFactory. * @param vf the valuefactory to use when generating values, also passed to parameters of type IValueFactory * @param ts the TypeStore to request ADTs from, randomly, also passed to parameters of type TypeStore * @param cls the class type of the parameter to generate an input for * @return a random object which is assignable to cls */ private Object argument(IValueFactory vf, TypeStore ts, Class<?> cls, ExpectedType expected, GivenValue givenValue, int depth, int width) { if (givenValue != null) { try { if (expected != null) { return new StandardTextReader().read(vf, ts, readType(ts, expected), new StringReader(givenValue.value())); } else { return new StandardTextReader().read(vf, new StringReader(givenValue.value())); } } catch (FactTypeUseException | IOException e) { System.err.println("[WARNING] failed to parse given value: " + givenValue.value()); } } if (cls.isAssignableFrom(IValueFactory.class)) { return vf; } else if (cls.isAssignableFrom(TypeStore.class)) { return ts; } else if (cls.isAssignableFrom(Type.class)) { return TypeFactory.getInstance().randomType(ts, depth); } else if (cls.isAssignableFrom(TypeFactory.class)) { return TypeFactory.getInstance(); } else if (IValue.class.isAssignableFrom(cls)) { return generateValue(vf, ts, cls.asSubclass(IValue.class), expected, depth, width); } else { throw new IllegalArgumentException(cls + " is not assignable from IValue, IValueFactory, TypeStore or TypeFactory"); } } /** * Generate a random IValue instance * * @param vf the valuefactory/randomgenerator to use * @param ts the TypeStore to draw ADT constructors from * @param cl the `cl` (sub-type of `IValue`) to be assignable to * @param noAnnotations * @return an instance assignable to `cl` */ private IValue generateValue(IValueFactory vf, TypeStore ts, Class<? extends IValue> cl, ExpectedType expected, int depth, int width) { Type expectedType = expected != null ? readType(ts, expected) : types.getOrDefault(cl, (x, n) -> tf.valueType()).apply(ts, expected); if (previous != null && gen.nextInt(4) == 0 && previous.getType().isSubtypeOf(expectedType)) { return gen.nextBoolean() ? previous : reinstantiate(vf, ts, previous); } return (previous = gen.generate(expectedType, vf, ts, Collections.emptyMap(), depth, width)); } private static Type readType(TypeStore ts, ExpectedType expected) { try { return tf.fromString(ts, new StringReader(expected.value())); } catch (IOException e) { return null; } } /** * Produces a value which equals the input `val` but is not the same object reference. * It does this by serializing the value and parsing it again with the same expected type. * @return a value equals to `val` (val.equals(returnValue)) but not reference equal (val != returnValue) */ private IValue reinstantiate(IValueFactory vf, TypeStore ts, IValue val) { try { return new StandardTextReader().read(vf, ts, val.getType(), new StringReader(val.toString())); } catch (FactTypeUseException | FactParseError | IOException e) { System.err.println("WARNING: value reinstantation via serialization failed for ["+val+"] because + \""+e.getMessage()+"\". Reusing reference."); return val; } } /** * Generates a TypeStore instance by importing the static `store` field of the class-under-test (if-present) * in a fresh TypeStore. Otherwise it generates a fresh and empty TypeStore. * @param context * @return */ private Stream<TypeStore> generateTypeStore(ExtensionContext context) { try { return Stream.of(new TypeStore((TypeStore) context.getRequiredTestClass().getField("store").get("null"))); } catch (NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException e) { return Stream.of(new TypeStore()); } } }
src/test/java/io/usethesource/vallang/ValueProvider.java
package io.usethesource.vallang; import java.io.IOException; import java.io.StringReader; import java.lang.reflect.Method; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Map; import java.util.Random; import java.util.function.BiFunction; import java.util.stream.Collectors; import java.util.stream.Stream; import org.checkerframework.checker.nullness.qual.Nullable; import org.junit.jupiter.api.extension.ExtensionContext; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.ArgumentsProvider; import io.usethesource.vallang.exceptions.FactParseError; import io.usethesource.vallang.exceptions.FactTypeUseException; import io.usethesource.vallang.io.StandardTextReader; import io.usethesource.vallang.random.RandomValueGenerator; import io.usethesource.vallang.type.Type; import io.usethesource.vallang.type.TypeFactory; import io.usethesource.vallang.type.TypeStore; /** * This value provider generates automatically/randomly values for test parameters of type: * IValueFactory * TypeFactory * TypeStore * IValue * IList * ISet * IMap * IInteger * IReal * INumber * IRational * INode * IConstructor * ITuple * ISourceLocation * Type * * If the class under test has a static field called "store" of type TypeStore, then this * typestore will be passed to all parameters of type TypeStore instead of a fresh/empty TypeStore. * * If a parameter of a method under test is annotated with @ExpectedType("type") like so: * \@ParameterizedTest \@ArgumentsSource(ValueProvider.class) * public void myTest(\@ExpectedType("set[int]") ISet set) ... * * , then the ValueProvider will generate only instances which have as run-time type a * sub-type of the specified expected type. * */ public class ValueProvider implements ArgumentsProvider { private static final TypeFactory tf = TypeFactory.getInstance(); private static final RandomValueGenerator gen; private static final @Nullable String seedProperty; private static final long seed; private static final Random rnd; static { seedProperty = System.getProperty("vallang.test.seed"); if (seedProperty != null) { seed = hashSeed(seedProperty); rnd = new Random(seed); } else { seed = 0; rnd = new Random(); } gen = new RandomValueGenerator(rnd); } /** * We use this to accidentally generate arguments which are the same as the previous * once in a while: */ private IValue previous = null; /** * Every vallang test is run using all implementations of IValueFactory. */ private static final IValueFactory[] factories = { io.usethesource.vallang.impl.reference.ValueFactory.getInstance(), io.usethesource.vallang.impl.persistent.ValueFactory.getInstance() }; /** * This trivial class helps with streaming generated test inputs, and some other stuff. */ private static class Tuple<A,B> { public A a; public B b; public Tuple(A a, B b) { this.a = a; this.b = b; } public static <C,D> Tuple<C,D> of(C c, D d) { return new Tuple<>(c, d); } } /** * Maps Java class literals of sub-types of IValue to the corresponding function which will * generate a (random) instance of a type that all instances of such Java classes could have. * Only composite types will actually be random. */ private static final Map<Class<? extends IValue>, BiFunction<TypeStore, ExpectedType, Type>> types = Stream.<Tuple<Class<? extends IValue>, BiFunction<TypeStore, ExpectedType, Type>>>of( Tuple.of(IInteger.class, (ts, n) -> tf.integerType()), Tuple.of(IBool.class, (ts, n) -> tf.boolType()), Tuple.of(IReal.class, (ts, n) -> tf.realType()), Tuple.of(IRational.class, (ts, n) -> tf.rationalType()), Tuple.of(INumber.class, (ts, n) -> tf.numberType()), Tuple.of(IString.class, (ts, n) -> tf.stringType()), Tuple.of(ISourceLocation.class, (ts, n) -> tf.sourceLocationType()), Tuple.of(IValue.class, (ts, n) -> tf.valueType()), Tuple.of(INode.class, (ts, n) -> tf.nodeType()), Tuple.of(IList.class, (ts, n) -> tf.listType(tf.randomType(ts))), Tuple.of(ISet.class, (ts, n) -> tf.setType(tf.randomType(ts))), Tuple.of(ITuple.class, (ts, n) -> tf.tupleType(tf.randomType(ts), tf.randomType(ts))), Tuple.of(IMap.class, (ts, n) -> tf.mapType(tf.randomType(ts), tf.randomType(ts))), Tuple.of(IConstructor.class, (ts, n) -> randomADT(ts, n)) ).collect(Collectors.toMap(t -> t.a, t -> t.b)); @Override public Stream<? extends Arguments> provideArguments(ExtensionContext context) { Method method = context.getTestMethod().get(); /* * If only factories and typestores are arguments, we generate as many tests as we have * value factory implementations (2). For the IValue argument we generate 100 tests and for * every additional IValue argument we multiply the number of tests by 10. */ long valueArity = Arrays.stream(method.getParameterTypes()).filter(x -> IValue.class.isAssignableFrom(x) || Type.class.isAssignableFrom(x)).count() - Arrays.stream(method.getParameters()).filter(x -> x.getAnnotation(GivenValue.class) != null).count(); int numberOfTests = Math.max(1, 100 * (int) Math.pow(10, valueArity - 1)); return Stream.of( factories[0], factories[1] ).flatMap(vf -> // all parameters share the same factory generateTypeStore(context).flatMap(ts -> Stream.iterate(arguments(method, vf, ts), p -> arguments(method, vf, ts)).limit(numberOfTests) ) ); } private static Type randomADT(TypeStore ts, ExpectedType n) { if (n != null) { Type result = readType(ts, n); if (result != null) { return result; } } Collection<Type> allADTs = ts.getAbstractDataTypes(); if (!allADTs.isEmpty()) { return allADTs.stream().skip(new Random().nextInt(allADTs.size())).findFirst().get(); } // note the side-effect in the type store! Type x = tf.abstractDataType(ts, "X"); tf.constructor(ts, x, "x"); return x; } /** * Generate the random argument for a single test method * @param method the declaration of the method under test * @param vf the valuefactory to use when generating values, also passed to parameters of type IValueFactory * @param ts the TypeStore to request ADTs from, randomly, also passed to parameters of type TypeStore * @return an Arguments instance for streaming into JUnits MethodSource interface. */ private Arguments arguments(Method method, IValueFactory vf, TypeStore ts) { previous = null; // never reuse arguments from a previous instance ArgumentsSeed argSeed = method.getAnnotation(ArgumentsSeed.class); if (argSeed != null) { gen.setSeed(argSeed.value()); } else if (seedProperty != null) { gen.setSeed(seed); } ArgumentsMaxDepth depth = method.getAnnotation(ArgumentsMaxDepth.class); ArgumentsMaxWidth width = method.getAnnotation(ArgumentsMaxWidth.class); return Arguments.of( Arrays.stream(method.getParameters()).map( cl -> argument( vf, ts, cl.getType(), cl.getAnnotation(ExpectedType.class), cl.getAnnotation(GivenValue.class), depth != null ? depth.value() : 5, width != null ? width.value() : 10 )).toArray().clone() ); } private static long hashSeed(String string) { long h = 1125899906842597L; // prime int len = string.length(); for (int i = 0; i < len; i++) { h = 31*h + string.charAt(i); } return h; } /** * Generate an argument to a vallang test function. `cls` can be any sub-type of IValue, * or TypeStore or IValueFactory. * @param vf the valuefactory to use when generating values, also passed to parameters of type IValueFactory * @param ts the TypeStore to request ADTs from, randomly, also passed to parameters of type TypeStore * @param cls the class type of the parameter to generate an input for * @return a random object which is assignable to cls */ private Object argument(IValueFactory vf, TypeStore ts, Class<?> cls, ExpectedType expected, GivenValue givenValue, int depth, int width) { if (givenValue != null) { try { if (expected != null) { return new StandardTextReader().read(vf, ts, readType(ts, expected), new StringReader(givenValue.value())); } else { return new StandardTextReader().read(vf, new StringReader(givenValue.value())); } } catch (FactTypeUseException | IOException e) { System.err.println("[WARNING] failed to parse given value: " + givenValue.value()); } } if (cls.isAssignableFrom(IValueFactory.class)) { return vf; } else if (cls.isAssignableFrom(TypeStore.class)) { return ts; } else if (cls.isAssignableFrom(Type.class)) { return TypeFactory.getInstance().randomType(ts, depth); } else if (cls.isAssignableFrom(TypeFactory.class)) { return TypeFactory.getInstance(); } else if (IValue.class.isAssignableFrom(cls)) { return generateValue(vf, ts, cls.asSubclass(IValue.class), expected, depth, width); } else { throw new IllegalArgumentException(cls + " is not assignable from IValue, IValueFactory, TypeStore or TypeFactory"); } } /** * Generate a random IValue instance * * @param vf the valuefactory/randomgenerator to use * @param ts the TypeStore to draw ADT constructors from * @param cl the `cl` (sub-type of `IValue`) to be assignable to * @param noAnnotations * @return an instance assignable to `cl` */ private IValue generateValue(IValueFactory vf, TypeStore ts, Class<? extends IValue> cl, ExpectedType expected, int depth, int width) { Type expectedType = expected != null ? readType(ts, expected) : types.getOrDefault(cl, (x, n) -> tf.valueType()).apply(ts, expected); if (previous != null && gen.nextInt(4) == 0 && previous.getType().isSubtypeOf(expectedType)) { return gen.nextBoolean() ? previous : reinstantiate(vf, ts, previous); } return (previous = gen.generate(expectedType, vf, ts, Collections.emptyMap(), depth, width)); } private static Type readType(TypeStore ts, ExpectedType expected) { try { return tf.fromString(ts, new StringReader(expected.value())); } catch (IOException e) { return null; } } /** * Produces a value which equals the input `val` but is not the same object reference. * It does this by serializing the value and parsing it again with the same expected type. * @return a value equals to `val` (val.equals(returnValue)) but not reference equal (val != returnValue) */ private IValue reinstantiate(IValueFactory vf, TypeStore ts, IValue val) { try { return new StandardTextReader().read(vf, ts, val.getType(), new StringReader(val.toString())); } catch (FactTypeUseException | FactParseError | IOException e) { System.err.println("WARNING: value reinstantation via serialization failed for ["+val+"] because + \""+e.getMessage()+"\". Reusing reference."); return val; } } /** * Generates a TypeStore instance by importing the static `store` field of the class-under-test (if-present) * in a fresh TypeStore. Otherwise it generates a fresh and empty TypeStore. * @param context * @return */ private Stream<TypeStore> generateTypeStore(ExtensionContext context) { try { return Stream.of(new TypeStore((TypeStore) context.getRequiredTestClass().getField("store").get("null"))); } catch (NoSuchFieldException | SecurityException | IllegalArgumentException | IllegalAccessException e) { return Stream.of(new TypeStore()); } } }
documenting seed property during test run
src/test/java/io/usethesource/vallang/ValueProvider.java
documenting seed property during test run
Java
epl-1.0
35671a2977616ba5217df9f52e2b0f6ef698275e
0
bfg-repo-cleaner-demos/eclipselink.runtime-bfg-strip-big-blobs,bfg-repo-cleaner-demos/eclipselink.runtime-bfg-strip-big-blobs,bfg-repo-cleaner-demos/eclipselink.runtime-bfg-strip-big-blobs
/******************************************************************************* * Copyright (c) 2011, 2012 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * tware - initial implementation as part of extensibility feature * 01/11/2013-2.5 Guy Pelletier * - 389090: JPA 2.1 DDL Generation Support * 01/16/2013-2.5 Guy Pelletier * - 389090: JPA 2.1 DDL Generation Support * 02/04/2013-2.5 Guy Pelletier * - 389090: JPA 2.1 DDL Generation Support ******************************************************************************/ package org.eclipse.persistence.internal.jpa; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.persistence.Cache; import javax.persistence.EntityGraph; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.FlushModeType; import javax.persistence.PersistenceException; import javax.persistence.PersistenceUnitUtil; import javax.persistence.Query; import javax.persistence.SynchronizationType; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.metamodel.Metamodel; import org.eclipse.persistence.config.ReferenceMode; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.exceptions.PersistenceUnitLoadingException; import org.eclipse.persistence.internal.indirection.IndirectionPolicy; import org.eclipse.persistence.internal.localization.ExceptionLocalization; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.DatabaseSessionImpl; import org.eclipse.persistence.jpa.JpaEntityManagerFactory; import org.eclipse.persistence.mappings.DatabaseMapping; import org.eclipse.persistence.mappings.ForeignReferenceMapping; import org.eclipse.persistence.queries.AttributeGroup; import org.eclipse.persistence.queries.DatabaseQuery; import org.eclipse.persistence.queries.FetchGroupTracker; import org.eclipse.persistence.queries.ObjectLevelReadQuery; import org.eclipse.persistence.queries.ReadQuery; import org.eclipse.persistence.sessions.DatabaseSession; import org.eclipse.persistence.sessions.Session; import org.eclipse.persistence.sessions.broker.SessionBroker; import org.eclipse.persistence.sessions.coordination.CommandManager; import org.eclipse.persistence.internal.sessions.coordination.MetadataRefreshCommand; import org.eclipse.persistence.sessions.factories.SessionManager; import org.eclipse.persistence.sessions.server.Server; import org.eclipse.persistence.sessions.server.ServerSession; /** * Wraps our implementation of EntityManagerFactory * Most operations are forwarded to the delegate. This wrapper is used to enable * the refreshMetadata functionality which allows you to switch the underlying metadata for * an EMF after deploy time. * @author tware * */ public class EntityManagerFactoryImpl implements EntityManagerFactory, PersistenceUnitUtil, JpaEntityManagerFactory { protected EntityManagerFactoryDelegate delegate; /** * Returns the id of the entity. A generated id is not guaranteed to be * available until after the database insert has occurred. Returns null if * the entity does not yet have an id * * @param entity * @return id of the entity * @throws IllegalStateException * if the entity is found not to be an entity. */ public static Object getIdentifier(Object entity, AbstractSession session) { ClassDescriptor descriptor = session.getDescriptor(entity); if (descriptor == null) { throw new IllegalArgumentException(ExceptionLocalization.buildMessage("jpa_persistence_util_non_persistent_class", new Object[] { entity })); } if (descriptor.getCMPPolicy() != null) { return descriptor.getCMPPolicy().createPrimaryKeyInstance(entity, session); } else { // 308950: Alternatively, CacheImpl.getId(entity) handles a null CMPPolicy case for weaved and unweaved domain object throw new IllegalArgumentException(ExceptionLocalization.buildMessage("jpa_persistence_util_non_persistent_class", new Object[] { entity })); } } /** * Determine the load state of an entity belonging to the persistence unit. * This method can be used to determine the load state of an entity passed * as a reference. An entity is considered loaded if all attributes for * which FetchType EAGER has been specified have been loaded. The * isLoaded(Object, String) method should be used to determine the load * state of an attribute. Not doing so might lead to unintended loading of * state. * * @param entity * whose load state is to be determined * @return false if the entity has not been loaded, else true. */ public static Boolean isLoaded(Object entity, AbstractSession session) { ClassDescriptor descriptor = session.getDescriptor(entity); if (descriptor == null) { return null; } List<DatabaseMapping> mappings = descriptor.getMappings(); Iterator<DatabaseMapping> i = mappings.iterator(); while (i.hasNext()) { DatabaseMapping mapping = i.next(); if (!mapping.isLazy() && !isLoaded(entity, mapping.getAttributeName(), mapping)) { return false; } } return true; } /** * Determine the load state of a given persistent attribute of an entity * belonging to the persistence unit. * * @param entity * containing the attribute * @param attributeName * name of attribute whose load state is to be determined * @return false if entity's state has not been loaded or if the attribute * state has not been loaded, otherwise true */ public static Boolean isLoaded(Object entity, String attributeName, AbstractSession session) { ClassDescriptor descriptor = session.getDescriptor(entity); if (descriptor == null) { return null; } if (descriptor.hasFetchGroupManager()){ if (!descriptor.getFetchGroupManager().isAttributeFetched(entity, attributeName)){ return false; } } DatabaseMapping mapping = descriptor.getMappingForAttributeName(attributeName); if (mapping == null) { return null; } return isLoaded(entity, attributeName, mapping); } /** * Check whether a named attribute on a given entity with a given mapping * has been loaded. * * This method will check the valueholder or indirect collection for LAZY * ForeignReferenceMappings to see if has been instantiated and otherwise * check the fetch group. * * @param entity * @param attributeName * @param mapping * @return */ public static boolean isLoaded(Object entity, String attributeName, DatabaseMapping mapping) { if (mapping.isForeignReferenceMapping()) { if (((ForeignReferenceMapping) mapping).isLazy()) { Object value = mapping.getAttributeValueFromObject(entity); IndirectionPolicy policy = ((ForeignReferenceMapping) mapping).getIndirectionPolicy(); return policy.objectIsInstantiated(value); } } if (entity instanceof FetchGroupTracker) { return ((FetchGroupTracker) entity)._persistence_isAttributeFetched(attributeName); } else { return true; } } /** * Will return an instance of the Factory. Should only be called by * EclipseLink. * * @param serverSession */ public EntityManagerFactoryImpl(AbstractSession session) { delegate = new EntityManagerFactoryDelegate(session, this); } public EntityManagerFactoryImpl(EntityManagerSetupImpl setupImpl, Map properties) { delegate = new EntityManagerFactoryDelegate(setupImpl, properties, this); } /** * Create a dynamic persistence unit which does not use the persistence.xml. * Instead all configuration is driven from the provided persistence unit * properties and descriptors. */ public EntityManagerFactoryImpl(String persistenceUnitName, Map<String, Object> properties, List<ClassDescriptor> descriptors) { delegate = new EntityManagerFactoryDelegate(persistenceUnitName, properties, descriptors, this); } /** * ADVANCED: * Re-bootstrap this factory. This method will rebuild the EntityManagerFactory. It should be used * in conjunction with a MetadataSource to allow mappings to be changed in a running system. All existing * EntityMangers will continue to function with the old metadata, but new factories will use the new metadata. * @param properties */ public void refreshMetadata(Map properties){ EntityManagerSetupImpl setupImpl = delegate.getSetupImpl(); if (setupImpl == null){ throw PersistenceUnitLoadingException.cannotRefreshEntityManagerFactoryCreatedFromSession(delegate.getServerSession().getName()); } String sessionName = setupImpl.getSessionName(); Map existingProperties = delegate.getProperties(); Map deployProperties = new HashMap(); deployProperties.putAll(existingProperties); if (properties != null){ deployProperties.putAll(properties); } EntityManagerSetupImpl newSetupImpl = setupImpl.refreshMetadata(properties); EntityManagerFactoryDelegate oldDelegate = delegate; delegate = new EntityManagerFactoryDelegate(newSetupImpl, deployProperties, this); // This code has been added to allow validation to occur without actually calling createEntityManager // RCM refresh command requires the DEPLOY_ON_STARTUP property is set to true so the listener can be added to the session. try{ if (newSetupImpl.shouldGetSessionOnCreateFactory(deployProperties)) { ServerSession session = getServerSession(); CommandManager rcm = session.getCommandManager(); if (rcm != null && newSetupImpl.shouldSendMetadataRefreshCommand(deployProperties)) { MetadataRefreshCommand command = new MetadataRefreshCommand(properties); rcm.propagateCommand(command); } session.setRefreshMetadataListener(newSetupImpl); } } catch (RuntimeException ex) { if(delegate != null) { delegate.close(); } else { newSetupImpl.undeploy(); } synchronized(EntityManagerFactoryProvider.emSetupImpls){ // bring back the old emSetupImpl and session EntityManagerFactoryProvider.emSetupImpls.put(sessionName, setupImpl); SessionManager.getManager().getSessions().put(sessionName, setupImpl.getSession()); setupImpl.setIsMetadataExpired(false); } delegate = oldDelegate; throw ex; } } /** * INTERNAL: Returns the SessionBroker that the Factory will be using and * initializes it if it is not available. This method makes use of the * partially constructed session stored in our setupImpl and completes its * construction * TODO: should throw IllegalStateException if not SessionBroker */ public SessionBroker getSessionBroker() { return delegate.getSessionBroker(); } /** * INTERNAL: Returns the ServerSession that the Factory will be using and * initializes it if it is not available. This method makes use of the * partially constructed session stored in our setupImpl and completes its * construction */ public ServerSession getServerSession() { return delegate.getServerSession(); } /** * Closes this factory, releasing any resources that might be held by this * factory. After invoking this method, all methods on the instance will * throw an {@link IllegalStateException}, except for {@link #isOpen}, which * will return <code>false</code>. */ public synchronized void close() { delegate.close(); } /** * Indicates whether or not this factory is open. Returns <code>true</code> * until a call to {@link #close} is made. */ public boolean isOpen() { return delegate.isOpen(); } /** * PUBLIC: Returns an EntityManager for this deployment. */ public EntityManager createEntityManager() { return createEntityManagerImpl(null, null); } /** * PUBLIC: Returns an EntityManager for this deployment. */ public EntityManager createEntityManager(Map properties) { return createEntityManagerImpl(properties, null); } public EntityManager createEntityManager(SynchronizationType synchronizationType) { return createEntityManagerImpl(null, synchronizationType); } public EntityManager createEntityManager(SynchronizationType synchronizationType, Map map) { return createEntityManagerImpl(map, synchronizationType); } protected EntityManagerImpl createEntityManagerImpl(Map properties, SynchronizationType syncType) { EntityManagerSetupImpl setupImpl = delegate.getSetupImpl(); if (setupImpl != null) { if (setupImpl.isMetadataExpired()) { String sessionName = setupImpl.getSessionName(); EntityManagerSetupImpl storedImpl = null; synchronized (EntityManagerFactoryProvider.emSetupImpls){ storedImpl = EntityManagerFactoryProvider.emSetupImpls.get(sessionName); } if (storedImpl != null) { delegate = new EntityManagerFactoryDelegate(storedImpl, delegate.getProperties(), this); } } } return delegate.createEntityManagerImpl(properties, syncType); } /** * Gets the underlying implementation of the EntityManagerFactory. * This method will return a version of EntityManagerFactory that is * based on the available metadata at the time it is called. Future calls * to refresh will not affect that metadata on this EntityManagerFactory. * @return */ public EntityManagerFactoryDelegate unwrap(){ return delegate; } protected void verifyOpen() { delegate.verifyOpen(); } protected void finalize() throws Throwable { delegate = null; } /** * The method return user defined property passed in from * EntityManagerFactory. */ public Object getProperty(String name) { return delegate.getProperty(name); } /** * Return default join existing transaction property, allows reading through * write connection. */ public boolean getBeginEarlyTransaction() { return delegate.getBeginEarlyTransaction(); } /** * Set default join existing transaction property, allows reading through * write connection. */ public void setBeginEarlyTransaction(boolean beginEarlyTransaction) { delegate.setBeginEarlyTransaction(beginEarlyTransaction); } /** * Return default property, allows flush before query to be avoided. */ public FlushModeType getFlushMode() { return delegate.getFlushMode(); } /** * Set default property, allows flush before query to be avoided. */ public void setFlushMode(FlushModeType flushMode) { delegate.setFlushMode(flushMode); } /** * Return default property, allows weak unit of work references. */ public ReferenceMode getReferenceMode() { return delegate.getReferenceMode(); } /** * Set default property, allows weak unit of work references. */ public void setReferenceMode(ReferenceMode referenceMode) { delegate.setReferenceMode(referenceMode); } /** * Return default property to avoid resuming unit of work if going to be * closed on commit anyway. */ public boolean getCloseOnCommit() { return delegate.getCloseOnCommit(); } /** * Set default property to avoid resuming unit of work if going to be closed * on commit anyway. */ public void setCloseOnCommit(boolean closeOnCommit) { delegate.setCloseOnCommit(closeOnCommit); } /** * Return default property to avoid discover new objects in unit of work if * application always uses persist. */ public boolean getPersistOnCommit() { return delegate.getPersistOnCommit(); } /** * Return interface providing access to utility methods for the persistence * unit. * * @return PersistenceUnitUtil interface * @throws IllegalStateException * if the entity manager factory has been closed. */ public PersistenceUnitUtil getPersistenceUnitUtil() { return delegate.getPersistenceUnitUtil(); } /** * Set default property to avoid discover new objects in unit of work if * application always uses persist. */ public void setPersistOnCommit(boolean persistOnCommit) { delegate.setPersistOnCommit(persistOnCommit); } /** * Return default property to avoid discover new objects in unit of work if * application always uses persist. */ public boolean getCommitWithoutPersistRules() { return delegate.getCommitWithoutPersistRules(); } /** * Set default property to avoid discover new objects in unit of work if * application always uses persist. */ public void setCommitWithoutPersistRules(boolean commitWithoutPersistRules) { delegate.setCommitWithoutPersistRules(commitWithoutPersistRules); } /** * Return the default FlashClearCache mode to be used. Relevant only in case * call to flush method followed by call to clear method. * * @see org.eclipse.persistence.config.FlushClearCache */ public String getFlushClearCache() { return delegate.getFlushClearCache(); } /** * Set the default FlashClearCache mode to be used. Relevant only in case * call to flush method followed by call to clear method. * * @see org.eclipse.persistence.config.FlushClearCache */ public void setFlushClearCache(String flushClearCache) { delegate.setFlushClearCache(flushClearCache); } /** * Return the default to determine if does-exist should be performed on * persist. */ public boolean shouldValidateExistence() { return delegate.shouldValidateExistence(); } /** * Set the default to determine if does-exist should be performed on * persist. */ public void setShouldValidateExistence(boolean shouldValidateExistence) { delegate.setShouldValidateExistence(shouldValidateExistence); } /** * Access the cache that is associated with the entity manager * factory (the "second level cache"). * @return instance of the <code>Cache</code> interface * @throws IllegalStateException if the entity manager factory has been closed * @see javax.persistence.EntityManagerFactory#getCache() * @since Java Persistence 2.0 */ public Cache getCache() { return delegate.getCache(); } /** * @see javax.persistence.EntityManagerFactory#getProperties() * @since Java Persistence API 2.0 */ public Map<String, Object> getProperties() { return delegate.getProperties(); } public DatabaseSessionImpl getDatabaseSession() { return delegate.getDatabaseSession(); } /** * @see javax.persistence.EntityManagerFactory#getCriteriaBuilder() * @since Java Persistence 2.0 */ public CriteriaBuilder getCriteriaBuilder() { return delegate.getCriteriaBuilder(); } /** * Return an instance of Metamodel interface for access to the metamodel of * the persistence unit. * * @return Metamodel instance * @throws IllegalStateException * if the entity manager factory has been closed. * @see javax.persistence.EntityManagerFactory#getMetamodel() * @since Java Persistence 2.0 */ public Metamodel getMetamodel() { return delegate.getMetamodel(); } /** * INTERNAL: Convenience function to allow us to reset the Metamodel in the * possible case that we want to regenerate it. This function is outside of * the JPA 2.0 specification. * * @param aMetamodel * @since Java Persistence 2.0 */ public void setMetamodel(Metamodel aMetamodel) { delegate.setMetamodel(aMetamodel); } /** * Determine the load state of a given persistent attribute of an entity * belonging to the persistence unit. * * @param entity * containing the attribute * @param attributeName * name of attribute whose load state is to be determined * @return false if entity's state has not been loaded or if the attribute * state has not been loaded, otherwise true */ public boolean isLoaded(Object entity, String attributeName) { return delegate.isLoaded(entity, attributeName); } /** * Determine the load state of an entity belonging to the persistence unit. * This method can be used to determine the load state of an entity passed * as a reference. An entity is considered loaded if all attributes for * which FetchType EAGER has been specified have been loaded. The * isLoaded(Object, String) method should be used to determine the load * state of an attribute. Not doing so might lead to unintended loading of * state. * * @param entity * whose load state is to be determined * @return false if the entity has not been loaded, else true. */ public boolean isLoaded(Object entity) { return delegate.isLoaded(entity); } /** * Returns the id of the entity. A generated id is not guaranteed to be * available until after the database insert has occurred. Returns null if * the entity does not yet have an id * * @param entity * @return id of the entity * @throws IllegalStateException * if the entity is found not to be an entity. */ public Object getIdentifier(Object entity) { return delegate.getIdentifier(entity); } /** * ADVANCED: * Return if updates should be ordered by primary key to avoid possible database deadlocks. */ public boolean shouldOrderUpdates() { return delegate.shouldOrderUpdates(); } /** * ADVANCED: * Set updates should be ordered by primary key to avoid possible database deadlocks. */ public void setShouldOrderUpdates(boolean shouldOrderUpdates) { delegate.setShouldOrderUpdates(shouldOrderUpdates); } public void addNamedQuery(String name, Query query) { QueryImpl queryImpl = query.unwrap(QueryImpl.class); DatabaseQuery unwrapped = (DatabaseQuery) queryImpl.getDatabaseQueryInternal().clone(); if (queryImpl.lockMode != null){ ((ObjectLevelReadQuery)unwrapped).setLockModeType(queryImpl.lockMode.name(), getServerSession()); } if (unwrapped.isReadQuery()){ ((ReadQuery)unwrapped).setInternalMax((queryImpl.getMaxResultsInternal())); } this.getServerSession().addQuery(name, unwrapped, true); } public <T> T unwrap(Class<T> cls) { if (cls.equals(JpaEntityManagerFactory.class) || cls.equals(EntityManagerFactoryImpl.class)) { return (T) this; }else if (cls.equals(EntityManagerFactoryDelegate.class)) { return (T) this.delegate; }else if (cls.equals(Session.class) || cls.equals(AbstractSession.class)) { return (T) this.delegate.getAbstractSession(); } else if (cls.equals(DatabaseSession.class) || cls.equals(DatabaseSessionImpl.class)) { return (T) this.getDatabaseSession(); } else if (cls.equals(Server.class) || cls.equals(ServerSession.class)) { return (T) this.getServerSession(); } else if (cls.equals(SessionBroker.class)) { return (T) this.getSessionBroker(); } throw new PersistenceException(ExceptionLocalization.buildMessage("unable_to_unwrap_jpa", new String[]{EntityManagerFactory.class.getName(),cls.getName()})); } public <T> void addNamedEntityGraph(String graphName, EntityGraph<T> entityGraph) { AttributeGroup group = ((EntityGraphImpl)entityGraph).getAttributeGroup().clone(); group.setName(graphName); this.getServerSession().getAttributeGroups().put(graphName, group); this.getServerSession().getDescriptor(((EntityGraphImpl)entityGraph).getClassType()).addAttributeGroup(group); } }
jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/EntityManagerFactoryImpl.java
/******************************************************************************* * Copyright (c) 2011, 2012 Oracle and/or its affiliates. All rights reserved. * This program and the accompanying materials are made available under the * terms of the Eclipse Public License v1.0 and Eclipse Distribution License v. 1.0 * which accompanies this distribution. * The Eclipse Public License is available at http://www.eclipse.org/legal/epl-v10.html * and the Eclipse Distribution License is available at * http://www.eclipse.org/org/documents/edl-v10.php. * * Contributors: * tware - initial implementation as part of extensibility feature * 01/11/2013-2.5 Guy Pelletier * - 389090: JPA 2.1 DDL Generation Support * 01/16/2013-2.5 Guy Pelletier * - 389090: JPA 2.1 DDL Generation Support * 02/04/2013-2.5 Guy Pelletier * - 389090: JPA 2.1 DDL Generation Support ******************************************************************************/ package org.eclipse.persistence.internal.jpa; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import javax.persistence.Cache; import javax.persistence.EntityGraph; import javax.persistence.EntityManager; import javax.persistence.EntityManagerFactory; import javax.persistence.FlushModeType; import javax.persistence.PersistenceException; import javax.persistence.PersistenceUnitUtil; import javax.persistence.Query; import javax.persistence.SynchronizationType; import javax.persistence.criteria.CriteriaBuilder; import javax.persistence.metamodel.Metamodel; import org.eclipse.persistence.config.ReferenceMode; import org.eclipse.persistence.descriptors.ClassDescriptor; import org.eclipse.persistence.exceptions.PersistenceUnitLoadingException; import org.eclipse.persistence.internal.indirection.IndirectionPolicy; import org.eclipse.persistence.internal.localization.ExceptionLocalization; import org.eclipse.persistence.internal.sessions.AbstractSession; import org.eclipse.persistence.internal.sessions.DatabaseSessionImpl; import org.eclipse.persistence.jpa.JpaEntityManagerFactory; import org.eclipse.persistence.mappings.DatabaseMapping; import org.eclipse.persistence.mappings.ForeignReferenceMapping; import org.eclipse.persistence.queries.AttributeGroup; import org.eclipse.persistence.queries.DatabaseQuery; import org.eclipse.persistence.queries.FetchGroupTracker; import org.eclipse.persistence.queries.ObjectLevelReadQuery; import org.eclipse.persistence.queries.ReadQuery; import org.eclipse.persistence.sessions.DatabaseSession; import org.eclipse.persistence.sessions.Session; import org.eclipse.persistence.sessions.broker.SessionBroker; import org.eclipse.persistence.sessions.coordination.CommandManager; import org.eclipse.persistence.internal.sessions.coordination.MetadataRefreshCommand; import org.eclipse.persistence.sessions.factories.SessionManager; import org.eclipse.persistence.sessions.server.Server; import org.eclipse.persistence.sessions.server.ServerSession; /** * Wraps our implementation of EntityManagerFactory * Most operations are forwarded to the delegate. This wrapper is used to enable * the refreshMetadata functionality which allows you to switch the underlying metadata for * an EMF after deploy time. * @author tware * */ public class EntityManagerFactoryImpl implements EntityManagerFactory, PersistenceUnitUtil, JpaEntityManagerFactory { protected EntityManagerFactoryDelegate delegate; /** * Returns the id of the entity. A generated id is not guaranteed to be * available until after the database insert has occurred. Returns null if * the entity does not yet have an id * * @param entity * @return id of the entity * @throws IllegalStateException * if the entity is found not to be an entity. */ public static Object getIdentifier(Object entity, AbstractSession session) { ClassDescriptor descriptor = session.getDescriptor(entity); if (descriptor == null) { throw new IllegalArgumentException(ExceptionLocalization.buildMessage("jpa_persistence_util_non_persistent_class", new Object[] { entity })); } if (descriptor.getCMPPolicy() != null) { return descriptor.getCMPPolicy().createPrimaryKeyInstance(entity, session); } else { // 308950: Alternatively, CacheImpl.getId(entity) handles a null CMPPolicy case for weaved and unweaved domain object throw new IllegalArgumentException(ExceptionLocalization.buildMessage("jpa_persistence_util_non_persistent_class", new Object[] { entity })); } } /** * Determine the load state of an entity belonging to the persistence unit. * This method can be used to determine the load state of an entity passed * as a reference. An entity is considered loaded if all attributes for * which FetchType EAGER has been specified have been loaded. The * isLoaded(Object, String) method should be used to determine the load * state of an attribute. Not doing so might lead to unintended loading of * state. * * @param entity * whose load state is to be determined * @return false if the entity has not been loaded, else true. */ public static Boolean isLoaded(Object entity, AbstractSession session) { ClassDescriptor descriptor = session.getDescriptor(entity); if (descriptor == null) { return null; } List<DatabaseMapping> mappings = descriptor.getMappings(); Iterator<DatabaseMapping> i = mappings.iterator(); while (i.hasNext()) { DatabaseMapping mapping = i.next(); if (!mapping.isLazy() && !isLoaded(entity, mapping.getAttributeName(), mapping)) { return false; } } return true; } /** * Determine the load state of a given persistent attribute of an entity * belonging to the persistence unit. * * @param entity * containing the attribute * @param attributeName * name of attribute whose load state is to be determined * @return false if entity's state has not been loaded or if the attribute * state has not been loaded, otherwise true */ public static Boolean isLoaded(Object entity, String attributeName, AbstractSession session) { ClassDescriptor descriptor = session.getDescriptor(entity); if (descriptor == null) { return null; } if (descriptor.hasFetchGroupManager()){ if (!descriptor.getFetchGroupManager().isAttributeFetched(entity, attributeName)){ return false; } } DatabaseMapping mapping = descriptor.getMappingForAttributeName(attributeName); if (mapping == null) { return null; } return isLoaded(entity, attributeName, mapping); } /** * Check whether a named attribute on a given entity with a given mapping * has been loaded. * * This method will check the valueholder or indirect collection for LAZY * ForeignReferenceMappings to see if has been instantiated and otherwise * check the fetch group. * * @param entity * @param attributeName * @param mapping * @return */ public static boolean isLoaded(Object entity, String attributeName, DatabaseMapping mapping) { if (mapping.isForeignReferenceMapping()) { if (((ForeignReferenceMapping) mapping).isLazy()) { Object value = mapping.getAttributeValueFromObject(entity); IndirectionPolicy policy = ((ForeignReferenceMapping) mapping).getIndirectionPolicy(); return policy.objectIsInstantiated(value); } } if (entity instanceof FetchGroupTracker) { return ((FetchGroupTracker) entity)._persistence_isAttributeFetched(attributeName); } else { return true; } } /** * Will return an instance of the Factory. Should only be called by * EclipseLink. * * @param serverSession */ public EntityManagerFactoryImpl(AbstractSession session) { delegate = new EntityManagerFactoryDelegate(session, this); } public EntityManagerFactoryImpl(EntityManagerSetupImpl setupImpl, Map properties) { delegate = new EntityManagerFactoryDelegate(setupImpl, properties, this); } /** * Create a dynamic persistence unit which does not use the persistence.xml. * Instead all configuration is driven from the provided persistence unit * properties and descriptors. */ public EntityManagerFactoryImpl(String persistenceUnitName, Map<String, Object> properties, List<ClassDescriptor> descriptors) { delegate = new EntityManagerFactoryDelegate(persistenceUnitName, properties, descriptors, this); } /** * ADVANCED: * Re-bootstrap this factory. This method will rebuild the EntityManagerFactory. It should be used * in conjunction with a MetadataSource to allow mappings to be changed in a running system. All existing * EntityMangers will continue to function with the old metadata, but new factories will use the new metadata. * @param properties */ public void refreshMetadata(Map properties){ EntityManagerSetupImpl setupImpl = delegate.getSetupImpl(); if (setupImpl == null){ throw PersistenceUnitLoadingException.cannotRefreshEntityManagerFactoryCreatedFromSession(delegate.getServerSession().getName()); } String sessionName = setupImpl.getSessionName(); Map existingProperties = delegate.getProperties(); Map deployProperties = new HashMap(); deployProperties.putAll(existingProperties); if (properties != null){ deployProperties.putAll(properties); } EntityManagerSetupImpl newSetupImpl = setupImpl.refreshMetadata(properties); EntityManagerFactoryDelegate oldDelegate = delegate; delegate = new EntityManagerFactoryDelegate(newSetupImpl, deployProperties, this); // This code has been added to allow validation to occur without actually calling createEntityManager // RCM refresh command requires the DEPLOY_ON_STARTUP property is set to true so the listener can be added to the session. try{ if (newSetupImpl.shouldGetSessionOnCreateFactory(deployProperties)) { ServerSession session = getServerSession(); CommandManager rcm = session.getCommandManager(); if (rcm != null && newSetupImpl.shouldSendMetadataRefreshCommand(deployProperties)) { MetadataRefreshCommand command = new MetadataRefreshCommand(properties); rcm.propagateCommand(command); } session.setRefreshMetadataListener(newSetupImpl); } } catch (RuntimeException ex) { if(delegate != null) { delegate.close(); } else { newSetupImpl.undeploy(); } synchronized(EntityManagerFactoryProvider.emSetupImpls){ // bring back the old emSetupImpl and session EntityManagerFactoryProvider.emSetupImpls.put(sessionName, setupImpl); SessionManager.getManager().getSessions().put(sessionName, setupImpl.getSession()); setupImpl.setIsMetadataExpired(false); } delegate = oldDelegate; throw ex; } } /** * INTERNAL: Returns the SessionBroker that the Factory will be using and * initializes it if it is not available. This method makes use of the * partially constructed session stored in our setupImpl and completes its * construction * TODO: should throw IllegalStateException if not SessionBroker */ public SessionBroker getSessionBroker() { return delegate.getSessionBroker(); } /** * INTERNAL: Returns the ServerSession that the Factory will be using and * initializes it if it is not available. This method makes use of the * partially constructed session stored in our setupImpl and completes its * construction */ public ServerSession getServerSession() { return delegate.getServerSession(); } /** * Closes this factory, releasing any resources that might be held by this * factory. After invoking this method, all methods on the instance will * throw an {@link IllegalStateException}, except for {@link #isOpen}, which * will return <code>false</code>. */ public synchronized void close() { delegate.close(); } /** * Indicates whether or not this factory is open. Returns <code>true</code> * until a call to {@link #close} is made. */ public boolean isOpen() { return delegate.isOpen(); } /** * PUBLIC: Returns an EntityManager for this deployment. */ public EntityManager createEntityManager() { return createEntityManagerImpl(null, null); } /** * PUBLIC: Returns an EntityManager for this deployment. */ public EntityManager createEntityManager(Map properties) { return createEntityManagerImpl(properties, null); } public EntityManager createEntityManager(SynchronizationType synchronizationType) { return createEntityManagerImpl(null, synchronizationType); } public EntityManager createEntityManager(SynchronizationType synchronizationType, Map map) { return createEntityManagerImpl(map, synchronizationType); } protected EntityManagerImpl createEntityManagerImpl(Map properties, SynchronizationType syncType) { EntityManagerSetupImpl setupImpl = delegate.getSetupImpl(); if (setupImpl != null) { if (setupImpl.isMetadataExpired()) { String sessionName = setupImpl.getSessionName(); EntityManagerSetupImpl storedImpl = null; synchronized (EntityManagerFactoryProvider.emSetupImpls){ storedImpl = EntityManagerFactoryProvider.emSetupImpls.get(sessionName); } if (storedImpl != null) { delegate = new EntityManagerFactoryDelegate(storedImpl, delegate.getProperties(), this); } } } return delegate.createEntityManagerImpl(properties, syncType); } /** * Gets the underlying implementation of the EntityManagerFactory. * This method will return a version of EntityManagerFactory that is * based on the available metadata at the time it is called. Future calls * to refresh will not affect that metadata on this EntityManagerFactory. * @return */ public EntityManagerFactoryDelegate unwrap(){ return delegate; } protected void verifyOpen() { delegate.verifyOpen(); } protected void finalize() throws Throwable { delegate = null; } /** * The method return user defined property passed in from * EntityManagerFactory. */ public Object getProperty(String name) { return delegate.getProperty(name); } /** * Return default join existing transaction property, allows reading through * write connection. */ public boolean getBeginEarlyTransaction() { return delegate.getBeginEarlyTransaction(); } /** * Set default join existing transaction property, allows reading through * write connection. */ public void setBeginEarlyTransaction(boolean beginEarlyTransaction) { delegate.setBeginEarlyTransaction(beginEarlyTransaction); } /** * Return default property, allows flush before query to be avoided. */ public FlushModeType getFlushMode() { return delegate.getFlushMode(); } /** * Set default property, allows flush before query to be avoided. */ public void setFlushMode(FlushModeType flushMode) { delegate.setFlushMode(flushMode); } /** * Return default property, allows weak unit of work references. */ public ReferenceMode getReferenceMode() { return delegate.getReferenceMode(); } /** * Set default property, allows weak unit of work references. */ public void setReferenceMode(ReferenceMode referenceMode) { delegate.setReferenceMode(referenceMode); } /** * Return default property to avoid resuming unit of work if going to be * closed on commit anyway. */ public boolean getCloseOnCommit() { return delegate.getCloseOnCommit(); } /** * Set default property to avoid resuming unit of work if going to be closed * on commit anyway. */ public void setCloseOnCommit(boolean closeOnCommit) { delegate.setCloseOnCommit(closeOnCommit); } /** * Return default property to avoid discover new objects in unit of work if * application always uses persist. */ public boolean getPersistOnCommit() { return delegate.getPersistOnCommit(); } /** * Return interface providing access to utility methods for the persistence * unit. * * @return PersistenceUnitUtil interface * @throws IllegalStateException * if the entity manager factory has been closed. */ public PersistenceUnitUtil getPersistenceUnitUtil() { return delegate.getPersistenceUnitUtil(); } /** * Set default property to avoid discover new objects in unit of work if * application always uses persist. */ public void setPersistOnCommit(boolean persistOnCommit) { delegate.setPersistOnCommit(persistOnCommit); } /** * Return default property to avoid discover new objects in unit of work if * application always uses persist. */ public boolean getCommitWithoutPersistRules() { return delegate.getCommitWithoutPersistRules(); } /** * Set default property to avoid discover new objects in unit of work if * application always uses persist. */ public void setCommitWithoutPersistRules(boolean commitWithoutPersistRules) { delegate.setCommitWithoutPersistRules(commitWithoutPersistRules); } /** * Return the default FlashClearCache mode to be used. Relevant only in case * call to flush method followed by call to clear method. * * @see org.eclipse.persistence.config.FlushClearCache */ public String getFlushClearCache() { return delegate.getFlushClearCache(); } /** * Set the default FlashClearCache mode to be used. Relevant only in case * call to flush method followed by call to clear method. * * @see org.eclipse.persistence.config.FlushClearCache */ public void setFlushClearCache(String flushClearCache) { delegate.setFlushClearCache(flushClearCache); } /** * Return the default to determine if does-exist should be performed on * persist. */ public boolean shouldValidateExistence() { return delegate.shouldValidateExistence(); } /** * Set the default to determine if does-exist should be performed on * persist. */ public void setShouldValidateExistence(boolean shouldValidateExistence) { delegate.setShouldValidateExistence(shouldValidateExistence); } /** * Access the cache that is associated with the entity manager * factory (the "second level cache"). * @return instance of the <code>Cache</code> interface * @throws IllegalStateException if the entity manager factory has been closed * @see javax.persistence.EntityManagerFactory#getCache() * @since Java Persistence 2.0 */ public Cache getCache() { return delegate.getCache(); } /** * @see javax.persistence.EntityManagerFactory#getProperties() * @since Java Persistence API 2.0 */ public Map<String, Object> getProperties() { return delegate.getProperties(); } public DatabaseSessionImpl getDatabaseSession() { return delegate.getDatabaseSession(); } /** * @see javax.persistence.EntityManagerFactory#getCriteriaBuilder() * @since Java Persistence 2.0 */ public CriteriaBuilder getCriteriaBuilder() { return delegate.getCriteriaBuilder(); } /** * Return an instance of Metamodel interface for access to the metamodel of * the persistence unit. * * @return Metamodel instance * @throws IllegalStateException * if the entity manager factory has been closed. * @see javax.persistence.EntityManagerFactory#getMetamodel() * @since Java Persistence 2.0 */ public Metamodel getMetamodel() { return delegate.getMetamodel(); } /** * INTERNAL: Convenience function to allow us to reset the Metamodel in the * possible case that we want to regenerate it. This function is outside of * the JPA 2.0 specification. * * @param aMetamodel * @since Java Persistence 2.0 */ public void setMetamodel(Metamodel aMetamodel) { delegate.setMetamodel(aMetamodel); } /** * Determine the load state of a given persistent attribute of an entity * belonging to the persistence unit. * * @param entity * containing the attribute * @param attributeName * name of attribute whose load state is to be determined * @return false if entity's state has not been loaded or if the attribute * state has not been loaded, otherwise true */ public boolean isLoaded(Object entity, String attributeName) { return delegate.isLoaded(entity, attributeName); } /** * Determine the load state of an entity belonging to the persistence unit. * This method can be used to determine the load state of an entity passed * as a reference. An entity is considered loaded if all attributes for * which FetchType EAGER has been specified have been loaded. The * isLoaded(Object, String) method should be used to determine the load * state of an attribute. Not doing so might lead to unintended loading of * state. * * @param entity * whose load state is to be determined * @return false if the entity has not been loaded, else true. */ public boolean isLoaded(Object entity) { return delegate.isLoaded(entity); } /** * Returns the id of the entity. A generated id is not guaranteed to be * available until after the database insert has occurred. Returns null if * the entity does not yet have an id * * @param entity * @return id of the entity * @throws IllegalStateException * if the entity is found not to be an entity. */ public Object getIdentifier(Object entity) { return delegate.getIdentifier(entity); } /** * ADVANCED: * Return if updates should be ordered by primary key to avoid possible database deadlocks. */ public boolean shouldOrderUpdates() { return delegate.shouldOrderUpdates(); } /** * ADVANCED: * Set updates should be ordered by primary key to avoid possible database deadlocks. */ public void setShouldOrderUpdates(boolean shouldOrderUpdates) { delegate.setShouldOrderUpdates(shouldOrderUpdates); } public void addNamedQuery(String name, Query query) { DatabaseQuery unwrapped = (DatabaseQuery) query.unwrap(DatabaseQuery.class).clone(); if (((QueryImpl)query).lockMode != null){ ((ObjectLevelReadQuery)unwrapped).setLockModeType(((QueryImpl)query).lockMode.name(), getServerSession()); } if (unwrapped.isReadQuery()){ ((ReadQuery)unwrapped).setInternalMax((((QueryImpl)query).getMaxResultsInternal())); } this.getServerSession().addQuery(name, unwrapped, true); } public <T> T unwrap(Class<T> cls) { if (cls.equals(JpaEntityManagerFactory.class) || cls.equals(EntityManagerFactoryImpl.class)) { return (T) this; }else if (cls.equals(EntityManagerFactoryDelegate.class)) { return (T) this.delegate; }else if (cls.equals(Session.class) || cls.equals(AbstractSession.class)) { return (T) this.delegate.getAbstractSession(); } else if (cls.equals(DatabaseSession.class) || cls.equals(DatabaseSessionImpl.class)) { return (T) this.getDatabaseSession(); } else if (cls.equals(Server.class) || cls.equals(ServerSession.class)) { return (T) this.getServerSession(); } else if (cls.equals(SessionBroker.class)) { return (T) this.getSessionBroker(); } throw new PersistenceException(ExceptionLocalization.buildMessage("unable_to_unwrap_jpa", new String[]{EntityManagerFactory.class.getName(),cls.getName()})); } public <T> void addNamedEntityGraph(String graphName, EntityGraph<T> entityGraph) { AttributeGroup group = ((EntityGraphImpl)entityGraph).getAttributeGroup().clone(); group.setName(graphName); this.getServerSession().getAttributeGroups().put(graphName, group); this.getServerSession().getDescriptor(((EntityGraphImpl)entityGraph).getClassType()).addAttributeGroup(group); } }
Bug 350728 - JPA 2.1 Dynamic Named Query definition Update to support wrapped queries on EE containers Former-commit-id: 8eb7cd0c1324d91d59d7a955978e85a841e7a648
jpa/org.eclipse.persistence.jpa/src/org/eclipse/persistence/internal/jpa/EntityManagerFactoryImpl.java
Bug 350728 - JPA 2.1 Dynamic Named Query definition Update to support wrapped queries on EE containers
Java
mpl-2.0
183066a797f8f284f30e6e8fcf28ea0bdbe43ad5
0
carlwilson/veraPDF-library
package org.verapdf.validation.profile.parser; import org.verapdf.exceptions.validationprofileparser.IncorrectImportPathException; import org.verapdf.exceptions.validationprofileparser.MissedHashTagException; import org.verapdf.exceptions.validationprofileparser.WrongSignatureException; import org.verapdf.validation.profile.model.*; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.stream.XMLStreamException; import java.io.File; import java.io.IOException; import java.util.*; /** * This class is for parse the validation profile xml file into java classes. * Created by bezrukov on 4/24/15. * * @author Maksim Bezrukov * @version 1.0 */ public final class ValidationProfileParser { private Set<String> profilesPaths; private ValidationProfile profile; private DocumentBuilder builder; private File resource; private ValidationProfileParser(File resourceFile, boolean isSignCheckOn) throws ParserConfigurationException, IOException, SAXException, IncorrectImportPathException, XMLStreamException, MissedHashTagException, WrongSignatureException { resource = resourceFile; if (isSignCheckOn) { ValidationProfileSignatureChecker checker = ValidationProfileSignatureChecker.newInstance(resource); if (!checker.isValidSignature()) { throw new WrongSignatureException("Unsigned validation profile: " + resource.getCanonicalPath()); } } DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); builder = factory.newDocumentBuilder(); factory.setIgnoringElementContentWhitespace(true); Document doc = builder.parse(resource); profilesPaths = new HashSet<>(); profilesPaths.add(resourceFile.getCanonicalPath()); Node root = doc.getDocumentElement(); root.normalize(); parseRoot(root, isSignCheckOn); } private void parseRoot(Node root, boolean isSignCheckOn) throws IOException, SAXException, IncorrectImportPathException { String model = null; String name = null; String description = null; String creator = null; String created = null; String hash = null; Map<String, List<Rule>> rules = new HashMap<>(); Node modelNode = root.getAttributes().getNamedItem("model"); if (modelNode != null){ model = modelNode.getNodeValue(); } NodeList children = root.getChildNodes(); for(int i = 0; i < children.getLength(); ++i){ Node child = children.item(i); String childName = child.getNodeName(); if (childName.equals("name")) { name = child.getTextContent().trim(); } else if (childName.equals("description")) { description = child.getTextContent().trim(); } else if (childName.equals("creator")) { creator = child.getTextContent().trim(); } else if (childName.equals("created")) { created = child.getTextContent().trim(); } else if (childName.equals("hash") && isSignCheckOn) { hash = child.getTextContent().trim(); } else if (childName.equals("imports")) { parseImports(resource, child, rules); } else if (childName.equals("rules")) { parseRules(child, rules); } } profile = new ValidationProfile(model, name, description, creator, created, hash, rules); } private void parseImports(File sourceFile, Node imports, Map<String, List<Rule>> rules) throws SAXException, IncorrectImportPathException, IOException { NodeList children = imports.getChildNodes(); for(int i = 0; i < children.getLength(); ++i) { Node child = children.item(i); if (!child.getNodeName().equals("import")) { continue; } String path = child.getTextContent().trim(); File newFile = new File(sourceFile.getParent(), path); if (newFile == null || !newFile.exists()){ throw new IncorrectImportPathException("Can not find import with path \"" + path + "\" directly to the given profile."); } if(profilesPaths.contains(newFile.getCanonicalPath())){ continue; } profilesPaths.add(newFile.getCanonicalPath()); Document doc = builder.parse(newFile); NodeList children2 = doc.getDocumentElement().getChildNodes(); for(int j = 0; j < children2.getLength(); ++j){ Node child2 = children2.item(j); String name = child2.getNodeName(); if (name.equals("rules")) { parseRules(child2, rules); } else if (name.equals("imports")) { parseImports(newFile, child2, rules); } } } } private void parseRules(Node rules, Map<String, List<Rule>> rulesMap){ NodeList children = rules.getChildNodes(); for(int i = 0; i < children.getLength(); ++i) { Node child = children.item(i); if (child.getNodeName().equals("rule")) { Rule rule = parseRule(child); if (rulesMap.get(rule.getAttrObject()) == null) { List<Rule> newRules = new ArrayList<>(); rulesMap.put(rule.getAttrObject(), newRules); } rulesMap.get(rule.getAttrObject()).add(rule); } } } private Rule parseRule(Node rule){ String id = null; String object = null; String description = null; String test = null; RuleError ruleError = null; boolean isHasError = false; Reference reference = null; List<Fix> fix = new ArrayList<>(); Node idNode = rule.getAttributes().getNamedItem("id"); if (idNode != null){ id = idNode.getNodeValue(); } Node objectNode = rule.getAttributes().getNamedItem("object"); if (objectNode != null){ object = objectNode.getNodeValue(); } NodeList children = rule.getChildNodes(); for(int i = 0; i < children.getLength(); ++i){ Node child = children.item(i); String childName = child.getNodeName(); if (childName.equals("description")) { description = child.getTextContent().trim(); } else if (childName.equals("test")) { test = child.getTextContent().trim(); } else if (childName.equals("error")) { ruleError = parseRuleError(child); isHasError = true; } else if (childName.equals("warning")) { ruleError = parseRuleError(child); } else if (childName.equals("reference")) { reference = parseReference(child); } else if (childName.equals("fix")) { fix.add(parseFix(child)); } } return new Rule(id, object, description, ruleError, isHasError, test, reference, fix); } private RuleError parseRuleError(Node err){ String message = null; List<String> argument = new ArrayList<>(); NodeList children = err.getChildNodes(); for(int i = 0; i < children.getLength(); ++i){ Node child = children.item(i); String childName = child.getNodeName(); if (childName.equals("message")) { message = child.getTextContent().trim(); } else if (childName.equals("argument")) { argument.add(child.getTextContent().trim()); } } return new RuleError(message, argument); } private Reference parseReference(Node ref){ String specification = null; String clause = null; NodeList children = ref.getChildNodes(); for(int i = 0; i < children.getLength(); ++i){ Node child = children.item(i); String childName = child.getNodeName(); if (childName.equals("specification")) { specification = child.getTextContent().trim(); } else if (childName.equals("clause")) { clause = child.getTextContent().trim(); } } return new Reference(specification, clause); } private Fix parseFix(Node fix){ String id = null; String description = null; FixInfo info = null; FixError error = null; Node idNode = fix.getAttributes().getNamedItem("id"); if (idNode != null){ id = idNode.getNodeValue(); } NodeList children = fix.getChildNodes(); for(int i = 0; i < children.getLength(); ++i){ Node child = children.item(i); String childName = child.getNodeName(); if (childName.equals("description")) { description = child.getTextContent().trim(); } else if (childName.equals("info")) { NodeList nodelist = child.getChildNodes(); for (int j = 0; j < nodelist.getLength(); ++j){ if (nodelist.item(j).getNodeName().equals("message")){ info = new FixInfo(nodelist.item(j).getTextContent().trim()); break; } } } else if (childName.equals("error")) { NodeList nodelist = child.getChildNodes(); for (int j = 0; j < nodelist.getLength(); ++j){ if (nodelist.item(j).getNodeName().equals("message")){ error = new FixError(nodelist.item(j).getTextContent().trim()); break; } } } } return new Fix(id, description, info, error); } /** * Parses validation profile xml. * @param resourcePath - Path to the file for parse. * @return Validation profile represent in Java classes. * @throws ParserConfigurationException - if a DocumentBuilder cannot be created which satisfies the configuration requested. * @throws IOException - if any IO errors occur. * @throws SAXException - if any parse errors occur. * @throws IncorrectImportPathException - if validation profile contains incorrect input path * @throws MissedHashTagException - if validation profile must be signed, but it has no hash tag * @throws XMLStreamException - if exception occurs in parsing a validation profile with xml stream (in checking signature of the validation profile) * @throws WrongSignatureException - if validation profile must be signed, but it has wrong signature */ public static ValidationProfile parseValidationProfile(String resourcePath, boolean isSignCheckOn) throws ParserConfigurationException, SAXException, IOException, IncorrectImportPathException, MissedHashTagException, XMLStreamException, WrongSignatureException { return parseValidationProfile(new File(resourcePath), isSignCheckOn); } /** * * @param resourceFile - File for parse. * @return Validation profile represent in Java classes. * @throws ParserConfigurationException - if a DocumentBuilder cannot be created which satisfies the configuration requested. * @throws IOException - if any IO errors occur. * @throws SAXException - if any parse errors occur. * @throws IncorrectImportPathException - if validation profile contains incorrect input path * @throws MissedHashTagException - if validation profile must be signed, but it has no hash tag * @throws XMLStreamException - if exception occurs in parsing a validation profile with xml stream (in checking signature of the validation profile) * @throws WrongSignatureException - if validation profile must be signed, but it has wrong signature */ public static ValidationProfile parseValidationProfile(File resourceFile, boolean isSignCheckOn) throws ParserConfigurationException, SAXException, IOException, IncorrectImportPathException, MissedHashTagException, XMLStreamException, WrongSignatureException { return new ValidationProfileParser(resourceFile, isSignCheckOn).profile; } }
validationprofileparser/src/main/java/org/verapdf/validation/profile/parser/ValidationProfileParser.java
package org.verapdf.validation.profile.parser; import org.verapdf.exceptions.validationprofileparser.IncorrectImportPathException; import org.verapdf.exceptions.validationprofileparser.MissedHashTagException; import org.verapdf.exceptions.validationprofileparser.WrongSignatureException; import org.verapdf.validation.profile.model.*; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.SAXException; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import javax.xml.stream.XMLStreamException; import java.io.File; import java.io.IOException; import java.util.*; /** * This class is for parse the validation profile xml file into java classes. * Created by bezrukov on 4/24/15. * * @author Maksim Bezrukov * @version 1.0 */ public final class ValidationProfileParser { private Set<String> profilesPaths; private ValidationProfile profile; private DocumentBuilder builder; private File resource; private ValidationProfileParser(File resourceFile, boolean isSignCheckOn) throws ParserConfigurationException, IOException, SAXException, IncorrectImportPathException, XMLStreamException, MissedHashTagException, WrongSignatureException { resource = resourceFile; if (isSignCheckOn) { ValidationProfileSignatureChecker checker = ValidationProfileSignatureChecker.newInstance(resource); if (!checker.isValidSignature()) { throw new WrongSignatureException("Unsigned validation profile: " + resource.getCanonicalPath()); } } DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); builder = factory.newDocumentBuilder(); factory.setIgnoringElementContentWhitespace(true); Document doc = builder.parse(resource); profilesPaths = new HashSet<>(); profilesPaths.add(resourceFile.getCanonicalPath()); Node root = doc.getDocumentElement(); root.normalize(); parseRoot(root, isSignCheckOn); } private void parseRoot(Node root, boolean isSignCheckOn) throws IOException, SAXException, IncorrectImportPathException { String model = null; String name = null; String description = null; String creator = null; String created = null; String hash = null; Map<String, List<Rule>> rules = new HashMap<>(); Node modelNode = root.getAttributes().getNamedItem("model"); if (modelNode != null){ model = modelNode.getNodeValue(); } NodeList children = root.getChildNodes(); for(int i = 0; i < children.getLength(); ++i){ Node child = children.item(i); String childName = child.getNodeName(); if (childName.equals("name")) { name = child.getTextContent().trim(); } else if (childName.equals("description")) { description = child.getTextContent().trim(); } else if (childName.equals("creator")) { creator = child.getTextContent().trim(); } else if (childName.equals("created")) { created = child.getTextContent().trim(); } else if (childName.equals("hash")) { hash = child.getTextContent().trim(); } else if (childName.equals("imports")) { parseImports(resource, child, rules); } else if (childName.equals("rules")) { parseRules(child, rules); } } profile = new ValidationProfile(model, name, description, creator, created, hash, rules); } private void parseImports(File sourceFile, Node imports, Map<String, List<Rule>> rules) throws SAXException, IncorrectImportPathException, IOException { NodeList children = imports.getChildNodes(); for(int i = 0; i < children.getLength(); ++i) { Node child = children.item(i); if (!child.getNodeName().equals("import")) { continue; } String path = child.getTextContent().trim(); File newFile = new File(sourceFile.getParent(), path); if (newFile == null || !newFile.exists()){ throw new IncorrectImportPathException("Can not find import with path \"" + path + "\" directly to the given profile."); } if(profilesPaths.contains(newFile.getCanonicalPath())){ continue; } profilesPaths.add(newFile.getCanonicalPath()); Document doc = builder.parse(newFile); NodeList children2 = doc.getDocumentElement().getChildNodes(); for(int j = 0; j < children2.getLength(); ++j){ Node child2 = children2.item(j); String name = child2.getNodeName(); if (name.equals("rules")) { parseRules(child2, rules); } else if (name.equals("imports")) { parseImports(newFile, child2, rules); } } } } private void parseRules(Node rules, Map<String, List<Rule>> rulesMap){ NodeList children = rules.getChildNodes(); for(int i = 0; i < children.getLength(); ++i) { Node child = children.item(i); if (child.getNodeName().equals("rule")) { Rule rule = parseRule(child); if (rulesMap.get(rule.getAttrObject()) == null) { List<Rule> newRules = new ArrayList<>(); rulesMap.put(rule.getAttrObject(), newRules); } rulesMap.get(rule.getAttrObject()).add(rule); } } } private Rule parseRule(Node rule){ String id = null; String object = null; String description = null; String test = null; RuleError ruleError = null; boolean isHasError = false; Reference reference = null; List<Fix> fix = new ArrayList<>(); Node idNode = rule.getAttributes().getNamedItem("id"); if (idNode != null){ id = idNode.getNodeValue(); } Node objectNode = rule.getAttributes().getNamedItem("object"); if (objectNode != null){ object = objectNode.getNodeValue(); } NodeList children = rule.getChildNodes(); for(int i = 0; i < children.getLength(); ++i){ Node child = children.item(i); String childName = child.getNodeName(); if (childName.equals("description")) { description = child.getTextContent().trim(); } else if (childName.equals("test")) { test = child.getTextContent().trim(); } else if (childName.equals("error")) { ruleError = parseRuleError(child); isHasError = true; } else if (childName.equals("warning")) { ruleError = parseRuleError(child); } else if (childName.equals("reference")) { reference = parseReference(child); } else if (childName.equals("fix")) { fix.add(parseFix(child)); } } return new Rule(id, object, description, ruleError, isHasError, test, reference, fix); } private RuleError parseRuleError(Node err){ String message = null; List<String> argument = new ArrayList<>(); NodeList children = err.getChildNodes(); for(int i = 0; i < children.getLength(); ++i){ Node child = children.item(i); String childName = child.getNodeName(); if (childName.equals("message")) { message = child.getTextContent().trim(); } else if (childName.equals("argument")) { argument.add(child.getTextContent().trim()); } } return new RuleError(message, argument); } private Reference parseReference(Node ref){ String specification = null; String clause = null; NodeList children = ref.getChildNodes(); for(int i = 0; i < children.getLength(); ++i){ Node child = children.item(i); String childName = child.getNodeName(); if (childName.equals("specification")) { specification = child.getTextContent().trim(); } else if (childName.equals("clause")) { clause = child.getTextContent().trim(); } } return new Reference(specification, clause); } private Fix parseFix(Node fix){ String id = null; String description = null; FixInfo info = null; FixError error = null; Node idNode = fix.getAttributes().getNamedItem("id"); if (idNode != null){ id = idNode.getNodeValue(); } NodeList children = fix.getChildNodes(); for(int i = 0; i < children.getLength(); ++i){ Node child = children.item(i); String childName = child.getNodeName(); if (childName.equals("description")) { description = child.getTextContent().trim(); } else if (childName.equals("info")) { NodeList nodelist = child.getChildNodes(); for (int j = 0; j < nodelist.getLength(); ++j){ if (nodelist.item(j).getNodeName().equals("message")){ info = new FixInfo(nodelist.item(j).getTextContent().trim()); break; } } } else if (childName.equals("error")) { NodeList nodelist = child.getChildNodes(); for (int j = 0; j < nodelist.getLength(); ++j){ if (nodelist.item(j).getNodeName().equals("message")){ error = new FixError(nodelist.item(j).getTextContent().trim()); break; } } } } return new Fix(id, description, info, error); } /** * Parses validation profile xml. * @param resourcePath - Path to the file for parse. * @return Validation profile represent in Java classes. * @throws ParserConfigurationException - if a DocumentBuilder cannot be created which satisfies the configuration requested. * @throws IOException - if any IO errors occur. * @throws SAXException - if any parse errors occur. * @throws IncorrectImportPathException - if validation profile contains incorrect input path * @throws MissedHashTagException - if validation profile must be signed, but it has no hash tag * @throws XMLStreamException - if exception occurs in parsing a validation profile with xml stream (in checking signature of the validation profile) * @throws WrongSignatureException - if validation profile must be signed, but it has wrong signature */ public static ValidationProfile parseValidationProfile(String resourcePath, boolean isSignCheckOn) throws ParserConfigurationException, SAXException, IOException, IncorrectImportPathException, MissedHashTagException, XMLStreamException, WrongSignatureException { return parseValidationProfile(new File(resourcePath), isSignCheckOn); } /** * * @param resourceFile - File for parse. * @return Validation profile represent in Java classes. * @throws ParserConfigurationException - if a DocumentBuilder cannot be created which satisfies the configuration requested. * @throws IOException - if any IO errors occur. * @throws SAXException - if any parse errors occur. * @throws IncorrectImportPathException - if validation profile contains incorrect input path * @throws MissedHashTagException - if validation profile must be signed, but it has no hash tag * @throws XMLStreamException - if exception occurs in parsing a validation profile with xml stream (in checking signature of the validation profile) * @throws WrongSignatureException - if validation profile must be signed, but it has wrong signature */ public static ValidationProfile parseValidationProfile(File resourceFile, boolean isSignCheckOn) throws ParserConfigurationException, SAXException, IOException, IncorrectImportPathException, MissedHashTagException, XMLStreamException, WrongSignatureException { return new ValidationProfileParser(resourceFile, isSignCheckOn).profile; } }
Revert "Returned hash reading from unsigned profile" This reverts commit 0c5274aa7bf6ed9d115e2c3229dcc063d6893b8f.
validationprofileparser/src/main/java/org/verapdf/validation/profile/parser/ValidationProfileParser.java
Revert "Returned hash reading from unsigned profile"
Java
agpl-3.0
8a01b0949c802801bb39cef69c91f6048c067931
0
Audiveris/audiveris,Audiveris/audiveris
//------------------------------------------------------------------------------------------------// // // // S h a p e B o a r d // // // //------------------------------------------------------------------------------------------------// // <editor-fold defaultstate="collapsed" desc="hdr"> // // Copyright © Audiveris 2017. All rights reserved. // // This program is free software: you can redistribute it and/or modify it under the terms of the // GNU Affero General Public License as published by the Free Software Foundation, either version // 3 of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; // without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. // See the GNU Affero General Public License for more details. // // You should have received a copy of the GNU Affero General Public License along with this // program. If not, see <http://www.gnu.org/licenses/>. //------------------------------------------------------------------------------------------------// // </editor-fold> package org.audiveris.omr.sig.ui; import com.jgoodies.forms.builder.PanelBuilder; import com.jgoodies.forms.layout.CellConstraints; import com.jgoodies.forms.layout.FormLayout; import org.audiveris.omr.OMR; import org.audiveris.omr.constant.Constant; import org.audiveris.omr.constant.ConstantSet; import org.audiveris.omr.glyph.Glyph; import org.audiveris.omr.glyph.Shape; import org.audiveris.omr.glyph.ShapeSet; import org.audiveris.omr.sheet.Sheet; import org.audiveris.omr.sheet.symbol.SymbolFactory; import org.audiveris.omr.ui.Board; import org.audiveris.omr.ui.OmrGlassPane; import org.audiveris.omr.ui.dnd.AbstractGhostDropListener; import org.audiveris.omr.ui.dnd.GhostDropAdapter; import org.audiveris.omr.ui.dnd.GhostDropEvent; import org.audiveris.omr.ui.dnd.GhostDropListener; import org.audiveris.omr.ui.dnd.GhostGlassPane; import org.audiveris.omr.ui.dnd.GhostMotionAdapter; import org.audiveris.omr.ui.dnd.ScreenPoint; import org.audiveris.omr.ui.selection.UserEvent; import org.audiveris.omr.ui.symbol.MusicFont; import org.audiveris.omr.ui.symbol.ShapeSymbol; import org.audiveris.omr.ui.util.Panel; import org.audiveris.omr.ui.view.RubberPanel; import org.audiveris.omr.ui.view.ScrollView; import org.audiveris.omr.ui.view.Zoom; import org.audiveris.omr.util.Navigable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.awt.Color; import java.awt.Component; import java.awt.Dimension; import java.awt.FlowLayout; import java.awt.Point; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.awt.event.MouseListener; import java.awt.image.BufferedImage; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.swing.JButton; /** * Class {@code ShapeBoard} hosts a palette of shapes for insertion and assignment of * inter. * <p> * Shapes are gathered and presented in separate sets that are mutually exclusive. * <p> * A special set of shapes, always visible, is dedicated to the latest shapes used to ease the * repetition of user actions. * <ul> * <li>Direct insertion is performed by drag and drop to the target score view or sheet view</li> * <li>Assignment from an existing glyph is performed by a double-click</li> * </ul> * * @author Hervé Bitteur */ public class ShapeBoard extends Board { //~ Static fields/initializers ----------------------------------------------------------------- private static final Constants constants = new Constants(); private static final Logger logger = LoggerFactory.getLogger(ShapeBoard.class); /** To force the width of the various panels. */ private static final int BOARD_WIDTH = 317; /** * To force the height of the various shape panels. * This is just a dirty hack, to force Swing FlowLayout to wrap its flow. * A better solution might be to use JGoodies Layout, when we have some time to migrate... */ private static final Map<ShapeSet, Integer> heights = new HashMap<ShapeSet, Integer>(); static { heights.put(ShapeSet.Accidentals, 40); heights.put(ShapeSet.Articulations, 40); heights.put(ShapeSet.Attributes, 60); heights.put(ShapeSet.Barlines, 140); heights.put(ShapeSet.BeamsAndTuplets, 60); heights.put(ShapeSet.Clefs, 140); heights.put(ShapeSet.Digits, 40); heights.put(ShapeSet.Dynamics, 70); heights.put(ShapeSet.Flags, 140); heights.put(ShapeSet.Keys, 180); heights.put(ShapeSet.Holds, 40); heights.put(ShapeSet.Markers, 40); heights.put(ShapeSet.HeadsAndDot, 60); heights.put(ShapeSet.Ornaments, 70); heights.put(ShapeSet.Physicals, 70); heights.put(ShapeSet.Pluckings, 40); heights.put(ShapeSet.Rests, 120); heights.put(ShapeSet.Romans, 60); heights.put(ShapeSet.Times, 120); } //~ Instance fields ---------------------------------------------------------------------------- /** Related sheet. */ @Navigable(false) private final Sheet sheet; /** On-going DnD operation, if any. */ private DndOperation dndOperation; /** * Called-back when a set is selected: the panel of shape sets is "replaced" by * the panel of shapes that compose the selected set. */ private final ActionListener setListener = new ActionListener() { @Override public void actionPerformed (ActionEvent e) { // Hide panel of sets setsPanel.setVisible(false); // Show specific panel of shapes String setName = ((JButton) e.getSource()).getName(); ShapeSet set = ShapeSet.getShapeSet(setName); shapesPanel = shapesPanels.get(set); shapesPanel.setVisible(true); resizeBoard(); } }; /** * Called-back when a panel of shapes is closed: the panel is replaced by the * panel of sets to allow the selection of another set. */ private final ActionListener closeListener = new ActionListener() { @Override public void actionPerformed (ActionEvent e) { // Hide current panel of shapes shapesPanel.setVisible(false); // Show panel of sets setsPanel.setVisible(true); resizeBoard(); } }; /** * Called-back when a shape button is (double-) clicked. */ private final MouseListener mouseListener = new MouseAdapter() { // Ability to use the button for direct assignment via double-click @Override public void mouseClicked (MouseEvent e) { if (e.getClickCount() == 2) { Glyph glyph = sheet.getGlyphIndex().getSelectedGlyph(); if (glyph != null) { ShapeButton button = (ShapeButton) e.getSource(); // Actually assign the shape sheet.getInterController().addInter(glyph, button.shape); // Update history shapeHistory.add(button.shape); } } } }; /** Panel of all shape sets. */ private final Panel setsPanel; /** Map of shape panels. */ private final Map<ShapeSet, Panel> shapesPanels = new HashMap<ShapeSet, Panel>(); /** History of recently used shapes. */ private final ShapeHistory shapeHistory; /** Current panel of shapes. */ private Panel shapesPanel; /** GlassPane. */ private final GhostGlassPane glassPane = OMR.gui.getGlassPane(); /** Update image and forward mouse location. */ private final MyMotionAdapter motionAdapter = new MyMotionAdapter(); /** When symbol is dropped. */ private final GhostDropListener<Shape> dropListener = new MyDropListener(); /** When mouse is pressed (start) and released (stop). */ private final MyDropAdapter dropAdapter = new MyDropAdapter(); //~ Constructors ------------------------------------------------------------------------------- /** * Create a new ShapeBoard object. * * @param sheet the related sheet * @param selected true if initially selected */ public ShapeBoard (Sheet sheet, boolean selected) { super(Board.SHAPE, null, null, selected, false, false, false); this.sheet = sheet; dropAdapter.addDropListener(dropListener); shapeHistory = new ShapeHistory(); setsPanel = buildSetsPanel(); defineLayout(); } //~ Methods ------------------------------------------------------------------------------------ //---------// // onEvent // //---------// /** * Unused in this board. * * @param event unused */ @Override public void onEvent (UserEvent event) { // Empty } //------------// // addButtons // //------------// private void addButtons (Panel panel, List<Shape> shapes) { for (Shape shape : shapes) { ShapeButton button = new ShapeButton(shape); button.addMouseListener(mouseListener); // For double-click button.addMouseListener(dropAdapter); // For DnD transfer and double-click button.addMouseMotionListener(motionAdapter); // For dragging panel.add(button); } } //----------------// // buildSetsPanel // //----------------// /** * Build the global panel of sets. * * @return the global panel of sets */ private Panel buildSetsPanel () { Panel panel = new Panel(); panel.setNoInsets(); panel.setPreferredSize(new Dimension(BOARD_WIDTH, 185)); FlowLayout layout = new FlowLayout(); layout.setAlignment(FlowLayout.LEADING); panel.setLayout(layout); panel.setBackground(Color.LIGHT_GRAY); for (ShapeSet set : ShapeSet.getShapeSets()) { Shape rep = set.getRep(); if (rep != null) { JButton button = new JButton(); button.setIcon(rep.getDecoratedSymbol()); button.setName(set.getName()); button.addActionListener(setListener); button.setToolTipText(set.getName()); button.setBorderPainted(false); panel.add(button); // Create the related shapesPanel shapesPanels.put(set, buildShapesPanel(set)); } } return panel; } //------------------// // buildShapesPanel // //------------------// /** * Build the panel of shapes for a given set. * * @param set the given set of shapes * @return the panel of shapes for the provided set */ private Panel buildShapesPanel (ShapeSet set) { Panel panel = new Panel(); panel.setNoInsets(); panel.setPreferredSize(new Dimension(BOARD_WIDTH, getSetHeight(set))); FlowLayout layout = new FlowLayout(); layout.setAlignment(FlowLayout.LEADING); panel.setLayout(layout); // Button to close this shapes panel and return to sets panel JButton close = new JButton(set.getName()); close.addActionListener(closeListener); close.setToolTipText("Back to shape sets"); close.setBorderPainted(false); panel.add(close); // One button per shape addButtons(panel, set.getSortedShapes()); return panel; } //--------------// // defineLayout // //--------------// private void defineLayout () { CellConstraints cst = new CellConstraints(); FormLayout layout = new FormLayout( "190dlu", "pref," + Panel.getFieldInterline() + ",pref"); PanelBuilder builder = new PanelBuilder(layout, getBody()); builder.add(shapeHistory.panel, cst.xy(1, 1)); builder.add(setsPanel, cst.xy(1, 3)); for (Panel shapesPanel : shapesPanels.values()) { builder.add(shapesPanel, cst.xy(1, 3)); // All overlap setsPanel shapesPanel.setVisible(false); } } //--------------// // getIconImage // //--------------// /** * Get the image to draw as an icon for the provided shape. * * @param shape the provided shape * @return an image properly sized for an icon */ private BufferedImage getIconImage (Shape shape) { ShapeSymbol symbol = (shape == Shape.BEAM_HOOK) ? shape.getPhysicalShape().getSymbol() : shape.getDecoratedSymbol(); return symbol.getIconImage(); } //----------------------// // getNonDraggableImage // //----------------------// private BufferedImage getNonDraggableImage (Zoom zoom) { int zoomedInterline = (int) Math.rint(zoom.getRatio() * sheet.getScale().getInterline()); return MusicFont.buildImage(Shape.NON_DRAGGABLE, zoomedInterline, true); // Decorated } //--------------// // getSetHeight // //--------------// /** * Safe method to report the preferred panel height for the provided set. * * @param set provided set * @return preferred height (or a default value) */ private int getSetHeight (ShapeSet set) { Integer height = heights.get(set); if (height == null) { logger.error("No panel height for set {}", set.getName()); height = 100; } return height; } //~ Inner Classes ------------------------------------------------------------------------------ //-------------// // ShapeButton // //-------------// /** * A button dedicated to a shape. */ public static class ShapeButton extends JButton { //~ Instance fields ------------------------------------------------------------------------ final Shape shape; //~ Constructors --------------------------------------------------------------------------- public ShapeButton (Shape shape) { this.shape = shape; setIcon(shape.getDecoratedSymbol()); setName(shape.toString()); setToolTipText(shape.toString()); setBorderPainted(true); } } //-----------// // Constants // //-----------// private static final class Constants extends ConstantSet { //~ Instance fields ------------------------------------------------------------------------ private final Constant.Boolean publishLocationWhileDragging = new Constant.Boolean( false, "Should we publish the current location while dragging a shape?"); private final Constant.Integer maxHistoryLength = new Constant.Integer( "shapes", 8, "Maximum number of shapes kept in history"); } //---------------// // MyDropAdapter // //---------------// /** * DnD adapter called when mouse is pressed and released. */ private class MyDropAdapter extends GhostDropAdapter<Shape> { //~ Constructors --------------------------------------------------------------------------- public MyDropAdapter () { super(ShapeBoard.this.glassPane, null); } //~ Methods -------------------------------------------------------------------------------- public Shape getAction () { return action; } // Start of DnD. (set pay load?) @Override public void mousePressed (MouseEvent e) { // Reset the motion adapter motionAdapter.reset(); ShapeButton button = (ShapeButton) e.getSource(); Shape shape = button.shape; // Set shape & image if (shape.isDraggable()) { // Wait for drag to actually begin... action = shape; image = getIconImage(shape); } else { action = Shape.NON_DRAGGABLE; image = Shape.NON_DRAGGABLE.getSymbol().getIconImage(); ((OmrGlassPane) glassPane).setReference(null); } super.mousePressed(e); } // End of DnD. Reset pay load @Override public void mouseReleased (MouseEvent e) { super.mouseReleased(e); OmrGlassPane glass = (OmrGlassPane) glassPane; glass.setReference(null); dndOperation = null; } } //----------------// // MyDropListener // //----------------// /** * Listener called when DnD shape is dropped. */ private class MyDropListener extends AbstractGhostDropListener<Shape> { //~ Constructors --------------------------------------------------------------------------- public MyDropListener () { // Target will be any view of sheet assembly super(null); } //~ Methods -------------------------------------------------------------------------------- @Override public void dropped (GhostDropEvent<Shape> e) { Shape shape = e.getAction(); if (dndOperation != null) { if (shape != Shape.NON_DRAGGABLE) { ScreenPoint screenPoint = e.getDropLocation(); // The (zoomed) sheet view ScrollView scrollView = sheet.getStub().getAssembly().getSelectedView(); if (screenPoint.isInComponent(scrollView.getComponent().getViewport())) { RubberPanel view = scrollView.getView(); Point localPt = screenPoint.getLocalPoint(view); view.getZoom().unscale(localPt); dndOperation.drop(localPt); // Update history shapeHistory.add(dndOperation.getGhost().getShape()); } } } } } //-----------------// // MyMotionAdapter // //-----------------// /** * Adapter in charge of forwarding the current mouse location and * updating the dragged image according to the target under the mouse. */ private class MyMotionAdapter extends GhostMotionAdapter { //~ Instance fields ------------------------------------------------------------------------ // Optimization: remember the latest component on target private WeakReference<Component> prevComponent; //~ Constructors --------------------------------------------------------------------------- public MyMotionAdapter () { super(ShapeBoard.this.glassPane); reset(); } //~ Methods -------------------------------------------------------------------------------- /** * In this specific implementation, we update the size of the * shape image according to the interline scale and to the * display zoom of the droppable target underneath. * * @param e the mouse event */ @Override public void mouseDragged (MouseEvent e) { final ShapeButton button = (ShapeButton) e.getSource(); final Shape shape = button.shape; final ScreenPoint screenPoint = new ScreenPoint(e.getXOnScreen(), e.getYOnScreen()); final OmrGlassPane glass = (OmrGlassPane) glassPane; // The (zoomed) sheet view ScrollView scrollView = sheet.getStub().getAssembly().getSelectedView(); Component component = scrollView.getComponent().getViewport(); if (screenPoint.isInComponent(component)) { final RubberPanel view = scrollView.getView(); final Zoom zoom = view.getZoom(); final Point localPt = zoom.unscaled(screenPoint.getLocalPoint(view)); glass.setOverTarget(true); // Moving into this component? if (component != prevComponent.get()) { if (shape.isDraggable()) { if (dndOperation == null) { // Set payload dndOperation = new DndOperation( sheet, zoom, SymbolFactory.createManual(shape)); } dndOperation.enteringTarget(); } else { glass.setImage(getNonDraggableImage(zoom)); glass.setReference(null); } prevComponent = new WeakReference<Component>(component); } if (shape.isDraggable()) { // Update reference point Point localRef = dndOperation.getReference(localPt); glass.setReference( (localRef != null) ? new ScreenPoint(view, zoom.scaled(localRef)) : null); } } else if (prevComponent.get() != null) { // No longer on a droppable target, reuse initial image & size glass.setOverTarget(false); glass.setImage(dropAdapter.getImage()); glass.setReference(null); reset(); } glass.setPoint(screenPoint); // This triggers a repaint of glassPane } public final void reset () { prevComponent = new WeakReference<Component>(null); } } //--------------// // ShapeHistory // //--------------// private class ShapeHistory { //~ Instance fields ------------------------------------------------------------------------ private final List<Shape> shapes = new ArrayList<Shape>(); private final Panel panel = new Panel(); //~ Constructors --------------------------------------------------------------------------- public ShapeHistory () { panel.setNoInsets(); FlowLayout layout = new FlowLayout(); layout.setAlignment(FlowLayout.LEADING); panel.setLayout(layout); } //~ Methods -------------------------------------------------------------------------------- public void add (Shape shape) { // Remove duplicate if any shapes.remove(shape); // Insert at beginning of the list shapes.add(0, shape); // Check for maximum length while (shapes.size() > constants.maxHistoryLength.getValue()) { shapes.remove(shapes.size() - 1); } // Regenerate the buttons panel.removeAll(); addButtons(panel, shapes); resizeBoard(); } } }
src/main/org/audiveris/omr/sig/ui/ShapeBoard.java
//------------------------------------------------------------------------------------------------// // // // S h a p e B o a r d // // // //------------------------------------------------------------------------------------------------// // <editor-fold defaultstate="collapsed" desc="hdr"> // // Copyright © Audiveris 2017. All rights reserved. // // This program is free software: you can redistribute it and/or modify it under the terms of the // GNU Affero General Public License as published by the Free Software Foundation, either version // 3 of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; // without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. // See the GNU Affero General Public License for more details. // // You should have received a copy of the GNU Affero General Public License along with this // program. If not, see <http://www.gnu.org/licenses/>. //------------------------------------------------------------------------------------------------// // </editor-fold> package org.audiveris.omr.sig.ui; import com.jgoodies.forms.builder.PanelBuilder; import com.jgoodies.forms.layout.CellConstraints; import com.jgoodies.forms.layout.FormLayout; import org.audiveris.omr.OMR; import org.audiveris.omr.constant.Constant; import org.audiveris.omr.constant.ConstantSet; import org.audiveris.omr.glyph.Glyph; import org.audiveris.omr.glyph.Shape; import org.audiveris.omr.glyph.ShapeSet; import org.audiveris.omr.sheet.Sheet; import org.audiveris.omr.sheet.symbol.SymbolFactory; import org.audiveris.omr.ui.Board; import org.audiveris.omr.ui.OmrGlassPane; import org.audiveris.omr.ui.dnd.AbstractGhostDropListener; import org.audiveris.omr.ui.dnd.GhostDropAdapter; import org.audiveris.omr.ui.dnd.GhostDropEvent; import org.audiveris.omr.ui.dnd.GhostDropListener; import org.audiveris.omr.ui.dnd.GhostGlassPane; import org.audiveris.omr.ui.dnd.GhostMotionAdapter; import org.audiveris.omr.ui.dnd.ScreenPoint; import org.audiveris.omr.ui.selection.UserEvent; import org.audiveris.omr.ui.symbol.MusicFont; import org.audiveris.omr.ui.symbol.ShapeSymbol; import org.audiveris.omr.ui.util.Panel; import org.audiveris.omr.ui.view.RubberPanel; import org.audiveris.omr.ui.view.ScrollView; import org.audiveris.omr.ui.view.Zoom; import org.audiveris.omr.util.Navigable; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.awt.Color; import java.awt.Component; import java.awt.Dimension; import java.awt.FlowLayout; import java.awt.Point; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import java.awt.event.MouseAdapter; import java.awt.event.MouseEvent; import java.awt.event.MouseListener; import java.awt.image.BufferedImage; import java.lang.ref.WeakReference; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import javax.swing.JButton; /** * Class {@code ShapeBoard} hosts a palette of shapes for insertion and assignment of * inter. * <p> * Shapes are gathered and presented in separate sets that are mutually exclusive. * <p> * A special set of shapes, always visible, is dedicated to the latest shapes used to ease the * repetition of user actions. * <ul> * <li>Direct insertion is performed by drag and drop to the target score view or sheet view</li> * <li>Assignment from an existing glyph is performed by a double-click</li> * </ul> * * @author Hervé Bitteur */ public class ShapeBoard extends Board { //~ Static fields/initializers ----------------------------------------------------------------- private static final Constants constants = new Constants(); private static final Logger logger = LoggerFactory.getLogger(ShapeBoard.class); /** To force the width of the various panels. */ private static final int BOARD_WIDTH = 317; /** * To force the height of the various shape panels. * This is just a dirty hack, to force Swing FlowLayout to wrap its flow. * A better solution might be to use JGoodies Layout, when we have some time to migrate... */ private static final Map<ShapeSet, Integer> heights = new HashMap<ShapeSet, Integer>(); static { heights.put(ShapeSet.Accidentals, 40); heights.put(ShapeSet.Articulations, 40); heights.put(ShapeSet.Attributes, 60); heights.put(ShapeSet.Barlines, 140); heights.put(ShapeSet.BeamsAndTuplets, 60); heights.put(ShapeSet.Clefs, 140); heights.put(ShapeSet.Digits, 40); heights.put(ShapeSet.Dynamics, 70); heights.put(ShapeSet.Flags, 140); heights.put(ShapeSet.Keys, 180); heights.put(ShapeSet.Holds, 40); heights.put(ShapeSet.Markers, 40); heights.put(ShapeSet.HeadsAndDot, 60); heights.put(ShapeSet.Ornaments, 70); heights.put(ShapeSet.Physicals, 70); heights.put(ShapeSet.Pluckings, 40); heights.put(ShapeSet.Rests, 120); heights.put(ShapeSet.Romans, 60); heights.put(ShapeSet.Times, 120); } //~ Instance fields ---------------------------------------------------------------------------- /** Related sheet. */ @Navigable(false) private final Sheet sheet; /** On-going DnD operation, if any. */ private DndOperation dndOperation; /** * Called-back when a set is selected: the panel of shape sets is "replaced" by * the panel of shapes that compose the selected set. */ private final ActionListener setListener = new ActionListener() { @Override public void actionPerformed (ActionEvent e) { // Hide panel of sets setsPanel.setVisible(false); // Show specific panel of shapes String setName = ((JButton) e.getSource()).getName(); ShapeSet set = ShapeSet.getShapeSet(setName); shapesPanel = shapesPanels.get(set); shapesPanel.setVisible(true); resizeBoard(); } }; /** * Called-back when a panel of shapes is closed: the panel is replaced by the * panel of sets to allow the selection of another set. */ private final ActionListener closeListener = new ActionListener() { @Override public void actionPerformed (ActionEvent e) { // Hide current panel of shapes shapesPanel.setVisible(false); // Show panel of sets setsPanel.setVisible(true); resizeBoard(); } }; /** * Called-back when a shape button is (double-) clicked. */ private final MouseListener mouseListener = new MouseAdapter() { // Ability to use the button for direct assignment via double-click @Override public void mouseClicked (MouseEvent e) { if (e.getClickCount() == 2) { Glyph glyph = sheet.getGlyphIndex().getSelectedGlyph(); if (glyph != null) { ShapeButton button = (ShapeButton) e.getSource(); // Actually assign the shape sheet.getInterController().addInter(glyph, button.shape); // Update history shapeHistory.add(button.shape); } } } }; /** Panel of all shape sets. */ private final Panel setsPanel; /** Map of shape panels. */ private final Map<ShapeSet, Panel> shapesPanels = new HashMap<ShapeSet, Panel>(); /** History of recently used shapes. */ private final ShapeHistory shapeHistory; /** Current panel of shapes. */ private Panel shapesPanel; /** GlassPane. */ private final GhostGlassPane glassPane = OMR.gui.getGlassPane(); /** Update image and forward mouse location. */ private final MyMotionAdapter motionAdapter = new MyMotionAdapter(); /** When symbol is dropped. */ private final GhostDropListener<Shape> dropListener = new MyDropListener(); /** When mouse is pressed (start) and released (stop). */ private final MyDropAdapter dropAdapter = new MyDropAdapter(); //~ Constructors ------------------------------------------------------------------------------- /** * Create a new ShapeBoard object. * * @param sheet the related sheet * @param selected true if initially selected */ public ShapeBoard (Sheet sheet, boolean selected) { super(Board.SHAPE, null, null, selected, false, false, false); this.sheet = sheet; dropAdapter.addDropListener(dropListener); shapeHistory = new ShapeHistory(); setsPanel = buildSetsPanel(); defineLayout(); } //~ Methods ------------------------------------------------------------------------------------ //---------// // onEvent // //---------// /** * Unused in this board. * * @param event unused */ @Override public void onEvent (UserEvent event) { // Empty } //------------// // addButtons // //------------// private void addButtons (Panel panel, List<Shape> shapes) { for (Shape shape : shapes) { ShapeButton button = new ShapeButton(shape); button.addMouseListener(mouseListener); // For double-click button.addMouseListener(dropAdapter); // For DnD transfer and double-click button.addMouseMotionListener(motionAdapter); // For dragging panel.add(button); } } //----------------// // buildSetsPanel // //----------------// /** * Build the global panel of sets. * * @return the global panel of sets */ private Panel buildSetsPanel () { Panel panel = new Panel(); panel.setNoInsets(); panel.setPreferredSize(new Dimension(BOARD_WIDTH, 160)); FlowLayout layout = new FlowLayout(); layout.setAlignment(FlowLayout.LEADING); panel.setLayout(layout); panel.setBackground(Color.LIGHT_GRAY); for (ShapeSet set : ShapeSet.getShapeSets()) { Shape rep = set.getRep(); if (rep != null) { JButton button = new JButton(); button.setIcon(rep.getDecoratedSymbol()); button.setName(set.getName()); button.addActionListener(setListener); button.setToolTipText(set.getName()); button.setBorderPainted(false); panel.add(button); // Create the related shapesPanel shapesPanels.put(set, buildShapesPanel(set)); } } return panel; } //------------------// // buildShapesPanel // //------------------// /** * Build the panel of shapes for a given set. * * @param set the given set of shapes * @return the panel of shapes for the provided set */ private Panel buildShapesPanel (ShapeSet set) { Panel panel = new Panel(); panel.setNoInsets(); panel.setPreferredSize(new Dimension(BOARD_WIDTH, getSetHeight(set))); FlowLayout layout = new FlowLayout(); layout.setAlignment(FlowLayout.LEADING); panel.setLayout(layout); // Button to close this shapes panel and return to sets panel JButton close = new JButton(set.getName()); close.addActionListener(closeListener); close.setToolTipText("Back to shape sets"); close.setBorderPainted(false); panel.add(close); // One button per shape addButtons(panel, set.getSortedShapes()); return panel; } //--------------// // defineLayout // //--------------// private void defineLayout () { CellConstraints cst = new CellConstraints(); FormLayout layout = new FormLayout( "190dlu", "pref," + Panel.getFieldInterline() + ",pref"); PanelBuilder builder = new PanelBuilder(layout, getBody()); builder.add(shapeHistory.panel, cst.xy(1, 1)); builder.add(setsPanel, cst.xy(1, 3)); for (Panel shapesPanel : shapesPanels.values()) { builder.add(shapesPanel, cst.xy(1, 3)); // All overlap setsPanel shapesPanel.setVisible(false); } } //--------------// // getIconImage // //--------------// /** * Get the image to draw as an icon for the provided shape. * * @param shape the provided shape * @return an image properly sized for an icon */ private BufferedImage getIconImage (Shape shape) { ShapeSymbol symbol = (shape == Shape.BEAM_HOOK) ? shape.getPhysicalShape().getSymbol() : shape.getDecoratedSymbol(); return symbol.getIconImage(); } //----------------------// // getNonDraggableImage // //----------------------// private BufferedImage getNonDraggableImage (Zoom zoom) { int zoomedInterline = (int) Math.rint(zoom.getRatio() * sheet.getScale().getInterline()); return MusicFont.buildImage(Shape.NON_DRAGGABLE, zoomedInterline, true); // Decorated } //--------------// // getSetHeight // //--------------// /** * Safe method to report the preferred panel height for the provided set. * * @param set provided set * @return preferred height (or a default value) */ private int getSetHeight (ShapeSet set) { Integer height = heights.get(set); if (height == null) { logger.error("No panel height for set {}", set.getName()); height = 100; } return height; } //~ Inner Classes ------------------------------------------------------------------------------ //-------------// // ShapeButton // //-------------// /** * A button dedicated to a shape. */ public static class ShapeButton extends JButton { //~ Instance fields ------------------------------------------------------------------------ final Shape shape; //~ Constructors --------------------------------------------------------------------------- public ShapeButton (Shape shape) { this.shape = shape; setIcon(shape.getDecoratedSymbol()); setName(shape.toString()); setToolTipText(shape.toString()); setBorderPainted(true); } } //-----------// // Constants // //-----------// private static final class Constants extends ConstantSet { //~ Instance fields ------------------------------------------------------------------------ private final Constant.Boolean publishLocationWhileDragging = new Constant.Boolean( false, "Should we publish the current location while dragging a shape?"); private final Constant.Integer maxHistoryLength = new Constant.Integer( "shapes", 8, "Maximum number of shapes kept in history"); } //---------------// // MyDropAdapter // //---------------// /** * DnD adapter called when mouse is pressed and released. */ private class MyDropAdapter extends GhostDropAdapter<Shape> { //~ Constructors --------------------------------------------------------------------------- public MyDropAdapter () { super(ShapeBoard.this.glassPane, null); } //~ Methods -------------------------------------------------------------------------------- public Shape getAction () { return action; } // Start of DnD. (set pay load?) @Override public void mousePressed (MouseEvent e) { // Reset the motion adapter motionAdapter.reset(); ShapeButton button = (ShapeButton) e.getSource(); Shape shape = button.shape; // Set shape & image if (shape.isDraggable()) { // Wait for drag to actually begin... action = shape; image = getIconImage(shape); } else { action = Shape.NON_DRAGGABLE; image = Shape.NON_DRAGGABLE.getSymbol().getIconImage(); ((OmrGlassPane) glassPane).setReference(null); } super.mousePressed(e); } // End of DnD. Reset pay load @Override public void mouseReleased (MouseEvent e) { super.mouseReleased(e); OmrGlassPane glass = (OmrGlassPane) glassPane; glass.setReference(null); dndOperation = null; } } //----------------// // MyDropListener // //----------------// /** * Listener called when DnD shape is dropped. */ private class MyDropListener extends AbstractGhostDropListener<Shape> { //~ Constructors --------------------------------------------------------------------------- public MyDropListener () { // Target will be any view of sheet assembly super(null); } //~ Methods -------------------------------------------------------------------------------- @Override public void dropped (GhostDropEvent<Shape> e) { Shape shape = e.getAction(); if (dndOperation != null) { if (shape != Shape.NON_DRAGGABLE) { ScreenPoint screenPoint = e.getDropLocation(); // The (zoomed) sheet view ScrollView scrollView = sheet.getStub().getAssembly().getSelectedView(); if (screenPoint.isInComponent(scrollView.getComponent().getViewport())) { RubberPanel view = scrollView.getView(); Point localPt = screenPoint.getLocalPoint(view); view.getZoom().unscale(localPt); dndOperation.drop(localPt); // Update history shapeHistory.add(dndOperation.getGhost().getShape()); } } } } } //-----------------// // MyMotionAdapter // //-----------------// /** * Adapter in charge of forwarding the current mouse location and * updating the dragged image according to the target under the mouse. */ private class MyMotionAdapter extends GhostMotionAdapter { //~ Instance fields ------------------------------------------------------------------------ // Optimization: remember the latest component on target private WeakReference<Component> prevComponent; //~ Constructors --------------------------------------------------------------------------- public MyMotionAdapter () { super(ShapeBoard.this.glassPane); reset(); } //~ Methods -------------------------------------------------------------------------------- /** * In this specific implementation, we update the size of the * shape image according to the interline scale and to the * display zoom of the droppable target underneath. * * @param e the mouse event */ @Override public void mouseDragged (MouseEvent e) { final ShapeButton button = (ShapeButton) e.getSource(); final Shape shape = button.shape; final ScreenPoint screenPoint = new ScreenPoint(e.getXOnScreen(), e.getYOnScreen()); final OmrGlassPane glass = (OmrGlassPane) glassPane; // The (zoomed) sheet view ScrollView scrollView = sheet.getStub().getAssembly().getSelectedView(); Component component = scrollView.getComponent().getViewport(); if (screenPoint.isInComponent(component)) { final RubberPanel view = scrollView.getView(); final Zoom zoom = view.getZoom(); final Point localPt = zoom.unscaled(screenPoint.getLocalPoint(view)); glass.setOverTarget(true); // Moving into this component? if (component != prevComponent.get()) { if (shape.isDraggable()) { if (dndOperation == null) { // Set payload dndOperation = new DndOperation( sheet, zoom, SymbolFactory.createManual(shape)); } dndOperation.enteringTarget(); } else { glass.setImage(getNonDraggableImage(zoom)); glass.setReference(null); } prevComponent = new WeakReference<Component>(component); } if (shape.isDraggable()) { // Update reference point Point localRef = dndOperation.getReference(localPt); glass.setReference( (localRef != null) ? new ScreenPoint(view, zoom.scaled(localRef)) : null); } } else if (prevComponent.get() != null) { // No longer on a droppable target, reuse initial image & size glass.setOverTarget(false); glass.setImage(dropAdapter.getImage()); glass.setReference(null); reset(); } glass.setPoint(screenPoint); // This triggers a repaint of glassPane } public final void reset () { prevComponent = new WeakReference<Component>(null); } } //--------------// // ShapeHistory // //--------------// private class ShapeHistory { //~ Instance fields ------------------------------------------------------------------------ private final List<Shape> shapes = new ArrayList<Shape>(); private final Panel panel = new Panel(); //~ Constructors --------------------------------------------------------------------------- public ShapeHistory () { panel.setNoInsets(); FlowLayout layout = new FlowLayout(); layout.setAlignment(FlowLayout.LEADING); panel.setLayout(layout); } //~ Methods -------------------------------------------------------------------------------- public void add (Shape shape) { // Remove duplicate if any shapes.remove(shape); // Insert at beginning of the list shapes.add(0, shape); // Check for maximum length while (shapes.size() > constants.maxHistoryLength.getValue()) { shapes.remove(shapes.size() - 1); } // Regenerate the buttons panel.removeAll(); addButtons(panel, shapes); resizeBoard(); } } }
More room for shape sets panel
src/main/org/audiveris/omr/sig/ui/ShapeBoard.java
More room for shape sets panel
Java
agpl-3.0
e8181999340b73e621f14e96d44dfa97cf00bc99
0
MusesProject/MusesServer,MusesProject/MusesServer
/* * version 1.0 - MUSES prototype software * Copyright MUSES project (European Commission FP7) - 2013 * */ package eu.musesproject.server.connectionmanager; import java.io.IOException; import java.io.PrintWriter; import java.util.Queue; import javax.servlet.Servlet; import javax.servlet.ServletException; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.log4j.Level; import org.apache.log4j.Logger; /** * Class ComMainServlet * * @author Yasir Ali * @version Jan 27, 2014 */ public class ComMainServlet extends HttpServlet { private static final long serialVersionUID = 1L; private static Logger logger = Logger.getLogger(ComMainServlet.class.getName()); private Helper helper; private SessionHandler sessionHandler; private ConnectionManager connectionManager; private String dataAttachedInCurrentReuqest; private String dataToSendBackInResponse=""; private static final String DATA = "data"; private static final int INTERVAL_TO_WAIT = 5; private static final long SLEEP_INTERVAL = 1000; private static final String MUSES_TAG = "MUSES_TAG"; private static final String MUSES_TAG_LEVEL_2 = "MUSES_TAG_LEVEL_2"; /** * * @param sessionHandler * @param helper * @param communicationManager */ public ComMainServlet(SessionHandler sessionHandler, Helper helper, ConnectionManager communicationManager){ this.sessionHandler=sessionHandler; this.helper=helper; this.connectionManager=communicationManager; } public ComMainServlet() { } /** * Initialize servlet * * @throws Servlet exception */ @Override public void init() throws ServletException { super.init(); logger.log(Level.INFO, MUSES_TAG + " init"); helper = new Helper(); connectionManager = ConnectionManager.getInstance(); sessionHandler = SessionHandler.getInstance(getServletContext()); } /** * Handle POST http/https requests * @param HttpServletRequest request * @param com.swedenconnectivity.comserverHttpServletResponse response * @throws ServletException, IOException */ @Override protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { // Retrieve value from request header String connectionType = request.getHeader("connection-type"); // create cookie if not in the request helper.setCookie(request); Cookie cookie = helper.getCookie(); String currentJSessionID = cookie.getValue(); // Retrieve data in the request dataAttachedInCurrentReuqest = helper.getRequestData(request); // if "connect" request if (connectionType!=null && connectionType.equalsIgnoreCase(RequestType.CONNECT)) { logger.log(Level.INFO, MUSES_TAG + " Request type:"+connectionType+" with *ID*: "+currentJSessionID+ " with **dataInRequest**: "+dataAttachedInCurrentReuqest); } // if "send-data" request if (connectionType!=null && connectionType.equalsIgnoreCase(RequestType.DATA)) { // Callback the FL to receive data from the client and get the response data back into string dataToSendBackInResponse=""; if (dataAttachedInCurrentReuqest != null){ dataToSendBackInResponse = ConnectionManager.toReceive(currentJSessionID, dataAttachedInCurrentReuqest); // FIXME needs to be tested properly if (dataToSendBackInResponse == null) { dataToSendBackInResponse = ""; } } if (dataToSendBackInResponse.equals("")) { dataToSendBackInResponse = waitForDataIfAvailable(INTERVAL_TO_WAIT, currentJSessionID); } response.setHeader("Content-Type", "text/plain"); PrintWriter writer = response.getWriter(); writer.write(dataToSendBackInResponse); //response.addHeader(DATA,dataToSendBackInResponse); // Now data is added in the body instead logger.log(Level.INFO, MUSES_TAG + " Data avaialble Request type:"+connectionType+" with *ID*: "+currentJSessionID+ " with **dataInResponse**: "+dataToSendBackInResponse); } // if "poll" request if (connectionType!= null && connectionType.equalsIgnoreCase(RequestType.POLL)) { for (DataHandler dataHandler : connectionManager.getDataHandlerQueue()){ // FIXME concurrent thread if (dataHandler.getSessionId().equalsIgnoreCase(currentJSessionID)){ dataToSendBackInResponse = dataHandler.getData(); response.setHeader("Content-Type", "text/plain"); PrintWriter writer = response.getWriter(); writer.write(dataToSendBackInResponse); response.addHeader(DATA,dataToSendBackInResponse); connectionManager.removeDataHandler(dataHandler); Queue<DataHandler> dQueue = connectionManager.getDataHandlerQueue(); if (dQueue.size() > 1) { response.addHeader("more-packets", "YES"); }else { response.addHeader("more-packets", "NO"); } logger.log(Level.INFO, "Data avaialble Request type:"+connectionType+" with *ID*: "+currentJSessionID+ " with **dataInResponse**: "+dataToSendBackInResponse); break; // FIXME temporary as multiple same session ids are in the list right now } } } // if "ack" request if (connectionType!=null && connectionType.equalsIgnoreCase(RequestType.ACK)) { logger.log(Level.INFO, "Request type:"+connectionType+" with *ID*: "+currentJSessionID); // Clean up the data handler object from the list connectionManager.removeDataHandler(connectionManager.getDataHandlerObject(currentJSessionID)); ConnectionManager.toSessionCb(currentJSessionID, Statuses.DATA_SENT_SUCCESFULLY); } // if disconnect request // invalidate session from Servlet // remove it from the session id list // Callback the Functional layer about the disconnect if (connectionType!= null && connectionType.equalsIgnoreCase(RequestType.DISCONNECT)) { logger.log(Level.INFO, "Request type:"+connectionType+" with *ID*: "+currentJSessionID); helper.disconnect(request); sessionHandler.removeCookieToList(cookie); ConnectionManager.toSessionCb(currentJSessionID, Statuses.DISCONNECTED); } // Add session id to the List if (currentJSessionID != null && !connectionType.equalsIgnoreCase(RequestType.DISCONNECT) ) { sessionHandler.addCookieToList(cookie); } // Setup response to send back logger.log(Level.INFO, MUSES_TAG_LEVEL_2 + " Data for response: " + dataToSendBackInResponse + "for id: " + currentJSessionID); response.setContentType("text/html"); response.addCookie(cookie); } public String getResponseData(){ return dataToSendBackInResponse; } public String waitForDataIfAvailable(int timeout, String currentJSessionID){ int i=1; while(i<=timeout){ Queue<DataHandler> dQueue = connectionManager.getDataHandlerQueue(); logger.log(Level.INFO, MUSES_TAG_LEVEL_2 + " Current Data queue size is: " +dQueue.size()); logger.log(Level.INFO, MUSES_TAG_LEVEL_2 + " Current JSessionID: " + currentJSessionID); if (dQueue.size()>=1) { logger.log(Level.INFO, MUSES_TAG_LEVEL_2 + " Looping the queue.."); for (DataHandler dataHandler : connectionManager.getDataHandlerQueue()){ // FIXME concurrent thread logger.log(Level.INFO, MUSES_TAG_LEVEL_2+ " SessionId in queue:"+ dataHandler.getSessionId()); if (dataHandler.getSessionId().equalsIgnoreCase(currentJSessionID)){ logger.log(Level.INFO, MUSES_TAG_LEVEL_2 + " SessionIds matched, sending response back"); connectionManager.removeDataHandler(dataHandler); dataToSendBackInResponse = dataHandler.getData(); return dataHandler.getData(); } } } sleep(SLEEP_INTERVAL); i++; } return ""; } private void sleep(long millis){ try { Thread.sleep(millis); } catch (InterruptedException e) { logger.log(Level.INFO, e); } } /** * Handle GET http/https requests // Muses will not use this method * @param HttpServletRequest request * @param HttpServletResponse response * @throws ServletException, IOException */ @Override protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { } }
src/main/java/eu/musesproject/server/connectionmanager/ComMainServlet.java
/* * version 1.0 - MUSES prototype software * Copyright MUSES project (European Commission FP7) - 2013 * */ package eu.musesproject.server.connectionmanager; import java.io.IOException; import java.io.PrintWriter; import java.util.Queue; import javax.servlet.Servlet; import javax.servlet.ServletException; import javax.servlet.http.Cookie; import javax.servlet.http.HttpServlet; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.log4j.Level; import org.apache.log4j.Logger; /** * Class ComMainServlet * * @author Yasir Ali * @version Jan 27, 2014 */ public class ComMainServlet extends HttpServlet { private static final long serialVersionUID = 1L; private static Logger logger = Logger.getLogger(ComMainServlet.class.getName()); private Helper helper; private SessionHandler sessionHandler; private ConnectionManager connectionManager; private String dataAttachedInCurrentReuqest; private String dataToSendBackInResponse=""; private static final String DATA = "data"; private static final int INTERVAL_TO_WAIT = 5; private static final long SLEEP_INTERVAL = 1000; private static final String MUSES_TAG = "MUSES_TAG"; private static final String MUSES_TAG_LEVEL_2 = "MUSES_TAG_LEVEL_2"; /** * * @param sessionHandler * @param helper * @param communicationManager */ public ComMainServlet(SessionHandler sessionHandler, Helper helper, ConnectionManager communicationManager){ this.sessionHandler=sessionHandler; this.helper=helper; this.connectionManager=communicationManager; } public ComMainServlet() { } /** * Initialize servlet * * @throws Servlet exception */ @Override public void init() throws ServletException { super.init(); logger.log(Level.INFO, MUSES_TAG + " init"); helper = new Helper(); connectionManager = ConnectionManager.getInstance(); sessionHandler = SessionHandler.getInstance(getServletContext()); } /** * Handle POST http/https requests * @param HttpServletRequest request * @param com.swedenconnectivity.comserverHttpServletResponse response * @throws ServletException, IOException */ @Override protected void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { // Retrieve value from request header String connectionType = request.getHeader("connection-type"); // create cookie if not in the request helper.setCookie(request); Cookie cookie = helper.getCookie(); String currentJSessionID = cookie.getValue(); // Retrieve data in the request dataAttachedInCurrentReuqest = helper.getRequestData(request); // if "connect" request if (connectionType!=null && connectionType.equalsIgnoreCase(RequestType.CONNECT)) { logger.log(Level.INFO, MUSES_TAG + " Request type:"+connectionType+" with *ID*: "+currentJSessionID+ " with **dataInRequest**: "+dataAttachedInCurrentReuqest); } // if "send-data" request if (connectionType!=null && connectionType.equalsIgnoreCase(RequestType.DATA)) { // Callback the FL to receive data from the client and get the response data back into string dataToSendBackInResponse=""; if (dataAttachedInCurrentReuqest != null){ dataToSendBackInResponse = ConnectionManager.toReceive(currentJSessionID, dataAttachedInCurrentReuqest); // FIXME needs to be tested properly if (dataToSendBackInResponse == null) { dataToSendBackInResponse = ""; } } if (dataToSendBackInResponse.equals("")) { dataToSendBackInResponse = waitForDataIfAvailable(INTERVAL_TO_WAIT, currentJSessionID); } response.setHeader("Content-Type", "text/plain"); PrintWriter writer = response.getWriter(); writer.write(dataToSendBackInResponse); response.addHeader(DATA,dataToSendBackInResponse); logger.log(Level.INFO, MUSES_TAG + " Data avaialble Request type:"+connectionType+" with *ID*: "+currentJSessionID+ " with **dataInResponse**: "+dataToSendBackInResponse); } // if "poll" request if (connectionType!= null && connectionType.equalsIgnoreCase(RequestType.POLL)) { for (DataHandler dataHandler : connectionManager.getDataHandlerQueue()){ // FIXME concurrent thread if (dataHandler.getSessionId().equalsIgnoreCase(currentJSessionID)){ dataToSendBackInResponse = dataHandler.getData(); response.setHeader("Content-Type", "text/plain"); PrintWriter writer = response.getWriter(); writer.write(dataToSendBackInResponse); response.addHeader(DATA,dataToSendBackInResponse); connectionManager.removeDataHandler(dataHandler); Queue<DataHandler> dQueue = connectionManager.getDataHandlerQueue(); if (dQueue.size() > 1) { response.addHeader("more-packets", "YES"); }else { response.addHeader("more-packets", "NO"); } logger.log(Level.INFO, "Data avaialble Request type:"+connectionType+" with *ID*: "+currentJSessionID+ " with **dataInResponse**: "+dataToSendBackInResponse); break; // FIXME temporary as multiple same session ids are in the list right now } } } // if "ack" request if (connectionType!=null && connectionType.equalsIgnoreCase(RequestType.ACK)) { logger.log(Level.INFO, "Request type:"+connectionType+" with *ID*: "+currentJSessionID); // Clean up the data handler object from the list connectionManager.removeDataHandler(connectionManager.getDataHandlerObject(currentJSessionID)); ConnectionManager.toSessionCb(currentJSessionID, Statuses.DATA_SENT_SUCCESFULLY); } // if disconnect request // invalidate session from Servlet // remove it from the session id list // Callback the Functional layer about the disconnect if (connectionType!= null && connectionType.equalsIgnoreCase(RequestType.DISCONNECT)) { logger.log(Level.INFO, "Request type:"+connectionType+" with *ID*: "+currentJSessionID); helper.disconnect(request); sessionHandler.removeCookieToList(cookie); ConnectionManager.toSessionCb(currentJSessionID, Statuses.DISCONNECTED); } // Add session id to the List if (currentJSessionID != null && !connectionType.equalsIgnoreCase(RequestType.DISCONNECT) ) { sessionHandler.addCookieToList(cookie); } // Setup response to send back logger.log(Level.INFO, MUSES_TAG_LEVEL_2 + " Data for response: " + dataToSendBackInResponse + "for id: " + currentJSessionID); response.setContentType("text/html"); response.addCookie(cookie); } public String getResponseData(){ return dataToSendBackInResponse; } public String waitForDataIfAvailable(int timeout, String currentJSessionID){ int i=1; while(i<=timeout){ Queue<DataHandler> dQueue = connectionManager.getDataHandlerQueue(); logger.log(Level.INFO, MUSES_TAG_LEVEL_2 + " Current Data queue size is: " +dQueue.size()); logger.log(Level.INFO, MUSES_TAG_LEVEL_2 + " Current JSessionID: " + currentJSessionID); if (dQueue.size()>=1) { logger.log(Level.INFO, MUSES_TAG_LEVEL_2 + " Looping the queue.."); for (DataHandler dataHandler : connectionManager.getDataHandlerQueue()){ // FIXME concurrent thread logger.log(Level.INFO, MUSES_TAG_LEVEL_2+ " SessionId in queue:"+ dataHandler.getSessionId()); if (dataHandler.getSessionId().equalsIgnoreCase(currentJSessionID)){ logger.log(Level.INFO, MUSES_TAG_LEVEL_2 + " SessionIds matched, sending response back"); connectionManager.removeDataHandler(dataHandler); dataToSendBackInResponse = dataHandler.getData(); return dataHandler.getData(); } } } sleep(SLEEP_INTERVAL); i++; } return ""; } private void sleep(long millis){ try { Thread.sleep(millis); } catch (InterruptedException e) { logger.log(Level.INFO, e); } } /** * Handle GET http/https requests // Muses will not use this method * @param HttpServletRequest request * @param HttpServletResponse response * @throws ServletException, IOException */ @Override protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException { } }
data header removed instead data is sent in body of response data header removed instead data is sent in body of response
src/main/java/eu/musesproject/server/connectionmanager/ComMainServlet.java
data header removed instead data is sent in body of response
Java
agpl-3.0
bfa2fcf2ff2c4e32e25ac483115e2aeda6b79786
0
Shoebill/streamer-wrapper
package net.gtaun.shoebill.streamer; import net.gtaun.shoebill.Shoebill; import net.gtaun.shoebill.amx.AmxCallable; import net.gtaun.shoebill.amx.AmxInstance; import net.gtaun.shoebill.amx.types.ReferenceFloat; import net.gtaun.shoebill.amx.types.ReferenceInt; import net.gtaun.shoebill.amx.types.ReferenceString; import net.gtaun.shoebill.constant.ObjectMaterialSize; import net.gtaun.shoebill.data.Color; import net.gtaun.shoebill.data.Location; import net.gtaun.shoebill.data.Vector3D; import net.gtaun.shoebill.event.amx.AmxLoadEvent; import net.gtaun.shoebill.object.Player; import net.gtaun.shoebill.streamer.data.*; import net.gtaun.util.event.EventManager; import net.gtaun.util.event.EventManagerNode; /** * Created by marvin on 23.07.15 in project streamer-wrapper. * Copyright (c) 2015 Marvin Haschker. All rights reserved. */ public class Functions { private static EventManagerNode eventManagerNode; //Objects: private static AmxCallable createDynamicObject; private static AmxCallable destroyDynamicObject; private static AmxCallable isValidDynamicObject; private static AmxCallable setDynamicObjectPos; private static AmxCallable getDynamicObjectPos; private static AmxCallable setDynamicObjectRot; private static AmxCallable getDynamicObjectRot; private static AmxCallable moveDynamicObject; private static AmxCallable stopDynamicObject; private static AmxCallable isDynamicObjectMoving; private static AmxCallable attachCameraToDynamicObject; private static AmxCallable attachDynamicObjectToObject; private static AmxCallable attachDynamicObjectToPlayer; private static AmxCallable attachDynamicObjectToVehicle; private static AmxCallable editDynamicObject; private static AmxCallable isDynamicObjectMaterialUsed; private static AmxCallable getDynamicObjectMaterial; private static AmxCallable setDynamicObjectMaterial; private static AmxCallable isDynamicObjectMaterialTextUsed; private static AmxCallable getDynamicObjectMaterialText; private static AmxCallable setDynamicObjectMaterialText; //Pickups: private static AmxCallable createDynamicPickup; private static AmxCallable destroyDynamicPickup; private static AmxCallable isValidDynamicPickup; //3DTextLabels: private static AmxCallable createDynamic3DTextLabel; private static AmxCallable destroyDynamic3DTextLabel; private static AmxCallable isValidDynamic3DTextLabel; private static AmxCallable getDynamic3DTextLabelText; private static AmxCallable updateDynamic3DTextLabelText; //Streamer: private static AmxCallable update; private static AmxCallable updateEx; public static void registerHandlers(EventManager eventManager) { eventManagerNode = eventManager.createChildNode(); AmxInstance amxInstance = AmxInstance.getDefault(); findObjectFunctions(amxInstance); findPickupFunctions(amxInstance); find3DTextLabelFunctions(amxInstance); findStreamerFunctions(amxInstance); } public static void unregisterHandlers() { eventManagerNode.cancelAll(); eventManagerNode.destroy(); eventManagerNode = null; } private static void findObjectFunctions(AmxInstance instance) { AmxCallable tickFunc = instance.getNative("Streamer_GetTickRate"); if(tickFunc != null && createDynamicObject == null) { createDynamicObject = instance.getNative("CreateDynamicObject"); destroyDynamicObject = instance.getNative("DestroyDynamicObject"); isValidDynamicObject = instance.getNative("IsValidDynamicObject"); setDynamicObjectPos = instance.getNative("SetDynamicObjectPos"); getDynamicObjectPos = instance.getNative("GetDynamicObjectPos"); setDynamicObjectRot = instance.getNative("SetDynamicObjectRot"); getDynamicObjectRot = instance.getNative("GetDynamicObjectRot"); moveDynamicObject = instance.getNative("MoveDynamicObject"); stopDynamicObject = instance.getNative("StopDynamicObject"); isDynamicObjectMoving = instance.getNative("IsDynamicObjectMoving"); attachCameraToDynamicObject = instance.getNative("AttachCameraToDynamicObject"); attachDynamicObjectToObject = instance.getNative("AttachDynamicObjectToObject"); attachDynamicObjectToPlayer = instance.getNative("AttachDynamicObjectToPlayer"); attachDynamicObjectToVehicle = instance.getNative("AttachDynamicObjectToVehicle"); editDynamicObject = instance.getNative("EditDynamicObject"); isDynamicObjectMaterialUsed = instance.getNative("IsDynamicObjectMaterialUsed"); getDynamicObjectMaterial = instance.getNative("GetDynamicObjectMaterial"); setDynamicObjectMaterial = instance.getNative("SetDynamicObjectMaterial"); isDynamicObjectMaterialTextUsed = instance.getNative("IsDynamicObjectMaterialTextUsed"); getDynamicObjectMaterialText = instance.getNative("GetDynamicObjectMaterialText"); setDynamicObjectMaterialText = instance.getNative("SetDynamicObjectMaterialText"); } } private static void findPickupFunctions(AmxInstance instance) { if (createDynamicPickup == null) { createDynamicPickup = instance.getNative("CreateDynamicPickup"); destroyDynamicPickup = instance.getNative("DestroyDynamicPickup"); isValidDynamicPickup = instance.getNative("IsValidDynamicPickup"); } } private static void find3DTextLabelFunctions(AmxInstance instance) { if (createDynamic3DTextLabel == null) { createDynamic3DTextLabel = instance.getNative("CreateDynamic3DTextLabel"); destroyDynamic3DTextLabel = instance.getNative("DestroyDynamic3DTextLabel"); isValidDynamic3DTextLabel = instance.getNative("IsValidDynamic3DTextLabel"); getDynamic3DTextLabelText = instance.getNative("GetDynamic3DTextLabelText"); updateDynamic3DTextLabelText = instance.getNative("UpdateDynamic3DTextLabelText"); } } private static void findStreamerFunctions(AmxInstance instance) { update = instance.getNative("Streamer_Update"); updateEx = instance.getNative("Streamer_UpdateEx"); } public static DynamicObject createDynamicObject(int modelid, Location location, Vector3D rotation) { return createDynamicObject(modelid, location, rotation, DynamicObject.DEFAULT_STREAM_DISTANCE, DynamicObject.DEFAULT_DRAW_DISTANCE); } public static DynamicObject createDynamicObject(int modelid, Location location, Vector3D rotation, float streamDistance, float drawDistance) { return createDynamicObject(modelid, location, rotation, -1, streamDistance, drawDistance); } public static DynamicObject createDynamicObject(int modelid, Location location, Vector3D rotation, int playerid, float streamDistance, float drawDistance) { return createDynamicObject(modelid, location.x, location.y, location.z, rotation.x, rotation.y, rotation.z, location.worldId, location.interiorId, playerid, streamDistance, drawDistance); } public static DynamicObject createDynamicObject(int modelid, float x, float y, float z, float rX, float rY, float rZ, int worldId, int interiorId, int playerId, float streamDistance, float drawDistance) { createDynamicObject = Shoebill.get().getAmxInstanceManager().getAmxInstances().iterator().next().getNative("CreateDynamicObject"); int id = (int)createDynamicObject.call(modelid, x, y, z, rX, rY, rZ, worldId, interiorId, playerId, streamDistance, drawDistance); return new DynamicObject(id, modelid, playerId, streamDistance, drawDistance); } public static void destroyDynamicObject(DynamicObject object) { destroyDynamicObject(object.getId()); } public static void destroyDynamicObject(int id) { destroyDynamicObject.call(id); } public static boolean isValidDynamicObject(DynamicObject object) { return isValidDynamicObject(object.getId()); } public static boolean isValidDynamicObject(int id) { return (int) isValidDynamicObject.call(id) > 0; } public static void setDynamicObjectPos(DynamicObject object, Vector3D pos) { setDynamicObjectPos(object.getId(), pos); } public static void setDynamicObjectPos(int id, Vector3D pos) { setDynamicObjectPos(id, pos.x, pos.y, pos.z); } public static void setDynamicObjectPos(int id, float x, float y, float z) { setDynamicObjectPos.call(id, x, y, z); } public static Vector3D getDynamicObjectPos(DynamicObject object) { return getDynamicObjectPos(object.getId()); } public static Vector3D getDynamicObjectPos(int id) { ReferenceFloat refX = new ReferenceFloat(0.0f); ReferenceFloat refY = new ReferenceFloat(0.0f); ReferenceFloat refZ = new ReferenceFloat(0.0f); getDynamicObjectPos.call(id, refX, refY, refZ); return new Vector3D(refX.getValue(), refY.getValue(), refZ.getValue()); } public static void setDynamicObjectRot(DynamicObject object, Vector3D rot) { setDynamicObjectRot(object.getId(), rot); } public static void setDynamicObjectRot(int id, Vector3D rot) { setDynamicObjectRot(id, rot.x, rot.y, rot.z); } public static void setDynamicObjectRot(int id, float x, float y, float z) { setDynamicObjectRot.call(id, x, y, z); } public static Vector3D getDynamicObjectRot(DynamicObject object) { return getDynamicObjectRot(object.getId()); } public static Vector3D getDynamicObjectRot(int id) { ReferenceFloat refX = new ReferenceFloat(0.0f); ReferenceFloat refY = new ReferenceFloat(0.0f); ReferenceFloat refZ = new ReferenceFloat(0.0f); getDynamicObjectRot.call(id, refX, refY, refZ); return new Vector3D(refX.getValue(), refY.getValue(), refZ.getValue()); } public static void moveDynamicObject(int id, Vector3D newPos, float speed, Vector3D newRot) { moveDynamicObject.call(id, newPos.x, newPos.y, newPos.z, speed, newRot.x, newRot.y, newRot.z); } public static void stopDynamicObject(int id) { stopDynamicObject.call(id); } public static boolean isDynamicObjectMoving(int id) { return (int)isDynamicObjectMoving.call(id) > 0; } public static void attachCameraToDynamicObject(int playerid, int objectId) { attachCameraToDynamicObject.call(playerid, objectId); } public static void attachDynamicObjectToObject(int object, int toObject, float offsetX, float offsetY, float offsetZ, float rotX, float rotY, float rotZ, boolean syncRotation) { attachDynamicObjectToObject.call(object, toObject, offsetX, offsetY, offsetZ, rotX, rotY, rotZ, syncRotation ? 1 : 0); } public static void attachDynamicObjectToPlayer(int object, int playerid, float offsetX, float offsetY, float offsetZ, float rotX, float rotY, float rotZ) { attachDynamicObjectToPlayer.call(object, playerid, offsetX, offsetY, offsetZ, rotX, rotY, rotZ); } public static void attachDynamicObjectToVehicle(int object, int vehicle, float offsetX, float offsetY, float offsetZ, float rotX, float rotY, float rotZ) { attachDynamicObjectToVehicle.call(object, vehicle, offsetX, offsetY, offsetZ, rotX, rotY, rotZ); } public static void editDynamicObject(int playerid, int objectId) { editDynamicObject.call(playerid, objectId); } public static boolean isDynamicObjectMaterialUsed(int objectid, int materialindex) { return (int) isDynamicObjectMaterialUsed.call(objectid, materialindex) > 0; } public static DynamicObjectMaterial getDynamicObjectMaterial(int objectid, int materialindex) { ReferenceInt refModel = new ReferenceInt(0); ReferenceInt refMaterialColor = new ReferenceInt(0); ReferenceString refTxdName = new ReferenceString("", 128); ReferenceString refTextureName = new ReferenceString("", 128); getDynamicObjectMaterial.call(objectid, materialindex, refModel, refTxdName, refTextureName, refMaterialColor, refTxdName.getLength(), refTextureName.getLength()); return new DynamicObjectMaterial(refModel.getValue(), refMaterialColor.getValue(), refTxdName.getValue(), refTextureName.getValue()); } public static void setDynamicObjectMaterial(int objectid, int materialindex, int modelid, String txdname, String texturename, int materialcolor) { setDynamicObjectMaterial.call(objectid, materialindex, modelid, txdname, texturename, materialcolor); } public static boolean isDynamicobjectMaterialTextUsed(int objectid, int materialindex) { return (int) isDynamicObjectMaterialTextUsed.call(objectid, materialindex) > 0; } public static DynamicObjectMaterialText getDynamicObjectMaterialText(int objectid, int materialindex) { ReferenceString refText = new ReferenceString("", 256); ReferenceInt refMaterialSize = new ReferenceInt(0); ReferenceString refFontFace = new ReferenceString("", 64); ReferenceInt refFontSize = new ReferenceInt(0); ReferenceInt refBold = new ReferenceInt(0); ReferenceInt refFontColor = new ReferenceInt(0); ReferenceInt refBackColor = new ReferenceInt(0); ReferenceInt refTextAlignment = new ReferenceInt(0); getDynamicObjectMaterialText.call(objectid, materialindex, refText, refMaterialSize, refFontFace, refFontSize, refBold, refFontColor, refBackColor, refTextAlignment, refText.getLength(), refFontFace.getLength()); return new DynamicObjectMaterialText(refText.getValue(), refFontFace.getValue(), refMaterialSize.getValue(), refFontSize.getValue(), refBold.getValue() > 0, refFontColor.getValue(), refBackColor.getValue(), refTextAlignment.getValue()); } public static void setDynamicObjectMaterialText(int objectid, int materialindex, String text, ObjectMaterialSize materialsize, String fontFace, int fontSize, boolean bold, int fontColor, int backColor, int textAlignment) { setDynamicObjectMaterialText.call(objectid, materialindex, text, materialsize.getValue(), fontFace, fontSize, bold ? 1 : 0, fontColor, backColor, textAlignment); } //Pickups: public static DynamicPickup createDynamicPickup(int modelid, int type, Location location, int playerid, float streamDistance) { return createDynamicPickup(modelid, type, location.x, location.y, location.z, location.worldId, location.interiorId, playerid, streamDistance); } public static DynamicPickup createDynamicPickup(int modelid, int type, float x, float y, float z, int worldid, int interiorid, int playerid, float streamDistance) { int id = (int) createDynamicPickup.call(modelid, type, x,y,z, worldid, interiorid, playerid, streamDistance); return new DynamicPickup(id, modelid, type, playerid, streamDistance); } public static void destroyDynamicPickup(int id) { destroyDynamicPickup.call(id); } public static boolean isValidDynamicPickup(int id) { return (int)isValidDynamicPickup.call(id) > 0; } //3DTextLabels: public static Dynamic3DTextLabel createDynamic3DTextLabel(String text, Color color, Location location, float drawDistance) { return createDynamic3DTextLabel(text, color, location, drawDistance, 0, Dynamic3DTextLabel.DEFAULT_STREAM_DISTANCE); } public static Dynamic3DTextLabel createDynamic3DTextLabel(String text, Color color, Location location, float drawDistance, int testLOS, float streamDistance) { return createDynamic3DTextLabel(text, color, location, drawDistance, testLOS, -1, streamDistance); } public static Dynamic3DTextLabel createDynamic3DTextLabel(String text, Color color, Location location, float drawDistance, int testLOS, int playerid, float streamDistance) { return createDynamic3DTextLabel(text, color, location, drawDistance, 0xFFFF, 0xFFFF, testLOS, playerid, streamDistance); } public static Dynamic3DTextLabel createDynamic3DTextLabel(String text, Color color, Location location, float drawDistance, int attachedPlayer, int attachedVehicle, int testLOS, int playerid, float streamDistance) { return createDynamic3DTextLabel(text, color, location.x, location.y, location.z, drawDistance, attachedPlayer, attachedVehicle, testLOS, location.worldId, location.interiorId, playerid, streamDistance); } public static Dynamic3DTextLabel createDynamic3DTextLabel(String text, Color color, float x, float y, float z, float drawDistance, int attachedPlayer, int attachedVehicle, int testLOS, int worldid, int interiorid, int playerid, float streamDistance) { int id = (int) createDynamic3DTextLabel.call(text, color.getValue(), x,y,z, drawDistance, attachedPlayer, attachedVehicle, testLOS, worldid, interiorid, playerid, streamDistance); return new Dynamic3DTextLabel(id, playerid, streamDistance, drawDistance); } public static void destroyDynamic3DTextLabel(int id) { destroyDynamic3DTextLabel.call(id); } public static boolean isValidDynamic3DTextLabel(int id) { return (int)isValidDynamic3DTextLabel.call(id) > 0; } public static String getDynamic3DTextLabelText(int id) { String text = ""; getDynamic3DTextLabelText.call(id, text, 1024); // Hope no-one will have length of a label text greater then 1024 :) return text; } public static void updateDynamic3DTextLabelText(int id, Color color, String text) { updateDynamic3DTextLabelText.call(id, color.getValue(), text); } public static void update(Player player) { update(player, StreamerType.ALL); } public static void update(Player player, StreamerType streamerType) { update.call(player.getId(), streamerType.getValue()); } public static void updateEx(Player player, float x, float y, float z, int worldid, int interiorid) { updateEx(player, x, y, z, worldid, interiorid, StreamerType.ALL); } public static void updateEx(Player player, float x, float y, float z, int worldid, int interiorid, StreamerType streamerType) { updateEx.call(player.getId(), x, y, z, worldid, interiorid, streamerType.getValue()); } }
src/main/java/net/gtaun/shoebill/streamer/Functions.java
package net.gtaun.shoebill.streamer; import net.gtaun.shoebill.Shoebill; import net.gtaun.shoebill.amx.AmxCallable; import net.gtaun.shoebill.amx.AmxInstance; import net.gtaun.shoebill.amx.types.ReferenceFloat; import net.gtaun.shoebill.amx.types.ReferenceInt; import net.gtaun.shoebill.amx.types.ReferenceString; import net.gtaun.shoebill.constant.ObjectMaterialSize; import net.gtaun.shoebill.data.Color; import net.gtaun.shoebill.data.Location; import net.gtaun.shoebill.data.Vector3D; import net.gtaun.shoebill.event.amx.AmxLoadEvent; import net.gtaun.shoebill.object.Player; import net.gtaun.shoebill.streamer.data.*; import net.gtaun.util.event.EventManager; import net.gtaun.util.event.EventManagerNode; /** * Created by marvin on 23.07.15 in project streamer-wrapper. * Copyright (c) 2015 Marvin Haschker. All rights reserved. */ public class Functions { private static EventManagerNode eventManagerNode; //Objects: private static AmxCallable createDynamicObject; private static AmxCallable destroyDynamicObject; private static AmxCallable isValidDynamicObject; private static AmxCallable setDynamicObjectPos; private static AmxCallable getDynamicObjectPos; private static AmxCallable setDynamicObjectRot; private static AmxCallable getDynamicObjectRot; private static AmxCallable moveDynamicObject; private static AmxCallable stopDynamicObject; private static AmxCallable isDynamicObjectMoving; private static AmxCallable attachCameraToDynamicObject; private static AmxCallable attachDynamicObjectToObject; private static AmxCallable attachDynamicObjectToPlayer; private static AmxCallable attachDynamicObjectToVehicle; private static AmxCallable editDynamicObject; private static AmxCallable isDynamicObjectMaterialUsed; private static AmxCallable getDynamicObjectMaterial; private static AmxCallable setDynamicObjectMaterial; private static AmxCallable isDynamicObjectMaterialTextUsed; private static AmxCallable getDynamicObjectMaterialText; private static AmxCallable setDynamicObjectMaterialText; //Pickups: private static AmxCallable createDynamicPickup; private static AmxCallable destroyDynamicPickup; private static AmxCallable isValidDynamicPickup; //3DTextLabels: private static AmxCallable createDynamic3DTextLabel; private static AmxCallable destroyDynamic3DTextLabel; private static AmxCallable isValidDynamic3DTextLabel; private static AmxCallable getDynamic3DTextLabelText; private static AmxCallable updateDynamic3DTextLabelText; //Streamer: private static AmxCallable update; private static AmxCallable updateEx; public static void registerHandlers(EventManager eventManager) { eventManagerNode = eventManager.createChildNode(); AmxInstance amxInstance = AmxInstance.getDefault(); findObjectFunctions(amxInstance); findPickupFunctions(amxInstance); find3DTextLabelFunctions(amxInstance); findStreamerFunctions(amxInstance); } public static void unregisterHandlers() { eventManagerNode.cancelAll(); eventManagerNode.destroy(); eventManagerNode = null; } private static void findObjectFunctions(AmxInstance instance) { AmxCallable tickFunc = instance.getNative("Streamer_GetTickRate"); if(tickFunc != null && createDynamicObject == null) { createDynamicObject = instance.getNative("CreateDynamicObject"); destroyDynamicObject = instance.getNative("DestroyDynamicObject"); isValidDynamicObject = instance.getNative("IsValidDynamicObject"); setDynamicObjectPos = instance.getNative("SetDynamicObjectPos"); getDynamicObjectPos = instance.getNative("GetDynamicObjectPos"); setDynamicObjectRot = instance.getNative("SetDynamicObjectRot"); getDynamicObjectRot = instance.getNative("GetDynamicObjectRot"); moveDynamicObject = instance.getNative("MoveDynamicObject"); stopDynamicObject = instance.getNative("StopDynamicObject"); isDynamicObjectMoving = instance.getNative("IsDynamicObjectMoving"); attachCameraToDynamicObject = instance.getNative("AttachCameraToDynamicObject"); attachDynamicObjectToObject = instance.getNative("AttachDynamicObjectToObject"); attachDynamicObjectToPlayer = instance.getNative("AttachDynamicObjectToPlayer"); attachDynamicObjectToVehicle = instance.getNative("AttachDynamicObjectToVehicle"); editDynamicObject = instance.getNative("EditDynamicObject"); isDynamicObjectMaterialUsed = instance.getNative("IsDynamicObjectMaterialUsed"); getDynamicObjectMaterial = instance.getNative("GetDynamicObjectMaterial"); setDynamicObjectMaterial = instance.getNative("SetDynamicObjectMaterial"); isDynamicObjectMaterialTextUsed = instance.getNative("IsDynamicObjectMaterialTextUsed"); getDynamicObjectMaterialText = instance.getNative("GetDynamicObjectMaterialText"); setDynamicObjectMaterialText = instance.getNative("SetDynamicObjectMaterialText"); } } private static void findPickupFunctions(AmxInstance instance) { if (createDynamicPickup == null) { createDynamicPickup = instance.getNative("CreateDynamicPickup"); destroyDynamicPickup = instance.getNative("DestroyDynamicPickup"); isValidDynamicPickup = instance.getNative("IsValidDynamicPickup"); } } private static void find3DTextLabelFunctions(AmxInstance instance) { if (createDynamic3DTextLabel == null) { createDynamic3DTextLabel = instance.getNative("CreateDynamic3DTextLabel"); destroyDynamic3DTextLabel = instance.getNative("DestroyDynamic3DTextLabel"); isValidDynamic3DTextLabel = instance.getNative("IsValidDynamic3DTextLabel"); getDynamic3DTextLabelText = instance.getNative("GetDynamic3DTextLabelText"); updateDynamic3DTextLabelText = instance.getNative("UpdateDynamic3DTextLabelText"); } } private static void findStreamerFunctions(AmxInstance instance) { update = instance.getNative("Streamer_Update"); updateEx = instance.getNative("Streamer_UpdateEx"); } public static DynamicObject createDynamicObject(int modelid, Location location, Vector3D rotation) { return createDynamicObject(modelid, location, rotation, DynamicObject.DEFAULT_STREAM_DISTANCE, DynamicObject.DEFAULT_DRAW_DISTANCE); } public static DynamicObject createDynamicObject(int modelid, Location location, Vector3D rotation, float streamDistance, float drawDistance) { return createDynamicObject(modelid, location, rotation, -1, streamDistance, drawDistance); } public static DynamicObject createDynamicObject(int modelid, Location location, Vector3D rotation, int playerid, float streamDistance, float drawDistance) { return createDynamicObject(modelid, location.x, location.y, location.z, rotation.x, rotation.y, rotation.z, location.worldId, location.interiorId, playerid, streamDistance, drawDistance); } public static DynamicObject createDynamicObject(int modelid, float x, float y, float z, float rX, float rY, float rZ, int worldId, int interiorId, int playerId, float streamDistance, float drawDistance) { createDynamicObject = Shoebill.get().getAmxInstanceManager().getAmxInstances().iterator().next().getNative("CreateDynamicObject"); int id = (int)createDynamicObject.call(modelid, x, y, z, rX, rY, rZ, worldId, interiorId, playerId, streamDistance, drawDistance); return new DynamicObject(id, modelid, playerId, streamDistance, drawDistance); } public static void destroyDynamicObject(DynamicObject object) { destroyDynamicObject(object.getId()); } public static void destroyDynamicObject(int id) { destroyDynamicObject.call(id); } public static boolean isValidDynamicObject(DynamicObject object) { return isValidDynamicObject(object.getId()); } public static boolean isValidDynamicObject(int id) { return (int) isValidDynamicObject.call(id) > 0; } public static void setDynamicObjectPos(DynamicObject object, Vector3D pos) { setDynamicObjectPos(object.getId(), pos); } public static void setDynamicObjectPos(int id, Vector3D pos) { setDynamicObjectPos(id, pos.x, pos.y, pos.z); } public static void setDynamicObjectPos(int id, float x, float y, float z) { setDynamicObjectPos.call(id, x, y, z); } public static Vector3D getDynamicObjectPos(DynamicObject object) { return getDynamicObjectPos(object.getId()); } public static Vector3D getDynamicObjectPos(int id) { ReferenceFloat refX = new ReferenceFloat(0.0f); ReferenceFloat refY = new ReferenceFloat(0.0f); ReferenceFloat refZ = new ReferenceFloat(0.0f); getDynamicObjectPos.call(id, refX, refY, refZ); return new Vector3D(refX.getValue(), refY.getValue(), refZ.getValue()); } public static void setDynamicObjectRot(DynamicObject object, Vector3D rot) { setDynamicObjectRot(object.getId(), rot); } public static void setDynamicObjectRot(int id, Vector3D rot) { setDynamicObjectRot(id, rot.x, rot.y, rot.z); } public static void setDynamicObjectRot(int id, float x, float y, float z) { setDynamicObjectRot.call(id, x, y, z); } public static Vector3D getDynamicObjectRot(DynamicObject object) { return getDynamicObjectRot(object.getId()); } public static Vector3D getDynamicObjectRot(int id) { ReferenceFloat refX = new ReferenceFloat(0.0f); ReferenceFloat refY = new ReferenceFloat(0.0f); ReferenceFloat refZ = new ReferenceFloat(0.0f); getDynamicObjectRot.call(id, refX, refY, refZ); return new Vector3D(refX.getValue(), refY.getValue(), refZ.getValue()); } public static void moveDynamicObject(int id, Vector3D newPos, float speed, Vector3D newRot) { moveDynamicObject.call(id, newPos.x, newPos.y, newPos.z, speed, newRot.x, newRot.y, newRot.z); } public static void stopDynamicObject(int id) { stopDynamicObject.call(id); } public static boolean isDynamicObjectMoving(int id) { return (int)isDynamicObjectMoving.call(id) > 0; } public static void attachCameraToDynamicObject(int playerid, int objectId) { attachCameraToDynamicObject.call(playerid, objectId); } public static void attachDynamicObjectToObject(int object, int toObject, float offsetX, float offsetY, float offsetZ, float rotX, float rotY, float rotZ, boolean syncRotation) { attachDynamicObjectToObject.call(object, toObject, offsetX, offsetY, offsetZ, rotX, rotY, rotZ, syncRotation ? 1 : 0); } public static void attachDynamicObjectToPlayer(int object, int playerid, float offsetX, float offsetY, float offsetZ, float rotX, float rotY, float rotZ) { attachDynamicObjectToPlayer.call(object, playerid, offsetX, offsetY, offsetZ, rotX, rotY, rotZ); } public static void attachDynamicObjectToVehicle(int object, int vehicle, float offsetX, float offsetY, float offsetZ, float rotX, float rotY, float rotZ) { attachDynamicObjectToVehicle.call(object, vehicle, offsetX, offsetY, offsetZ, rotX, rotY, rotZ); } public static void editDynamicObject(int playerid, int objectId) { editDynamicObject.call(playerid, objectId); } public static boolean isDynamicObjectMaterialUsed(int objectid, int materialindex) { return (int) isDynamicObjectMaterialUsed.call(objectid, materialindex) > 0; } public static DynamicObjectMaterial getDynamicObjectMaterial(int objectid, int materialindex) { ReferenceInt refModel = new ReferenceInt(0); ReferenceInt refMaterialColor = new ReferenceInt(0); ReferenceString refTxdName = new ReferenceString("", 128); ReferenceString refTextureName = new ReferenceString("", 128); getDynamicObjectMaterial.call(objectid, materialindex, refModel, refTxdName, refTextureName, refMaterialColor, refTxdName.getLength(), refTextureName.getLength()); return new DynamicObjectMaterial(refModel.getValue(), refMaterialColor.getValue(), refTxdName.getValue(), refTextureName.getValue()); } public static void setDynamicObjectMaterial(int objectid, int materialindex, int modelid, String txdname, String texturename, int materialcolor) { setDynamicObjectMaterial.call(objectid, materialindex, modelid, txdname, texturename, materialcolor); } public static boolean isDynamicobjectMaterialTextUsed(int objectid, int materialindex) { return (int) isDynamicObjectMaterialTextUsed.call(objectid, materialindex) > 0; } public static DynamicObjectMaterialText getDynamicObjectMaterialText(int objectid, int materialindex) { ReferenceString refText = new ReferenceString("", 256); ReferenceInt refMaterialSize = new ReferenceInt(0); ReferenceString refFontFace = new ReferenceString("", 64); ReferenceInt refFontSize = new ReferenceInt(0); ReferenceInt refBold = new ReferenceInt(0); ReferenceInt refFontColor = new ReferenceInt(0); ReferenceInt refBackColor = new ReferenceInt(0); ReferenceInt refTextAlignment = new ReferenceInt(0); getDynamicObjectMaterialText.call(objectid, materialindex, refText, refMaterialSize, refFontFace, refFontSize, refBold, refFontColor, refBackColor, refTextAlignment, refText.getLength(), refFontFace.getLength()); return new DynamicObjectMaterialText(refText.getValue(), refFontFace.getValue(), refMaterialSize.getValue(), refFontSize.getValue(), refBold.getValue() > 0, refFontColor.getValue(), refBackColor.getValue(), refTextAlignment.getValue()); } public static void setDynamicObjectMaterialText(int objectid, int materialindex, String text, ObjectMaterialSize materialsize, String fontFace, int fontSize, boolean bold, int fontColor, int backColor, int textAlignment) { setDynamicObjectMaterialText.call(objectid, materialindex, text, materialsize.getValue(), fontFace, fontSize, bold ? 1 : 0, fontColor, backColor, textAlignment); } //Pickups: public static DynamicPickup createDynamicPickup(int modelid, int type, Location location, int playerid, float streamDistance) { return createDynamicPickup(modelid, type, location.x, location.y, location.z, location.worldId, location.interiorId, playerid, streamDistance); } public static DynamicPickup createDynamicPickup(int modelid, int type, float x, float y, float z, int worldid, int interiorid, int playerid, float streamDistance) { int id = (int) createDynamicPickup.call(modelid, type, x,y,z, worldid, interiorid, playerid, streamDistance); return new DynamicPickup(id, modelid, type, playerid, streamDistance); } public static void destroyDynamicPickup(int id) { destroyDynamicPickup.call(id); } public static boolean isValidDynamicPickup(int id) { return (int)isValidDynamicPickup.call(id) > 0; } //3DTextLabels: public static Dynamic3DTextLabel createDynamic3DTextLabel(String text, Color color, Location location) { return createDynamic3DTextLabel(text, color, location, Dynamic3DTextLabel.DEFAULT_STREAM_DISTANCE, 0, Dynamic3DTextLabel.DEFAULT_STREAM_DISTANCE); } public static Dynamic3DTextLabel createDynamic3DTextLabel(String text, Color color, Location location, float drawDistance, int testLOS, float streamDistance) { return createDynamic3DTextLabel(text, color, location, drawDistance, testLOS, -1, streamDistance); } public static Dynamic3DTextLabel createDynamic3DTextLabel(String text, Color color, Location location, float drawDistance, int testLOS, int playerid, float streamDistance) { return createDynamic3DTextLabel(text, color, location, drawDistance, 0xFFFF, 0xFFFF, testLOS, playerid, streamDistance); } public static Dynamic3DTextLabel createDynamic3DTextLabel(String text, Color color, Location location, float drawDistance, int attachedPlayer, int attachedVehicle, int testLOS, int playerid, float streamDistance) { return createDynamic3DTextLabel(text, color, location.x, location.y, location.z, drawDistance, attachedPlayer, attachedVehicle, testLOS, location.worldId, location.interiorId, playerid, streamDistance); } public static Dynamic3DTextLabel createDynamic3DTextLabel(String text, Color color, float x, float y, float z, float drawDistance, int attachedPlayer, int attachedVehicle, int testLOS, int worldid, int interiorid, int playerid, float streamDistance) { int id = (int) createDynamic3DTextLabel.call(text, color.getValue(), x,y,z, drawDistance, attachedPlayer, attachedVehicle, testLOS, worldid, interiorid, playerid, streamDistance); return new Dynamic3DTextLabel(id, playerid, streamDistance, drawDistance); } public static void destroyDynamic3DTextLabel(int id) { destroyDynamic3DTextLabel.call(id); } public static boolean isValidDynamic3DTextLabel(int id) { return (int)isValidDynamic3DTextLabel.call(id) > 0; } public static String getDynamic3DTextLabelText(int id) { String text = ""; getDynamic3DTextLabelText.call(id, text, 1024); // Hope no-one will have length of a label text greater then 1024 :) return text; } public static void updateDynamic3DTextLabelText(int id, Color color, String text) { updateDynamic3DTextLabelText.call(id, color.getValue(), text); } public static void update(Player player) { update(player, StreamerType.ALL); } public static void update(Player player, StreamerType streamerType) { update.call(player.getId(), streamerType.getValue()); } public static void updateEx(Player player, float x, float y, float z, int worldid, int interiorid) { updateEx(player, x, y, z, worldid, interiorid, StreamerType.ALL); } public static void updateEx(Player player, float x, float y, float z, int worldid, int interiorid, StreamerType streamerType) { updateEx.call(player.getId(), x, y, z, worldid, interiorid, streamerType.getValue()); } }
Add drawDistance parameter to createDynamic3DTextLabel method as an obligatory
src/main/java/net/gtaun/shoebill/streamer/Functions.java
Add drawDistance parameter to createDynamic3DTextLabel method as an obligatory
Java
agpl-3.0
e472a962347603c8cbcf820e32fb323d719977b3
0
jpaoletti/java-presentation-manager,jpaoletti/java-presentation-manager
package jpaoletti.jpm.core; import java.util.ArrayList; import java.util.List; /** * @author jpaoletti * * This class encapsulate an entity, its list and everything associated to an * entity. An instance of this class is inserted in session under demand and * stay in session for fast reference. * */ public class EntityContainer { private String id; private Entity entity; private PaginatedList list; private List<InstanceId> selectedInstances; private EntityInstanceWrapper selected; private boolean selectedNew; private EntityFilter filter; private EntityContainer owner; private Operation operation; /** * Main constructor * * @param entity The contained entity * @param sid The session id */ public EntityContainer(Entity entity) { super(); this.entity = entity; this.id = buildId(entity.getId()); this.selectedNew = false; } /** * Builds a string based on a session id and the entity id. Not implemented. * * @param sid A session id * @param eid The entity id * @return The resulting string */ public static String buildId(String eid) { //return sid.substring(0,20) + eid.hashCode() + sid.substring(20); return eid; } /** * Getter for the id * * @return The id */ public String getId() { return id; } /** * * @param id */ public void setId(String id) { this.id = id; } /** * Getter for the entity * * @return The entity */ public Entity getEntity() { return entity; } /** * * @param entity */ public void setEntity(Entity entity) { this.entity = entity; } /** * Getter for the list * * @return The list */ public PaginatedList getList() { return list; } /** * * @param list */ public void setList(PaginatedList list) { this.list = list; } /** * Setter for selected instance * * @param selected */ public void setSelected(EntityInstanceWrapper selected) { this.selected = selected; setSelectedNew(false); } /** * Getter for the selected instance wrapper * * @return The wrapper */ public EntityInstanceWrapper getSelected() { return selected; } /** * * @param new_ */ public void setSelectedNew(boolean new_) { this.selectedNew = new_; } /** * Indicate if the actual selected is new * * @return true when selected is new */ public boolean isSelectedNew() { return selectedNew; } /** * @param filter the filter to set */ public void setFilter(EntityFilter filter) { this.filter = filter; } /** * @return the filter */ public EntityFilter getFilter() { return filter; } /** * @return the selected instances ids */ public List<InstanceId> getSelectedInstanceIds() { if (selectedInstances == null) { selectedInstances = new ArrayList<InstanceId>(); } return selectedInstances; } /** * Getter for the owner * * @return The owner */ public EntityContainer getOwner() { return owner; } /** * * @param owner */ public void setOwner(EntityContainer owner) { this.owner = owner; } /** * Getter for the operation * * @return The operation */ public Operation getOperation() { return operation; } /** * * @param operation */ public void setOperation(Operation operation) { this.operation = operation; } public boolean isSelected(InstanceId id) { return getSelectedInstanceIds().contains(id); } @Override public boolean equals(Object obj) { if (obj == null) { return false; } if (getClass() != obj.getClass()) { return false; } final EntityContainer other = (EntityContainer) obj; if ((this.getId() == null) ? (other.getId() != null) : !this.getId().equals(other.getId())) { return false; } return true; } @Override public int hashCode() { int hash = 7; hash = 41 * hash + (this.getId() != null ? this.getId().hashCode() : 0); return hash; } }
modules/jpm-core/src/main/java/jpaoletti/jpm/core/EntityContainer.java
package jpaoletti.jpm.core; import java.util.ArrayList; import java.util.List; /** * @author jpaoletti * * This class encapsulate an entity, its list and everything associated to an * entity. An instance of this class is inserted in session under demand and * stay in session for fast reference. * */ public class EntityContainer { private String id; private Entity entity; private PaginatedList list; private List<InstanceId> selectedInstances; private EntityInstanceWrapper selected; private boolean selectedNew; private EntityFilter filter; private EntityContainer owner; private Operation operation; /** * Main constructor * * @param entity The contained entity * @param sid The session id */ public EntityContainer(Entity entity) { super(); this.entity = entity; this.id = buildId(entity.getId()); this.selectedNew = false; } /** * Builds a string based on a session id and the entity id. Not implemented. * * @param sid A session id * @param eid The entity id * @return The resulting string */ public static String buildId(String eid) { //return sid.substring(0,20) + eid.hashCode() + sid.substring(20); return eid; } /** * Getter for the id * * @return The id */ public String getId() { return id; } /** * * @param id */ public void setId(String id) { this.id = id; } /** * Getter for the entity * * @return The entity */ public Entity getEntity() { return entity; } /** * * @param entity */ public void setEntity(Entity entity) { this.entity = entity; } /** * Getter for the list * * @return The list */ public PaginatedList getList() { return list; } /** * * @param list */ public void setList(PaginatedList list) { this.list = list; } /** * Setter for selected instance * * @param selected */ public void setSelected(EntityInstanceWrapper selected) { this.selected = selected; setSelectedNew(false); } /** * Getter for the selected instance wrapper * * @return The wrapper */ public EntityInstanceWrapper getSelected() { return selected; } /** * * @param new_ */ public void setSelectedNew(boolean new_) { this.selectedNew = new_; } /** * Indicate if the actual selected is new * * @return true when selected is new */ public boolean isSelectedNew() { return selectedNew; } /** * @param filter the filter to set */ public void setFilter(EntityFilter filter) { this.filter = filter; } /** * @return the filter */ public EntityFilter getFilter() { return filter; } /** * @return the selected instances ids */ public List<InstanceId> getSelectedInstanceIds() { if (selectedInstances == null) { selectedInstances = new ArrayList<InstanceId>(); } return selectedInstances; } /** * Getter for the owner * * @return The owner */ public EntityContainer getOwner() { return owner; } /** * * @param owner */ public void setOwner(EntityContainer owner) { this.owner = owner; } /** * Getter for the operation * * @return The operation */ public Operation getOperation() { return operation; } /** * * @param operation */ public void setOperation(Operation operation) { this.operation = operation; } public boolean isSelected(InstanceId id) { return getSelectedInstanceIds().contains(id); } }
added equals to entity container
modules/jpm-core/src/main/java/jpaoletti/jpm/core/EntityContainer.java
added equals to entity container
Java
lgpl-2.1
df7299c294b19fe91ad3ec101c891010b29db49c
0
RodolpheFouquet/gpac,DmitrySigaev/gpac,gpac/gpac,rbouqueau/gpac_brew_travis,epam/gpac,canatella/gpac,rbouqueau/gpac,rauf/gpac,DmitrySigaev/gpac,rbouqueau/gpac,DmitrySigaev/gpac,canatella/gpac,ARSekkat/gpac,epam/gpac,rauf/gpac,gpac/gpac,rbouqueau/gpac_brew_travis,aymanelyaagoubi/gpac,psteinb/gpac,emmanouil/gpac,rbouqueau/gpac,porcelijn/gpac,psteinb/gpac,canatella/gpac,rbouqueau/gpac_brew_travis,emmanouil/gpac,rauf/gpac,nguyen-viet-thanh-trung/gpac,psteinb/gpac,ARSekkat/gpac,aymanelyaagoubi/gpac,psteinb/gpac,canatella/gpac,rauf/gpac,aymanelyaagoubi/gpac,psteinb/gpac,epam/gpac,rbouqueau/gpac_brew_travis,rbouqueau/gpac_brew_travis,rbouqueau/gpac,epam/gpac,psteinb/gpac,emmanouil/gpac,rauf/gpac,vladimir-kazakov/gpac,vladimir-kazakov/gpac,rbouqueau/gpac,gpac/gpac,epam/gpac,porcelijn/gpac,canatella/gpac,Bevara/Access-open,aymanelyaagoubi/gpac,ARSekkat/gpac,gpac/gpac,drakeguan/gpac,rauf/gpac,ARSekkat/gpac,drakeguan/gpac,vladimir-kazakov/gpac,RodolpheFouquet/gpac,gpac/gpac,DmitrySigaev/gpac,RodolpheFouquet/gpac,Bevara/Access-open,drakeguan/gpac,drakeguan/gpac,emmanouil/gpac,aymanelyaagoubi/gpac,vladimir-kazakov/gpac,Bevara/Access-open,aymanelyaagoubi/gpac,rbouqueau/gpac_brew_travis,Bevara/Access-open,DmitrySigaev/gpac,vladimir-kazakov/gpac,porcelijn/gpac,gpac/gpac,Bevara/Access-open,canatella/gpac,drakeguan/gpac,epam/gpac,porcelijn/gpac,porcelijn/gpac,rbouqueau/gpac,emmanouil/gpac,RodolpheFouquet/gpac,nguyen-viet-thanh-trung/gpac,DmitrySigaev/gpac,Bevara/Access-open,canatella/gpac,rbouqueau/gpac,rbouqueau/gpac,gpac/gpac,RodolpheFouquet/gpac,porcelijn/gpac,gpac/gpac,nguyen-viet-thanh-trung/gpac,nguyen-viet-thanh-trung/gpac,ARSekkat/gpac,nguyen-viet-thanh-trung/gpac,nguyen-viet-thanh-trung/gpac,vladimir-kazakov/gpac,RodolpheFouquet/gpac,drakeguan/gpac,ARSekkat/gpac,emmanouil/gpac
/** * Osmo on Android * Aug/2010 * NGO Van Luyen * $Id$ * */ package com.artemis.Osmo4; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import android.app.Activity; import android.app.AlertDialog; import android.content.ActivityNotFoundException; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.DialogInterface.OnClickListener; import android.content.res.Configuration; import android.net.Uri; import android.os.Bundle; import android.os.PowerManager; import android.os.PowerManager.WakeLock; import android.util.Log; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.Window; import android.widget.ArrayAdapter; import android.widget.AutoCompleteTextView; import android.widget.Toast; /** * The main Osmo4 activity, used to launch everything * * @version $Revision$ * */ public class Osmo4 extends Activity implements GpacCallback { private String[] m_modules_list; private final boolean fastStartup = false; private final static int DEFAULT_BUFFER_SIZE = 8192; private final String DEFAULT_OPEN_URL = Osmo4Renderer.GPAC_CFG_DIR + "gui/gui.bt"; //$NON-NLS-1$ /** * Activity request ID for picking a file from local filesystem */ public final static int PICK_FILE_REQUEST = 1; private final static String LOG_OSMO_TAG = "Osmo4"; //$NON-NLS-1$ private final static String OK_BUTTON = "OK"; //$NON-NLS-1$ /** * List of all extensions recognized by Osmo */ public final static String OSMO_REGISTERED_FILE_EXTENSIONS = "*.mp4,*.bt,*.xmt,*.xml,*.ts,*.svg,*.mp3,*.m3u8,*.mpg,*.aac,*.m4a,*.jpg,*.png"; //$NON-NLS-1$ private PowerManager.WakeLock wl = null; private Osmo4Renderer renderer; private synchronized Osmo4Renderer getRenderer() { return renderer; } // --------------------------------------- @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); requestWindowFeature(Window.FEATURE_PROGRESS); // requestWindowFeature(Window.FEATURE_CUSTOM_TITLE); requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS); final Osmo4GLSurfaceView mGLView = new Osmo4GLSurfaceView(Osmo4.this); mGLView.setFocusable(true); mGLView.setFocusableInTouchMode(true); final String name = "Osmo4"; //$NON-NLS-1$ final String toOpen; if (Intent.ACTION_VIEW.equals(getIntent().getAction())) { Uri uri = getIntent().getData(); if (uri != null) { synchronized (this) { toOpen = uri.toString(); } } else toOpen = null; } else toOpen = null; setProgress(1000); service.submit(new Runnable() { @Override public void run() { displayPopup("Copying native libraries...", name); //$NON-NLS-1$ loadAllModules(); runOnUiThread(new Runnable() { @Override public void run() { setProgress(5000); } }); displayPopup("Loading GPAC Renderer $Revision$...", name); //$NON-NLS-1$ runOnUiThread(new Runnable() { @Override public void run() { setProgress(9000); PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE); WakeLock wl = pm.newWakeLock(PowerManager.SCREEN_DIM_WAKE_LOCK, LOG_OSMO_TAG); if (wl != null) wl.acquire(); synchronized (Osmo4.this) { Osmo4.this.wl = wl; } synchronized (Osmo4.this) { renderer = new Osmo4Renderer(Osmo4.this, toOpen); mGLView.setRenderer(renderer); } displayPopup("Now loading, please wait...", name); //$NON-NLS-1$ setContentView(mGLView); } }); } }); } // --------------------------------------- @Override public boolean onCreateOptionsMenu(Menu menu) { MenuInflater inflater = getMenuInflater(); inflater.inflate(R.menu.main_menu, menu); return true; } private String getRecentURLsFile() { return Osmo4Renderer.GPAC_CFG_DIR + "recentURLs.txt"; //$NON-NLS-1$ } private boolean openURL() { Future<String[]> res = service.submit(new Callable<String[]>() { @Override public String[] call() throws Exception { BufferedReader reader = null; try { reader = new BufferedReader(new InputStreamReader(new FileInputStream(getRecentURLsFile()), DEFAULT_ENCODING)); String s = null; Set<String> results = new HashSet<String>(); while (null != (s = reader.readLine())) { results.add(s); } addAllRecentURLs(results); return results.toArray(new String[0]); } finally { if (reader != null) reader.close(); } } }); AlertDialog.Builder builder = new AlertDialog.Builder(this); final AutoCompleteTextView textView = new AutoCompleteTextView(this); builder.setMessage("Please enter an URL to connect to...") //$NON-NLS-1$ .setCancelable(true) .setPositiveButton("Open URL", new DialogInterface.OnClickListener() { //$NON-NLS-1$ public void onClick(DialogInterface dialog, int id) { dialog.cancel(); final String newURL = textView.getText().toString(); openURLasync(newURL); assert (renderer != null); service.execute(new Runnable() { @Override public void run() { addAllRecentURLs(Collections.singleton(newURL)); File tmp = new File(getRecentURLsFile() + ".tmp"); //$NON-NLS-1$ BufferedWriter w = null; try { w = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(tmp), DEFAULT_ENCODING)); Collection<String> toWrite = getAllRecentURLs(); for (String s : toWrite) { w.write(s); w.write("\n"); //$NON-NLS-1$ } w.close(); w = null; if (tmp.renameTo(new File(getRecentURLsFile()))) Log.e(LOG_OSMO_TAG, "Failed to rename " + tmp + " to " + getRecentURLsFile()); //$NON-NLS-1$//$NON-NLS-2$ } catch (IOException e) { Log.e(LOG_OSMO_TAG, "Failed to write recent URLs to " + tmp, e); //$NON-NLS-1$ try { if (w != null) w.close(); } catch (IOException ex) { Log.e(LOG_OSMO_TAG, "Failed to close stream " + tmp, ex); //$NON-NLS-1$ } } } }); } }) .setNegativeButton("Cancel", new DialogInterface.OnClickListener() { //$NON-NLS-1$ public void onClick(DialogInterface dialog, int id) { dialog.cancel(); } }); textView.setText("http://"); //$NON-NLS-1$ builder.setView(textView); builder.create(); builder.show(); ArrayAdapter<String> adapter; try { adapter = new ArrayAdapter<String>(this, android.R.layout.simple_dropdown_item_1line, res.get(1, TimeUnit.SECONDS)); textView.setAdapter(adapter); } catch (ExecutionException e) { // Ignored Log.e(LOG_OSMO_TAG, "Error while parsing recent URLs", e); //$NON-NLS-1$ } catch (TimeoutException e) { Log.e(LOG_OSMO_TAG, "It took too long to parse recent URLs", e); //$NON-NLS-1$ } catch (InterruptedException e) { Log.e(LOG_OSMO_TAG, "Interrupted while parsing recent URLs", e); //$NON-NLS-1$ } return true; } private final ExecutorService service = Executors.newSingleThreadExecutor(); private final Set<String> allRecentURLs = new HashSet<String>(); private synchronized void addAllRecentURLs(Collection<String> urlsToAdd) { allRecentURLs.addAll(urlsToAdd); } private synchronized Collection<String> getAllRecentURLs() { return new ArrayList<String>(allRecentURLs); } private final static Charset DEFAULT_ENCODING = Charset.forName("UTF-8"); //$NON-NLS-1$ /** * Opens a new activity to select a file * * @return true if activity has been selected */ private boolean openFileDialog() { Intent intent = new Intent("org.openintents.action.PICK_FILE"); //$NON-NLS-1$ //Intent intent = new Intent("org.openintents.action.PICK_FILE"); //$NON-NLS-1$ intent.setData(Uri.fromFile(new File(Osmo4Renderer.GPAC_CFG_DIR))); intent.putExtra("org.openintents.extra.TITLE", "Please select a file"); //$NON-NLS-1$//$NON-NLS-2$ intent.putExtra("browser_filter_extension_whitelist", OSMO_REGISTERED_FILE_EXTENSIONS); //$NON-NLS-1$ try { startActivityForResult(intent, PICK_FILE_REQUEST); return true; } catch (ActivityNotFoundException e) { AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setMessage("Impossible to find an Intent to choose a file... Cannot open file !") //$NON-NLS-1$ .setCancelable(true) .setPositiveButton("Close", new DialogInterface.OnClickListener() { //$NON-NLS-1$ public void onClick(DialogInterface dialog, int id) { dialog.cancel(); } }); AlertDialog alert = builder.create(); alert.show(); return false; } } // --------------------------------------- @Override protected void onActivityResult(int requestCode, int resultCode, Intent intent) { if (requestCode == PICK_FILE_REQUEST) { if (resultCode == RESULT_OK) { Uri uri = intent.getData(); if (uri != null) { openURLasync(uri.toString()); } } } } private void openURLasync(final String url) { Osmo4Renderer renderer = getRenderer(); runOnUiThread(new Runnable() { @Override public void run() { if (DEFAULT_OPEN_URL.equals(url)) setTitle(LOG_OSMO_TAG + " - Home"); //$NON-NLS-1$ else setTitle(LOG_OSMO_TAG + " - " + url); //$NON-NLS-1$ } }); if (renderer == null) displayPopup("Renderer should not be null", "ERROR"); //$NON-NLS-1$ //$NON-NLS-2$ else { GPACInstance i = renderer.getInstance(); if (i != null) i.connect(url); } } // --------------------------------------- @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); } // --------------------------------------- @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle item selection switch (item.getItemId()) { case R.id.open_url: return openURL(); case R.id.open_file: // newGame(); return openFileDialog(); case R.id.quit: this.finish(); // quit(); return true; default: return super.onOptionsItemSelected(item); } } // --------------------------------------- @Override protected void onDestroy() { service.shutdown(); synchronized (this) { if (wl != null) wl.release(); } Osmo4Renderer renderer = getRenderer(); if (renderer != null) { GPACInstance instance = renderer.getInstance(); Log.d(LOG_OSMO_TAG, "Disconnecting instance..."); //$NON-NLS-1$ instance.disconnect(); Log.d(LOG_OSMO_TAG, "Destroying GPAC instance..."); //$NON-NLS-1$ instance.destroy(); } super.onDestroy(); } // --------------------------------------- private void loadAllModules() { Log.i(LOG_OSMO_TAG, "Start loading all modules..."); //$NON-NLS-1$ long start = System.currentTimeMillis(); byte buffer[] = new byte[1024]; int[] ids = getAllRawResources(); for (int i = 0; i < ids.length; i++) { OutputStream fos = null; InputStream ins = null; String fn = Osmo4Renderer.GPAC_MODULES_DIR + m_modules_list[i] + ".so"; //$NON-NLS-1$ File finalFile = new File(fn); // If file has already been copied, not need to do it again if (finalFile.exists() && finalFile.canRead() && fastStartup) { Log.i(LOG_OSMO_TAG, "Skipping " + finalFile); //$NON-NLS-1$ continue; } try { Log.i(LOG_OSMO_TAG, "Copying resource " + ids[i] + " to " //$NON-NLS-1$//$NON-NLS-2$ + finalFile.getAbsolutePath()); File tmpFile = new File(fn + ".tmp"); //$NON-NLS-1$ int read; ins = new BufferedInputStream(getResources().openRawResource(ids[i]), DEFAULT_BUFFER_SIZE); fos = new BufferedOutputStream(new FileOutputStream(tmpFile), DEFAULT_BUFFER_SIZE); while (0 < (read = ins.read(buffer))) { fos.write(buffer, 0, read); } ins.close(); ins = null; fos.close(); fos = null; if (!tmpFile.renameTo(finalFile)) Log.e(LOG_OSMO_TAG, "Failed to rename " + tmpFile.getAbsolutePath() + " to " //$NON-NLS-1$//$NON-NLS-2$ + finalFile.getAbsolutePath()); } catch (IOException e) { Log.e(LOG_OSMO_TAG, "IOException for resource : " + ids[i], e); //$NON-NLS-1$ } finally { if (ins != null) { try { ins.close(); } catch (IOException e) { Log.e(LOG_OSMO_TAG, "Error while closing read stream", e); //$NON-NLS-1$ } } if (fos != null) { try { fos.close(); } catch (IOException e) { Log.e(LOG_OSMO_TAG, "Error while closing write stream", e); //$NON-NLS-1$ } } } } Log.i(LOG_OSMO_TAG, "Done loading all modules, took " + (System.currentTimeMillis() - start) + "ms."); //$NON-NLS-1$ //$NON-NLS-2$ } private int[] getAllRawResources() throws RuntimeException { int[] ids = null; R.raw r = new R.raw(); java.lang.reflect.Field fields[] = R.raw.class.getDeclaredFields(); ids = new int[fields.length]; m_modules_list = new String[fields.length]; try { for (int i = 0; i < fields.length; i++) { java.lang.reflect.Field f = fields[i]; ids[i] = f.getInt(r); m_modules_list[i] = f.getName(); Log.i(LOG_OSMO_TAG, "R.raw." + f.getName() + " = 0x" + Integer.toHexString(ids[i])); //$NON-NLS-1$ //$NON-NLS-2$ } } catch (IllegalArgumentException e) { throw new RuntimeException(e); } catch (IllegalAccessException e) { throw new RuntimeException(e); } return ids; } // --------------------------------------- private String lastDisplayedMessage; private void displayPopup(String message, String title) { final String fullMsg = title + '\n' + message; synchronized (this) { if (fullMsg.equals(lastDisplayedMessage)) return; lastDisplayedMessage = fullMsg; } runOnUiThread(new Runnable() { @Override public void run() { Toast toast = Toast.makeText(Osmo4.this, fullMsg, Toast.LENGTH_SHORT); toast.show(); } }); } /** * @see com.artemis.Osmo4.GpacCallback#displayMessage(String, String, int) */ @Override public void displayMessage(final String message, final String title, final int status) { if (status == GF_Err.GF_OK.value) displayPopup(message, title); else { runOnUiThread(new Runnable() { @Override public void run() { StringBuilder sb = new StringBuilder(); sb.append(GF_Err.getError(status)); sb.append(' '); sb.append(title); AlertDialog.Builder builder = new AlertDialog.Builder(Osmo4.this); builder.setTitle(sb.toString()); sb.append('\n'); sb.append(message); builder.setMessage(sb.toString()); builder.setCancelable(true); builder.setPositiveButton(OK_BUTTON, new OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { dialog.cancel(); } }); builder.create().show(); } }); } } /** * @see com.artemis.Osmo4.GpacCallback#log(int, int, java.lang.String, java.lang.Object[]) */ @Override public void log(int level, int module, String message, Object... arguments) { } /** * @see com.artemis.Osmo4.GpacCallback#onProgress(java.lang.String, int, int) */ @Override public void onProgress(final String msg, final int done, final int total) { runOnUiThread(new Runnable() { @Override public void run() { // GPAC sometimes return total = 0 int progress = done * 10000 / (total < 1 ? 1 : total); if (progress > 9900) progress = 10000; setProgress(progress); } }); } /** * @see com.artemis.Osmo4.GpacCallback#onGPACReady() */ @Override public void onGPACReady() { Log.i(LOG_OSMO_TAG, "GPAC is ready"); //$NON-NLS-1$ runOnUiThread(new Runnable() { @Override public void run() { setProgress(10000); } }); } /** * @see com.artemis.Osmo4.GpacCallback#onGPACError(java.lang.Throwable) */ @Override public void onGPACError(final Throwable e) { Log.e(LOG_OSMO_TAG, "GPAC Error", e); //$NON-NLS-1$ runOnUiThread(new Runnable() { @Override public void run() { StringBuilder sb = new StringBuilder(); sb.append("Failed to init GPAC due to "); //$NON-NLS-1$ sb.append(e.getClass().getSimpleName()); AlertDialog.Builder builder = new AlertDialog.Builder(Osmo4.this); builder.setTitle(sb.toString()); sb.append('\n'); sb.append("Description: "); //$NON-NLS-1$ sb.append(e.getLocalizedMessage()); sb.append('\n'); sb.append("Revision: $Revision$"); //$NON-NLS-1$ builder.setMessage(sb.toString()); builder.setCancelable(true); builder.setPositiveButton(OK_BUTTON, new OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { dialog.cancel(); } }); builder.create().show(); } }); } }
applications/osmo4_android/src/com/artemis/Osmo4/Osmo4.java
/** * Osmo on Android * Aug/2010 * NGO Van Luyen * $Id$ * */ package com.artemis.Osmo4; import java.io.BufferedInputStream; import java.io.BufferedOutputStream; import java.io.BufferedReader; import java.io.BufferedWriter; import java.io.File; import java.io.FileInputStream; import java.io.FileOutputStream; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.nio.charset.Charset; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.Set; import java.util.concurrent.Callable; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import android.app.Activity; import android.app.AlertDialog; import android.content.ActivityNotFoundException; import android.content.Context; import android.content.DialogInterface; import android.content.Intent; import android.content.DialogInterface.OnClickListener; import android.content.res.Configuration; import android.net.Uri; import android.os.Bundle; import android.os.PowerManager; import android.os.PowerManager.WakeLock; import android.util.Log; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.Window; import android.widget.ArrayAdapter; import android.widget.AutoCompleteTextView; import android.widget.Toast; /** * The main Osmo4 activity, used to launch everything * * @version $Revision$ * */ public class Osmo4 extends Activity implements GpacCallback { private String[] m_modules_list; private final boolean fastStartup = false; private final static int DEFAULT_BUFFER_SIZE = 8192; private final String DEFAULT_OPEN_URL = Osmo4Renderer.GPAC_CFG_DIR + "gui/gui.bt"; //$NON-NLS-1$ /** * Activity request ID for picking a file from local filesystem */ public final static int PICK_FILE_REQUEST = 1; private final static String LOG_OSMO_TAG = "Osmo4"; //$NON-NLS-1$ private final static String OK_BUTTON = "OK"; //$NON-NLS-1$ /** * List of all extensions recognized by Osmo */ public final static String OSMO_REGISTERED_FILE_EXTENSIONS = "*.mp4,*.bt,*.xmt,*.xml,*.ts,*.svg,*.mp3,*.m3u8,*.mpg,*.aac,*.m4a,*.jpg,*.png"; //$NON-NLS-1$ private PowerManager.WakeLock wl = null; private Osmo4Renderer renderer; private synchronized Osmo4Renderer getRenderer() { return renderer; } // --------------------------------------- @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); requestWindowFeature(Window.FEATURE_PROGRESS); // requestWindowFeature(Window.FEATURE_CUSTOM_TITLE); requestWindowFeature(Window.FEATURE_INDETERMINATE_PROGRESS); final Osmo4GLSurfaceView mGLView = new Osmo4GLSurfaceView(Osmo4.this); mGLView.setFocusable(true); mGLView.setFocusableInTouchMode(true); final String name = "Osmo4"; //$NON-NLS-1$ final String toOpen; if (Intent.ACTION_VIEW.equals(getIntent().getAction())) { Uri uri = getIntent().getData(); if (uri != null) { synchronized (this) { toOpen = uri.toString(); } } else toOpen = null; } else toOpen = null; setProgress(1000); service.submit(new Runnable() { @Override public void run() { displayPopup("Copying native libraries...", name); //$NON-NLS-1$ loadAllModules(); runOnUiThread(new Runnable() { @Override public void run() { setProgress(5000); } }); displayPopup("Loading GPAC Renderer $Revision$...", name); //$NON-NLS-1$ runOnUiThread(new Runnable() { @Override public void run() { setProgress(9000); PowerManager pm = (PowerManager) getSystemService(Context.POWER_SERVICE); WakeLock wl = pm.newWakeLock(PowerManager.SCREEN_DIM_WAKE_LOCK, LOG_OSMO_TAG); if (wl != null) wl.acquire(); synchronized (Osmo4.this) { Osmo4.this.wl = wl; } synchronized (Osmo4.this) { renderer = new Osmo4Renderer(Osmo4.this, toOpen); mGLView.setRenderer(renderer); } displayPopup("Now loading, please wait...", name); //$NON-NLS-1$ setContentView(mGLView); } }); } }); } // --------------------------------------- @Override public boolean onCreateOptionsMenu(Menu menu) { MenuInflater inflater = getMenuInflater(); inflater.inflate(R.menu.main_menu, menu); return true; } private String getRecentURLsFile() { return Osmo4Renderer.GPAC_CFG_DIR + "recentURLs.txt"; //$NON-NLS-1$ } private boolean openURL() { Future<String[]> res = service.submit(new Callable<String[]>() { @Override public String[] call() throws Exception { BufferedReader reader = null; try { reader = new BufferedReader(new InputStreamReader(new FileInputStream(getRecentURLsFile()), DEFAULT_ENCODING)); String s = null; Set<String> results = new HashSet<String>(); while (null != (s = reader.readLine())) { results.add(s); } addAllRecentURLs(results); return results.toArray(new String[0]); } finally { if (reader != null) reader.close(); } } }); AlertDialog.Builder builder = new AlertDialog.Builder(this); final AutoCompleteTextView textView = new AutoCompleteTextView(this); builder.setMessage("Please enter an URL to connect to...") //$NON-NLS-1$ .setCancelable(true) .setPositiveButton("Open URL", new DialogInterface.OnClickListener() { //$NON-NLS-1$ public void onClick(DialogInterface dialog, int id) { dialog.cancel(); final String newURL = textView.getText().toString(); openURLasync(newURL); assert (renderer != null); service.execute(new Runnable() { @Override public void run() { addAllRecentURLs(Collections.singleton(newURL)); File tmp = new File(getRecentURLsFile() + ".tmp"); //$NON-NLS-1$ BufferedWriter w = null; try { w = new BufferedWriter(new OutputStreamWriter(new FileOutputStream(tmp), DEFAULT_ENCODING)); Collection<String> toWrite = getAllRecentURLs(); for (String s : toWrite) { w.write(s); w.write("\n"); //$NON-NLS-1$ } w.close(); w = null; if (tmp.renameTo(new File(getRecentURLsFile()))) Log.e(LOG_OSMO_TAG, "Failed to rename " + tmp + " to " + getRecentURLsFile()); //$NON-NLS-1$//$NON-NLS-2$ } catch (IOException e) { Log.e(LOG_OSMO_TAG, "Failed to write recent URLs to " + tmp, e); //$NON-NLS-1$ try { if (w != null) w.close(); } catch (IOException ex) { Log.e(LOG_OSMO_TAG, "Failed to close stream " + tmp, ex); //$NON-NLS-1$ } } } }); } }) .setNegativeButton("Cancel", new DialogInterface.OnClickListener() { //$NON-NLS-1$ public void onClick(DialogInterface dialog, int id) { dialog.cancel(); } }); textView.setText("http://"); //$NON-NLS-1$ builder.setView(textView); builder.create(); builder.show(); ArrayAdapter<String> adapter; try { adapter = new ArrayAdapter<String>(this, android.R.layout.simple_dropdown_item_1line, res.get(1, TimeUnit.SECONDS)); textView.setAdapter(adapter); } catch (ExecutionException e) { // Ignored Log.e(LOG_OSMO_TAG, "Error while parsing recent URLs", e); //$NON-NLS-1$ } catch (TimeoutException e) { Log.e(LOG_OSMO_TAG, "It took too long to parse recent URLs", e); //$NON-NLS-1$ } catch (InterruptedException e) { Log.e(LOG_OSMO_TAG, "Interrupted while parsing recent URLs", e); //$NON-NLS-1$ } return true; } private final ExecutorService service = Executors.newSingleThreadExecutor(); private final Set<String> allRecentURLs = new HashSet<String>(); private synchronized void addAllRecentURLs(Collection<String> urlsToAdd) { allRecentURLs.addAll(urlsToAdd); } private synchronized Collection<String> getAllRecentURLs() { return new ArrayList<String>(allRecentURLs); } private final static Charset DEFAULT_ENCODING = Charset.forName("UTF-8"); //$NON-NLS-1$ /** * Opens a new activity to select a file * * @return true if activity has been selected */ private boolean openFileDialog() { Intent intent = new Intent("org.openintents.action.PICK_FILE"); //$NON-NLS-1$ //Intent intent = new Intent("org.openintents.action.PICK_FILE"); //$NON-NLS-1$ intent.setData(Uri.fromFile(new File(Osmo4Renderer.GPAC_CFG_DIR))); intent.putExtra("org.openintents.extra.TITLE", "Please select a file"); //$NON-NLS-1$//$NON-NLS-2$ intent.putExtra("browser_filter_extension_whitelist", OSMO_REGISTERED_FILE_EXTENSIONS); //$NON-NLS-1$ try { startActivityForResult(intent, PICK_FILE_REQUEST); return true; } catch (ActivityNotFoundException e) { AlertDialog.Builder builder = new AlertDialog.Builder(this); builder.setMessage("Impossible to find an Intent to choose a file... Cannot open file !") //$NON-NLS-1$ .setCancelable(true) .setPositiveButton("Close", new DialogInterface.OnClickListener() { //$NON-NLS-1$ public void onClick(DialogInterface dialog, int id) { dialog.cancel(); } }); AlertDialog alert = builder.create(); alert.show(); return false; } } // --------------------------------------- @Override protected void onActivityResult(int requestCode, int resultCode, Intent intent) { if (requestCode == PICK_FILE_REQUEST) { if (resultCode == RESULT_OK) { Uri uri = intent.getData(); if (uri != null) { openURLasync(uri.toString()); } } } } private void openURLasync(final String url) { Osmo4Renderer renderer = getRenderer(); runOnUiThread(new Runnable() { @Override public void run() { if (DEFAULT_OPEN_URL.equals(url)) setTitle(LOG_OSMO_TAG + " - Home"); //$NON-NLS-1$ else setTitle(LOG_OSMO_TAG + " - " + url); //$NON-NLS-1$ } }); if (renderer == null) displayPopup("Renderer should not be null", "ERROR"); //$NON-NLS-1$ //$NON-NLS-2$ else { GPACInstance i = renderer.getInstance(); if (i != null) i.connect(url); } } // --------------------------------------- @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); } // --------------------------------------- @Override public boolean onOptionsItemSelected(MenuItem item) { // Handle item selection switch (item.getItemId()) { case R.id.open_url: return openURL(); case R.id.open_file: // newGame(); return openFileDialog(); case R.id.quit: this.finish(); // quit(); return true; default: return super.onOptionsItemSelected(item); } } // --------------------------------------- @Override protected void onDestroy() { service.shutdown(); synchronized (this) { if (wl != null) wl.release(); } Osmo4Renderer renderer = getRenderer(); if (renderer != null) { GPACInstance instance = renderer.getInstance(); Log.d(LOG_OSMO_TAG, "Disconnecting instance..."); //$NON-NLS-1$ instance.disconnect(); Log.d(LOG_OSMO_TAG, "Destroying GPAC instance..."); //$NON-NLS-1$ instance.destroy(); } super.onDestroy(); } // --------------------------------------- private void loadAllModules() { Log.i(LOG_OSMO_TAG, "Start loading all modules..."); //$NON-NLS-1$ long start = System.currentTimeMillis(); byte buffer[] = new byte[1024]; int[] ids = getAllRawResources(); for (int i = 0; i < ids.length; i++) { OutputStream fos = null; InputStream ins = null; String fn = Osmo4Renderer.GPAC_MODULES_DIR + m_modules_list[i] + ".so"; //$NON-NLS-1$ File finalFile = new File(fn); // If file has already been copied, not need to do it again if (finalFile.exists() && finalFile.canRead() && fastStartup) { Log.i(LOG_OSMO_TAG, "Skipping " + finalFile); //$NON-NLS-1$ continue; } try { Log.i(LOG_OSMO_TAG, "Copying resource " + ids[i] + " to " //$NON-NLS-1$//$NON-NLS-2$ + finalFile.getAbsolutePath()); File tmpFile = new File(fn + ".tmp"); //$NON-NLS-1$ int read; ins = new BufferedInputStream(getResources().openRawResource(ids[i]), DEFAULT_BUFFER_SIZE); fos = new BufferedOutputStream(new FileOutputStream(tmpFile), DEFAULT_BUFFER_SIZE); while (0 < (read = ins.read(buffer))) { fos.write(buffer, 0, read); } ins.close(); ins = null; fos.close(); fos = null; if (!tmpFile.renameTo(finalFile)) Log.e(LOG_OSMO_TAG, "Failed to rename " + tmpFile.getAbsolutePath() + " to " //$NON-NLS-1$//$NON-NLS-2$ + finalFile.getAbsolutePath()); } catch (IOException e) { Log.e(LOG_OSMO_TAG, "IOException for resource : " + ids[i], e); //$NON-NLS-1$ } finally { if (ins != null) { try { ins.close(); } catch (IOException e) { Log.e(LOG_OSMO_TAG, "Error while closing read stream", e); //$NON-NLS-1$ } } if (fos != null) { try { fos.close(); } catch (IOException e) { Log.e(LOG_OSMO_TAG, "Error while closing write stream", e); //$NON-NLS-1$ } } } } Log.i(LOG_OSMO_TAG, "Done loading all modules, took " + (System.currentTimeMillis() - start) + "ms."); //$NON-NLS-1$ //$NON-NLS-2$ } private int[] getAllRawResources() throws RuntimeException { int[] ids = null; R.raw r = new R.raw(); java.lang.reflect.Field fields[] = R.raw.class.getDeclaredFields(); ids = new int[fields.length]; m_modules_list = new String[fields.length]; try { for (int i = 0; i < fields.length; i++) { java.lang.reflect.Field f = fields[i]; ids[i] = f.getInt(r); m_modules_list[i] = f.getName(); Log.i(LOG_OSMO_TAG, "R.raw." + f.getName() + " = 0x" + Integer.toHexString(ids[i])); //$NON-NLS-1$ //$NON-NLS-2$ } } catch (IllegalArgumentException e) { throw new RuntimeException(e); } catch (IllegalAccessException e) { throw new RuntimeException(e); } return ids; } // --------------------------------------- private String lastDisplayedMessage; private void displayPopup(String message, String title) { final String fullMsg = title + '\n' + message; synchronized (this) { if (fullMsg.equals(lastDisplayedMessage)) return; lastDisplayedMessage = fullMsg; } runOnUiThread(new Runnable() { @Override public void run() { Toast toast = Toast.makeText(Osmo4.this, fullMsg, Toast.LENGTH_SHORT); toast.show(); } }); } /** * @see com.artemis.Osmo4.GpacCallback#displayMessage(String, String, int) */ @Override public void displayMessage(final String message, final String title, final int status) { if (status == GF_Err.GF_OK.value) displayPopup(message, title); else { runOnUiThread(new Runnable() { @Override public void run() { StringBuilder sb = new StringBuilder(); sb.append(GF_Err.getError(status)); sb.append(' '); sb.append(title); AlertDialog.Builder builder = new AlertDialog.Builder(Osmo4.this); builder.setTitle(sb.toString()); sb.append('\n'); sb.append(message); builder.setMessage(sb.toString()); builder.setCancelable(true); builder.setPositiveButton(OK_BUTTON, new OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { dialog.cancel(); } }); builder.create().show(); } }); } } /** * @see com.artemis.Osmo4.GpacCallback#log(int, int, java.lang.String, java.lang.Object[]) */ @Override public void log(int level, int module, String message, Object... arguments) { } /** * @see com.artemis.Osmo4.GpacCallback#onProgress(java.lang.String, int, int) */ @Override public void onProgress(final String msg, final int done, final int total) { runOnUiThread(new Runnable() { @Override public void run() { int progress = done * 10000 / total; if (progress > 9900) progress = 10000; setProgress(progress); } }); } /** * @see com.artemis.Osmo4.GpacCallback#onGPACReady() */ @Override public void onGPACReady() { Log.i(LOG_OSMO_TAG, "GPAC is ready"); //$NON-NLS-1$ runOnUiThread(new Runnable() { @Override public void run() { setProgress(10000); } }); } /** * @see com.artemis.Osmo4.GpacCallback#onGPACError(java.lang.Throwable) */ @Override public void onGPACError(final Throwable e) { Log.e(LOG_OSMO_TAG, "GPAC Error", e); //$NON-NLS-1$ runOnUiThread(new Runnable() { @Override public void run() { StringBuilder sb = new StringBuilder(); sb.append("Failed to init GPAC due to "); //$NON-NLS-1$ sb.append(e.getClass().getSimpleName()); AlertDialog.Builder builder = new AlertDialog.Builder(Osmo4.this); builder.setTitle(sb.toString()); sb.append('\n'); sb.append("Description: "); //$NON-NLS-1$ sb.append(e.getLocalizedMessage()); sb.append('\n'); sb.append("Revision: $Revision$"); //$NON-NLS-1$ builder.setMessage(sb.toString()); builder.setCancelable(true); builder.setPositiveButton(OK_BUTTON, new OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { dialog.cancel(); } }); builder.create().show(); } }); } }
Fixed possible division by 0 git-svn-id: ab66a9de07fa9d47c5829c82992f5279466c775f@2825 63c20433-aa62-49bd-875c-5a186b69a8fb
applications/osmo4_android/src/com/artemis/Osmo4/Osmo4.java
Fixed possible division by 0
Java
lgpl-2.1
a25be77a43a1233bcc517a23494d955f40f0defb
0
julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine,julie-sullivan/phytomine
package org.intermine.web.logic.results; /* * Copyright (C) 2002-2008 FlyMine * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. See the LICENSE file for more * information or http://www.gnu.org/copyleft/lesser.html. * */ import java.util.AbstractList; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import org.intermine.objectstore.query.ResultsRow; import org.intermine.objectstore.flatouterjoins.MultiRow; import org.intermine.util.TypeUtil; import org.intermine.web.logic.Constants; /** * A pageable and configurable table of data. * * @author Andrew Varley * @author Kim Rutherford */ public class PagedTable { private static final int FIRST_SELECTED_FIELDS_COUNT = 25; private WebTable webTable; private List<String> columnNames = null; private List resultElementRows = null; private int startRow = 0; private int pageSize = Constants.DEFAULT_TABLE_SIZE; private List<Column> columns; private String tableid; private List<List<Object>> rows = null; // object ids that have been selected in the table // TODO this may be more memory efficient with an IntPresentSet // note: if allSelected != -1 then this map contains those objects that are NOT selected private Map<Integer, String> selectionIds = new HashMap<Integer, String>(); // the index of the column the has all checkbox checked private int allSelected = -1; private String selectedClass; /** * Construct a PagedTable with a list of column names * @param webTable the WebTable that this PagedTable will display */ public PagedTable(WebTable webTable) { super(); this.webTable = webTable; } /** * Construct a PagedTable with a list of column names * @param webTable the WebTable that this PagedTable will display * @param pageSize the number of records to show on each page. Default value is 10. */ public PagedTable(WebTable webTable, int pageSize) { super(); this.webTable = webTable; this.pageSize = pageSize; } /** * Get the list of column configurations * * @return the List of columns in the order they are to be displayed */ public List<Column> getColumns() { return Collections.unmodifiableList(getColumnsInternal()); } private List<Column> getColumnsInternal() { if (columns == null) { columns = webTable.getColumns(); } return columns; } /** * Return the column names * @return the column names */ public List<String> getColumnNames() { if (columnNames == null) { columnNames = new ArrayList<String>(); Iterator<Column> iter = getColumns().iterator(); while (iter.hasNext()) { String columnName = iter.next().getName(); columnNames.add(columnName); } } return columnNames; } /** * Return the number of visible columns. Used by JSP pages. * @return the number of visible columns. */ public int getVisibleColumnCount() { int count = 0; for (Iterator<Column> i = getColumnsInternal().iterator(); i.hasNext();) { Column obj = i.next(); if (obj.isVisible()) { count++; } } return count; } /** * Move a column left * * @param index the index of the column to move */ public void moveColumnLeft(int index) { if (index > 0 && index <= getColumnsInternal().size() - 1) { getColumnsInternal().add(index - 1, getColumnsInternal().remove(index)); } } /** * Move a column right * * @param index the index of the column to move */ public void moveColumnRight(int index) { if (index >= 0 && index < getColumnsInternal().size() - 1) { getColumnsInternal().add(index + 1, getColumnsInternal().remove(index)); } } /** * Set the page size of the table * * @param pageSize the page size */ public void setPageSize(int pageSize) { this.pageSize = pageSize; startRow = (startRow / pageSize) * pageSize; updateResultElementRows(); } /** * Get the page size of the current page * * @return the page size */ public int getPageSize() { return pageSize; } /** * Get the index of the first row of this page * @return the index */ public int getStartRow() { return startRow; } /** * Get the page index. * @return current page index */ public int getPage() { return (startRow / pageSize); } /** * Set the page size and page together. * * @param page page number * @param size page size */ public void setPageAndPageSize(int page, int size) { this.pageSize = size; this.startRow = size * page; updateResultElementRows(); } /** * Get the index of the last row of this page * @return the index */ public int getEndRow() { return startRow + getResultElementRows().size() - 1; } /** * Go to the first page */ public void firstPage() { startRow = 0; updateResultElementRows(); } /** * Check if were are on the first page * @return true if we are on the first page */ public boolean isFirstPage() { return (startRow == 0); } /** * Go to the last page */ public void lastPage() { startRow = ((getExactSize() - 1) / pageSize) * pageSize; updateResultElementRows(); } /** * Check if we are on the last page * @return true if we are on the last page */ public boolean isLastPage() { return (!isSizeEstimate() && getEndRow() == getEstimatedSize() - 1); } /** * Go to the previous page */ public void previousPage() { if (startRow >= pageSize) { startRow -= pageSize; } updateResultElementRows(); } /** * Go to the next page */ public void nextPage() { startRow += pageSize; updateResultElementRows(); } /** * Return the currently visible rows of the table as a List of Lists of ResultElement objects. * @return the resultElementRows of the table */ public List<List<Object>> getRows() { if (rows == null) { updateRows(); } return rows; } /** * Return the currently visible rows of the table as a List of Lists of raw values/Objects. * @return the ResultElement of the table as rows */ public List<List<ResultElement>> getResultElementRows() { if (resultElementRows == null) { updateResultElementRows(); } return resultElementRows; } /** * Return all the resultElementRows of the table as a List of Lists. * * @return all the resultElementRows of the table */ public WebTable getAllRows() { return webTable; } /** * Get the (possibly estimated) number of resultElementRows of this table * @return the number of resultElementRows */ public int getEstimatedSize() { return webTable.getEstimatedSize(); } /** * Check whether the result of getSize is an estimate * @return true if the size is an estimate */ public boolean isSizeEstimate() { return webTable.isSizeEstimate(); } /** * Get the exact number of resultElementRows of this table * @return the number of resultElementRows */ public int getExactSize() { return webTable.size(); } /** * Add an object id and its field value * that has been selected in the table. * @param objectId the id to select */ public void selectId(Integer objectId) { if (allSelected == -1) { ResultElement resultElement = findIdInVisible(objectId); if (resultElement != null) { selectionIds.put(objectId, resultElement.getField().toString()); } } else { // remove because the all checkbox is on selectionIds.remove(objectId); } } /** * Remove the object with the given object id from the list of selected objects. * @param objectId the object store id */ public void deSelectId(Integer objectId) { if (allSelected == -1) { selectionIds.remove(objectId); } else { // add because the all checkbox is on ResultElement resultElement = findIdInVisible(objectId); if (resultElement != null) { selectionIds.put(objectId, resultElement.getField().toString()); } } } /** * Search the visible rows and return the first ResultElement with the given ID./ */ private ResultElement findIdInVisible(Integer id) { for (List<ResultElement> resultElements: getResultElementRows()) { for (ResultElement resultElement : resultElements) { if ((resultElement != null) && (resultElement.getId().equals(id)) && (resultElement.isKeyField())) { return resultElement; } } } return null; } /** * Return the fields for the first selected objects. Return the first * FIRST_SELECTED_FIELDS_COUNT fields. If there are more than that, append "..." * @return the list */ public List<String> getFirstSelectedFields() { List<String> retList = new ArrayList<String>(); Iterator<SelectionEntry> selectedEntryIter = selectedEntryIterator(); while (selectedEntryIter.hasNext()) { if (retList.size() < FIRST_SELECTED_FIELDS_COUNT) { retList.add(selectedEntryIter.next().fieldName); } else { retList.add("..."); break; } } return retList; } /** * Return selected object ids of the current page as a String[], needed for jsp multibox. * @return selected ids as Strings. */ public String[] getCurrentSelectedIdStrings() { return getCurrentSelectedIdStringsList().toArray(new String[0]); } /** * Return selected object ids of the current page as a List. * @return the list. */ public List<String> getCurrentSelectedIdStringsList() { List<String> selected = new ArrayList<String>(); if (allSelected == -1) { if (!selectionIds.isEmpty()) { for (List<ResultElement> currentRow: getResultElementRows()) { for (ResultElement resElt: currentRow) { if (resElt != null) { if (selectionIds.containsKey(resElt.getId())) { selected.add(resElt.getId().toString()); } } } } } } else { for (List<ResultElement> currentRow: getResultElementRows()) { ResultElement resElt = currentRow.get(allSelected); if (resElt != null) { if (!selectionIds.containsKey(resElt.getId())) { selected.add(resElt.getId().toString()); } } } } return selected; } /** * Clear the table selection */ public void clearSelectIds() { selectionIds = new LinkedHashMap<Integer, String>(); allSelected = -1; } private class SelectionEntry { Integer id; String fieldName; } /** * Return an Iterator over the selected id/fieldname pairs */ private Iterator<SelectionEntry> selectedEntryIterator() { if (allSelected == -1) { return new Iterator<SelectionEntry>() { Iterator<Map.Entry<Integer, String>> selectionIter = selectionIds.entrySet().iterator(); public boolean hasNext() { return selectionIter.hasNext(); } public SelectionEntry next() { SelectionEntry retEntry = new SelectionEntry(); Map.Entry<Integer, String> entry = selectionIter.next(); retEntry.id = entry.getKey(); retEntry.fieldName = entry.getValue(); return retEntry; } public void remove() { throw new UnsupportedOperationException(); } }; } else { return new Iterator<SelectionEntry>() { SelectionEntry nextEntry = null; int currentIndex = 0; { moveToNext(); } private void moveToNext() { while (true) { try { List<ResultElement> row = getAllRows().getResultElements(currentIndex); ResultElement element = row.get(allSelected); Integer elementId = element.getId(); if (!selectionIds.containsKey(elementId)) { nextEntry = new SelectionEntry(); nextEntry.id = elementId; nextEntry.fieldName = element.getField().toString(); break; } } catch (IndexOutOfBoundsException e) { nextEntry = null; break; } finally { currentIndex++; } } } public boolean hasNext() { return nextEntry != null; } public SelectionEntry next() { SelectionEntry retVal = nextEntry; moveToNext(); return retVal; } public void remove() { throw new UnsupportedOperationException(); } }; } } /** * Return an Iterator over the selected Ids * @return the Iterator */ public Iterator<Integer> selectedIdsIterator() { return new Iterator<Integer>() { Iterator<SelectionEntry> selectedEntryIter = selectedEntryIterator(); public boolean hasNext() { return selectedEntryIter.hasNext(); } public Integer next() { return selectedEntryIter.next().id; } public void remove() { throw new UnsupportedOperationException(); } }; } /** * If a whole column is selected, return its index, otherwise return -1. * @return the index of the column that is selected */ public int getAllSelectedColumn() { if (selectionIds.isEmpty()) { return allSelected; } else { return -1; } } /** * Select a whole column. * @param columnSelected the column index */ public void setAllSelectedColumn(int columnSelected) { if (columnSelected == -1) { selectedClass = null; } else { Class<?> columnClass = getAllRows().getColumns().get(columnSelected).getType(); selectedClass = TypeUtil.unqualifiedName(columnClass.getName()); } this.allSelected = columnSelected; } /** * @return the selectedClass */ public String getSelectedClass() { return selectedClass; } /** * @param selectedClass the selectedClass to set */ public void setSelectedClass(String selectedClass) { this.selectedClass = selectedClass; } /** * Set the rows fields to be a List of Lists of values from ResultElement objects from * getResultElementRows(). */ private void updateRows() { rows = new ArrayList<List<Object>>(); for (int i = getStartRow(); i < getStartRow() + getPageSize(); i++) { try { List<Object> newRow = getAllRows().get(i); rows.add(newRow); } catch (IndexOutOfBoundsException e) { // we're probably at the end of the results object, so stop looping break; } } } /** * Update the internal row list */ private void updateResultElementRows() { List<List<ResultElement>> newRows = new ArrayList<List<ResultElement>>(); String invalidStartMessage = "Invalid start row of table: " + getStartRow(); if (getStartRow() < 0) { throw new PageOutOfRangeException(invalidStartMessage); } try { if (getStartRow() == 0) { // no problem - 0 is always valid } else { getAllRows().getResultElements(getStartRow()); } } catch (IndexOutOfBoundsException e) { throw new PageOutOfRangeException(invalidStartMessage); } for (int i = getStartRow(); i < getStartRow() + getPageSize(); i++) { try { List<ResultElement> resultsRow = getAllRows().getResultElements(i); // if some objects already selected, set corresponding ResultElements here if (!selectionIds.isEmpty()) { for (ResultElement re : resultsRow) { if (re != null && selectionIds.keySet().contains(re.getId())) { re.setSelected(true); } } } newRows.add(resultsRow); } catch (IndexOutOfBoundsException e) { // we're probably at the end of the results object, so stop looping break; } } this.resultElementRows = newRows; // clear so that getRows() recreates it this.rows = null; } /** * Return the maximum retrievable index for this PagedTable. This will only ever return less * than getExactSize() if the underlying data source has a restriction on the maximum index * that can be retrieved. * @return the maximum retrieved index */ public int getMaxRetrievableIndex() { return webTable.getMaxRetrievableIndex(); } /** * Return the class from the data model for the data displayed in indexed column. * This may be the parent class of a field e.g. if column displays A.field where * field is a String and A is a class in the model this method will return A. * @param index of column to find type for * @return the class or parent class for the indexed column */ public Class<?> getTypeForColumn(int index) { return webTable.getColumns().get(index).getType(); } /** * Set the column names * @param columnNames a list of Strings */ public void setColumnNames(List<String> columnNames) { this.columnNames = columnNames; } /** * @return the webTable */ public WebTable getWebTable() { return webTable; } /** * @return the tableid */ public String getTableid() { return tableid; } /** * @param tableid the tableid to set */ public void setTableid(String tableid) { this.tableid = tableid; } /** * Returns indexes of columns, that should be displayed. * @return indexes */ public List<Integer> getVisibleIndexes() { List<Integer> ret = new ArrayList<Integer>(); for (int i = 0; i < getColumns().size(); i++) { if (getColumns().get(i) != null && getColumns().get(i).isVisible()) { ret.add(getColumns().get(i).getIndex()); } } return ret; } /** * Returns a List containing the results, with the columns rearranged. * * @return a List of rows, each of which is a List */ public List getRearrangedResults() { return new RearrangedList(); } private class RearrangedList extends AbstractList { private List<Integer> visibleIndexes; public RearrangedList() { visibleIndexes = getVisibleIndexes(); } @Override public List get(int index) { return translateRow(webTable.getResultElements(index)); } private List translateRow(List row) { if (row instanceof MultiRow) { MultiRow ret = new MultiRow(); for (List subRow : ((List<List>) row)) { ret.add(translateRow(subRow)); } return ret; } List ret = new ResultsRow(); for (int i = 0; i < visibleIndexes.size(); i++) { ret.add(row.get(visibleIndexes.get(i))); } return ret; } @Override public int size() { return webTable.size(); } @Override public Iterator iterator() { return new Iter(); } private class Iter implements Iterator { private Iterator subIter = webTable.iterator(); public boolean hasNext() { return subIter.hasNext(); } public Object next() { List originalRow = (List) subIter.next(); return translateRow(originalRow); } public void remove() { throw (new UnsupportedOperationException()); } } } /** * Return true if and only if nothing is selected * @return true if and only if nothing is selected */ public boolean isEmptySelection() { return !selectedIdsIterator().hasNext(); } }
intermine/web/main/src/org/intermine/web/logic/results/PagedTable.java
package org.intermine.web.logic.results; /* * Copyright (C) 2002-2008 FlyMine * * This code may be freely distributed and modified under the * terms of the GNU Lesser General Public Licence. This should * be distributed with the code. See the LICENSE file for more * information or http://www.gnu.org/copyleft/lesser.html. * */ import java.util.AbstractList; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import org.intermine.objectstore.query.ResultsRow; import org.intermine.objectstore.flatouterjoins.MultiRow; import org.intermine.util.TypeUtil; import org.intermine.web.logic.Constants; /** * A pageable and configurable table of data. * * @author Andrew Varley * @author Kim Rutherford */ public class PagedTable { private static final int FIRST_SELECTED_FIELDS_COUNT = 25; private WebTable webTable; private List<String> columnNames = null; private List resultElementRows = null; private int startRow = 0; private int pageSize = Constants.DEFAULT_TABLE_SIZE; private List<Column> columns; private String tableid; private List<List<Object>> rows = null; // object ids that have been selected in the table // TODO this may be more memory efficient with an IntPresentSet // note: if allSelected != -1 then this map contains those objects that are NOT selected private Map<Integer, String> selectionIds = new HashMap<Integer, String>(); // the index of the column the has all checkbox checked private int allSelected = -1; private String selectedClass; /** * Construct a PagedTable with a list of column names * @param webTable the WebTable that this PagedTable will display */ public PagedTable(WebTable webTable) { super(); this.webTable = webTable; } /** * Construct a PagedTable with a list of column names * @param webTable the WebTable that this PagedTable will display * @param pageSize the number of records to show on each page. Default value is 10. */ public PagedTable(WebTable webTable, int pageSize) { super(); this.webTable = webTable; this.pageSize = pageSize; } /** * Get the list of column configurations * * @return the List of columns in the order they are to be displayed */ public List<Column> getColumns() { return Collections.unmodifiableList(getColumnsInternal()); } private List<Column> getColumnsInternal() { if (columns == null) { columns = webTable.getColumns(); } return columns; } /** * Return the column names * @return the column names */ public List<String> getColumnNames() { if (columnNames == null) { columnNames = new ArrayList<String>(); Iterator<Column> iter = getColumns().iterator(); while (iter.hasNext()) { String columnName = iter.next().getName(); columnNames.add(columnName); } } return columnNames; } /** * Return the number of visible columns. Used by JSP pages. * @return the number of visible columns. */ public int getVisibleColumnCount() { int count = 0; for (Iterator<Column> i = getColumnsInternal().iterator(); i.hasNext();) { Column obj = i.next(); if (obj.isVisible()) { count++; } } return count; } /** * Move a column left * * @param index the index of the column to move */ public void moveColumnLeft(int index) { if (index > 0 && index <= getColumnsInternal().size() - 1) { getColumnsInternal().add(index - 1, getColumnsInternal().remove(index)); } } /** * Move a column right * * @param index the index of the column to move */ public void moveColumnRight(int index) { if (index >= 0 && index < getColumnsInternal().size() - 1) { getColumnsInternal().add(index + 1, getColumnsInternal().remove(index)); } } /** * Set the page size of the table * * @param pageSize the page size */ public void setPageSize(int pageSize) { this.pageSize = pageSize; startRow = (startRow / pageSize) * pageSize; updateResultElementRows(); } /** * Get the page size of the current page * * @return the page size */ public int getPageSize() { return pageSize; } /** * Get the index of the first row of this page * @return the index */ public int getStartRow() { return startRow; } /** * Get the page index. * @return current page index */ public int getPage() { return (startRow / pageSize); } /** * Set the page size and page together. * * @param page page number * @param size page size */ public void setPageAndPageSize(int page, int size) { this.pageSize = size; this.startRow = size * page; updateResultElementRows(); } /** * Get the index of the last row of this page * @return the index */ public int getEndRow() { return startRow + getResultElementRows().size() - 1; } /** * Go to the first page */ public void firstPage() { startRow = 0; updateResultElementRows(); } /** * Check if were are on the first page * @return true if we are on the first page */ public boolean isFirstPage() { return (startRow == 0); } /** * Go to the last page */ public void lastPage() { startRow = ((getExactSize() - 1) / pageSize) * pageSize; updateResultElementRows(); } /** * Check if we are on the last page * @return true if we are on the last page */ public boolean isLastPage() { return (!isSizeEstimate() && getEndRow() == getEstimatedSize() - 1); } /** * Go to the previous page */ public void previousPage() { if (startRow >= pageSize) { startRow -= pageSize; } updateResultElementRows(); } /** * Go to the next page */ public void nextPage() { startRow += pageSize; updateResultElementRows(); } /** * Return the currently visible rows of the table as a List of Lists of ResultElement objects. * @return the resultElementRows of the table */ public List<List<Object>> getRows() { if (rows == null) { updateRows(); } return rows; } /** * Return the currently visible rows of the table as a List of Lists of raw values/Objects. * @return the ResultElement of the table as rows */ public List<List<ResultElement>> getResultElementRows() { if (resultElementRows == null) { updateResultElementRows(); } return resultElementRows; } /** * Return all the resultElementRows of the table as a List of Lists. * * @return all the resultElementRows of the table */ public WebTable getAllRows() { return webTable; } /** * Get the (possibly estimated) number of resultElementRows of this table * @return the number of resultElementRows */ public int getEstimatedSize() { return webTable.getEstimatedSize(); } /** * Check whether the result of getSize is an estimate * @return true if the size is an estimate */ public boolean isSizeEstimate() { return webTable.isSizeEstimate(); } /** * Get the exact number of resultElementRows of this table * @return the number of resultElementRows */ public int getExactSize() { return webTable.size(); } /** * Add an object id and its field value * that has been selected in the table. * @param objectId the id to select */ public void selectId(Integer objectId) { if (allSelected == -1) { ResultElement resultElement = findIdInVisible(objectId); if (resultElement != null) { selectionIds.put(objectId, resultElement.getField().toString()); } } else { // remove because the all checkbox is on selectionIds.remove(objectId); } } /** * Remove the object with the given object id from the list of selected objects. * @param objectId the object store id */ public void deSelectId(Integer objectId) { if (allSelected == -1) { selectionIds.remove(objectId); } else { // add because the all checkbox is on ResultElement resultElement = findIdInVisible(objectId); if (resultElement != null) { selectionIds.put(objectId, resultElement.getField().toString()); } } } /** * Search the visible rows and return the first ResultElement with the given ID./ */ private ResultElement findIdInVisible(Integer id) { for (List<ResultElement> resultElements: getResultElementRows()) { for (ResultElement resultElement : resultElements) { if ((resultElement != null) && (resultElement.getId().equals(id)) && (resultElement.isKeyField())) { return resultElement; } } } return null; } /** * Return the fields for the first selected objects. Return the first * FIRST_SELECTED_FIELDS_COUNT fields. If there are more than that, append "..." * @return the list */ public List<String> getFirstSelectedFields() { List<String> retList = new ArrayList<String>(); Iterator<SelectionEntry> selectedEntryIter = selectedEntryIterator(); while (selectedEntryIter.hasNext()) { if (retList.size() < FIRST_SELECTED_FIELDS_COUNT) { retList.add(selectedEntryIter.next().fieldName); } else { retList.add("..."); break; } } return retList; } /** * Return selected object ids of the current page as a String[], needed for jsp multibox. * @return selected ids as Strings. */ public String[] getCurrentSelectedIdStrings() { return getCurrentSelectedIdStringsList().toArray(new String[0]); } /** * Return selected object ids of the current page as a List. * @return the list. */ public List<String> getCurrentSelectedIdStringsList() { List<String> selected = new ArrayList<String>(); if (allSelected == -1) { if (!selectionIds.isEmpty()) { for (List<ResultElement> currentRow: getResultElementRows()) { for (ResultElement resElt: currentRow) { if (resElt != null) { if (selectionIds.containsKey(resElt.getId())) { selected.add(resElt.getId().toString()); } } } } } } else { for (List<ResultElement> currentRow: getResultElementRows()) { ResultElement resElt = currentRow.get(allSelected); if (resElt != null) { if (!selectionIds.containsKey(resElt.getId())) { selected.add(resElt.getId().toString()); } } } } return selected; } /** * Clear the table selection */ public void clearSelectIds() { selectionIds = new LinkedHashMap<Integer, String>(); allSelected = -1; } private class SelectionEntry { Integer id; String fieldName; } /** * Return an Iterator over the selected id/fieldname pairs */ private Iterator<SelectionEntry> selectedEntryIterator() { if (allSelected == -1) { return new Iterator<SelectionEntry>() { Iterator<Map.Entry<Integer, String>> selectionIter = selectionIds.entrySet().iterator(); public boolean hasNext() { return selectionIter.hasNext(); } public SelectionEntry next() { SelectionEntry retEntry = new SelectionEntry(); Map.Entry<Integer, String> entry = selectionIter.next(); retEntry.id = entry.getKey(); retEntry.fieldName = entry.getValue(); return retEntry; } public void remove() { throw new UnsupportedOperationException(); } }; } else { return new Iterator<SelectionEntry>() { SelectionEntry nextEntry = null; int currentIndex = 0; { moveToNext(); } private void moveToNext() { while (true) { try { List<ResultElement> row = getAllRows().getResultElements(currentIndex); ResultElement element = row.get(allSelected); Integer elementId = element.getId(); if (!selectionIds.containsKey(elementId)) { nextEntry = new SelectionEntry(); nextEntry.id = elementId; nextEntry.fieldName = element.getField().toString(); break; } } catch (IndexOutOfBoundsException e) { nextEntry = null; break; } finally { currentIndex++; } } } public boolean hasNext() { return nextEntry != null; } public SelectionEntry next() { SelectionEntry retVal = nextEntry; moveToNext(); return retVal; } public void remove() { throw new UnsupportedOperationException(); } }; } } /** * Return an Iterator over the selected Ids * @return the Iterator */ public Iterator<Integer> selectedIdsIterator() { return new Iterator<Integer>() { Iterator<SelectionEntry> selectedEntryIter = selectedEntryIterator(); public boolean hasNext() { return selectedEntryIter.hasNext(); } public Integer next() { return selectedEntryIter.next().id; } public void remove() { throw new UnsupportedOperationException(); } }; } /** * If a whole column is selected, return its index, otherwise return -1. * @return the index of the column that is selected */ public int getAllSelectedColumn() { if (selectionIds.isEmpty()) { return allSelected; } else { return -1; } } /** * Select a whole column. * @param columnSelected the column index */ public void setAllSelectedColumn(int columnSelected) { if (columnSelected == -1) { selectedClass = null; } else { Class<?> columnClass = getAllRows().getColumns().get(columnSelected).getClass(); selectedClass = TypeUtil.unqualifiedName(columnClass.getName()); } this.allSelected = columnSelected; } /** * @return the selectedClass */ public String getSelectedClass() { return selectedClass; } /** * @param selectedClass the selectedClass to set */ public void setSelectedClass(String selectedClass) { this.selectedClass = selectedClass; } /** * Set the rows fields to be a List of Lists of values from ResultElement objects from * getResultElementRows(). */ private void updateRows() { rows = new ArrayList<List<Object>>(); for (int i = getStartRow(); i < getStartRow() + getPageSize(); i++) { try { List<Object> newRow = getAllRows().get(i); rows.add(newRow); } catch (IndexOutOfBoundsException e) { // we're probably at the end of the results object, so stop looping break; } } } /** * Update the internal row list */ private void updateResultElementRows() { List<List<ResultElement>> newRows = new ArrayList<List<ResultElement>>(); String invalidStartMessage = "Invalid start row of table: " + getStartRow(); if (getStartRow() < 0) { throw new PageOutOfRangeException(invalidStartMessage); } try { if (getStartRow() == 0) { // no problem - 0 is always valid } else { getAllRows().getResultElements(getStartRow()); } } catch (IndexOutOfBoundsException e) { throw new PageOutOfRangeException(invalidStartMessage); } for (int i = getStartRow(); i < getStartRow() + getPageSize(); i++) { try { List<ResultElement> resultsRow = getAllRows().getResultElements(i); // if some objects already selected, set corresponding ResultElements here if (!selectionIds.isEmpty()) { for (ResultElement re : resultsRow) { if (re != null && selectionIds.keySet().contains(re.getId())) { re.setSelected(true); } } } newRows.add(resultsRow); } catch (IndexOutOfBoundsException e) { // we're probably at the end of the results object, so stop looping break; } } this.resultElementRows = newRows; // clear so that getRows() recreates it this.rows = null; } /** * Return the maximum retrievable index for this PagedTable. This will only ever return less * than getExactSize() if the underlying data source has a restriction on the maximum index * that can be retrieved. * @return the maximum retrieved index */ public int getMaxRetrievableIndex() { return webTable.getMaxRetrievableIndex(); } /** * Return the class from the data model for the data displayed in indexed column. * This may be the parent class of a field e.g. if column displays A.field where * field is a String and A is a class in the model this method will return A. * @param index of column to find type for * @return the class or parent class for the indexed column */ public Class<?> getTypeForColumn(int index) { return webTable.getColumns().get(index).getType(); } /** * Set the column names * @param columnNames a list of Strings */ public void setColumnNames(List<String> columnNames) { this.columnNames = columnNames; } /** * @return the webTable */ public WebTable getWebTable() { return webTable; } /** * @return the tableid */ public String getTableid() { return tableid; } /** * @param tableid the tableid to set */ public void setTableid(String tableid) { this.tableid = tableid; } /** * Returns indexes of columns, that should be displayed. * @return indexes */ public List<Integer> getVisibleIndexes() { List<Integer> ret = new ArrayList<Integer>(); for (int i = 0; i < getColumns().size(); i++) { if (getColumns().get(i) != null && getColumns().get(i).isVisible()) { ret.add(getColumns().get(i).getIndex()); } } return ret; } /** * Returns a List containing the results, with the columns rearranged. * * @return a List of rows, each of which is a List */ public List getRearrangedResults() { return new RearrangedList(); } private class RearrangedList extends AbstractList { private List<Integer> visibleIndexes; public RearrangedList() { visibleIndexes = getVisibleIndexes(); } @Override public List get(int index) { return translateRow(webTable.getResultElements(index)); } private List translateRow(List row) { if (row instanceof MultiRow) { MultiRow ret = new MultiRow(); for (List subRow : ((List<List>) row)) { ret.add(translateRow(subRow)); } return ret; } List ret = new ResultsRow(); for (int i = 0; i < visibleIndexes.size(); i++) { ret.add(row.get(visibleIndexes.get(i))); } return ret; } @Override public int size() { return webTable.size(); } @Override public Iterator iterator() { return new Iter(); } private class Iter implements Iterator { private Iterator subIter = webTable.iterator(); public boolean hasNext() { return subIter.hasNext(); } public Object next() { List originalRow = (List) subIter.next(); return translateRow(originalRow); } public void remove() { throw (new UnsupportedOperationException()); } } } /** * Return true if and only if nothing is selected * @return true if and only if nothing is selected */ public boolean isEmptySelection() { return !selectedIdsIterator().hasNext(); } }
Fixed PagedTable.setAllSelectColumn to set the right class name for the column. Former-commit-id: c7a85ee25926c33aa37c8d4307b5e8af2574caf6
intermine/web/main/src/org/intermine/web/logic/results/PagedTable.java
Fixed PagedTable.setAllSelectColumn to set the right class name for the column.
Java
lgpl-2.1
aaf754b6663aaf8cb94887fe784c504e6e9f2b33
0
languagetool-org/languagetool,jimregan/languagetool,jimregan/languagetool,languagetool-org/languagetool,jimregan/languagetool,languagetool-org/languagetool,jimregan/languagetool,jimregan/languagetool,languagetool-org/languagetool,languagetool-org/languagetool
/* LanguageTool, a natural language style checker * Copyright (C) 2018 Daniel Naber (http://www.danielnaber.de) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 * USA */ package org.languagetool.rules.de; import org.languagetool.AnalyzedSentence; import org.languagetool.AnalyzedTokenReadings; import org.languagetool.JLanguageTool; import org.languagetool.databroker.ResourceDataBroker; import org.languagetool.language.GermanyGerman; import org.languagetool.languagemodel.BaseLanguageModel; import org.languagetool.languagemodel.LanguageModel; import org.languagetool.rules.*; import java.io.IOException; import java.io.InputStream; import java.util.*; import static org.languagetool.tools.StringTools.*; /** * Find compounds that might be morphologically correct but are still probably wrong, like 'Lehrzeile'. * @since 4.1 */ public class ProhibitedCompoundRule extends Rule { // have objects static for better performance (rule gets initialized for every check) private static final List<Pair> lowercasePairs = Arrays.asList( // NOTE: words here must be all-lowercase // NOTE: no need to add words from confusion_sets.txt, they will be used automatically (if starting with uppercase char) new Pair("abschluss", "Ende", "Abschuss", "Vorgang des Abschießens, z.B. mit einer Waffe"), new Pair("brache", "verlassenes Grundstück", "branche", "Wirtschaftszweig"), new Pair("wieder", "erneut, wiederholt, nochmal (Wiederholung, Wiedervorlage, ...)", "wider", "gegen, entgegen (Widerwille, Widerstand, Widerspruch, ...)"), new Pair("leer", "ohne Inhalt", "lehr", "bezogen auf Ausbildung und Wissen"), new Pair("Gewerbe", "wirtschaftliche Tätigkeit", "Gewebe", "gewebter Stoff; Verbund ähnlicher Zellen"), new Pair("Schuh", "Fußbekleidung", "Schul", "auf die Schule bezogen"), new Pair("klima", "langfristige Wetterzustände", "lima", "Hauptstadt von Peru"), new Pair("modell", "vereinfachtes Abbild der Wirklichkeit", "model", "Fotomodell"), new Pair("spitze", "spitzes Ende eines Gegenstandes", "spritze", "medizinisches Instrument zur Injektion") ); private static final GermanSpellerRule spellerRule = new GermanSpellerRule(JLanguageTool.getMessageBundle(), new GermanyGerman()); private static final List<String> ignoreWords = Arrays.asList("Die", "De"); private static final List<Pair> pairs = new ArrayList<>(); static { addUpperCaseVariants(); addItemsFromConfusionSets(); } private static void addUpperCaseVariants() { for (Pair lcPair : lowercasePairs) { pairs.add(new Pair(lcPair.part1, lcPair.part1Desc, lcPair.part2, lcPair.part2Desc)); String ucPart1 = uppercaseFirstChar(lcPair.part1); String ucPart2 = uppercaseFirstChar(lcPair.part2); if (!lcPair.part1.equals(ucPart1) || !lcPair.part2.equals(ucPart2)) { pairs.add(new Pair(ucPart1, lcPair.part1Desc, ucPart2, lcPair.part2Desc)); } } } private static void addItemsFromConfusionSets() { try { ResourceDataBroker dataBroker = JLanguageTool.getDataBroker(); try (InputStream confusionSetStream = dataBroker.getFromResourceDirAsStream("/de/confusion_sets.txt")) { ConfusionSetLoader loader = new ConfusionSetLoader(); Map<String, List<ConfusionSet>> confusionSet = loader.loadConfusionSet(confusionSetStream); for (Map.Entry<String, List<ConfusionSet>> entry : confusionSet.entrySet()) { for (ConfusionSet set : entry.getValue()) { boolean allUpper = set.getSet().stream().allMatch(k -> startsWithUppercase(k.getString()) && !ignoreWords.contains(k.getString())); if (allUpper) { Set<ConfusionString> cSet = set.getSet(); if (cSet.size() != 2) { throw new RuntimeException("Got confusion set with != 2 items: " + cSet); } Iterator<ConfusionString> it = cSet.iterator(); ConfusionString part1 = it.next(); ConfusionString part2 = it.next(); pairs.add(new Pair(part1.getString(), part1.getDescription(), part2.getString(), part2.getDescription())); pairs.add(new Pair(lowercaseFirstChar(part1.getString()), part1.getDescription(), lowercaseFirstChar(part2.getString()), part2.getDescription())); } } } } } catch (IOException e) { throw new RuntimeException(e); } } private final BaseLanguageModel lm; public ProhibitedCompoundRule(ResourceBundle messages, LanguageModel lm) { this.lm = (BaseLanguageModel) Objects.requireNonNull(lm); super.setCategory(Categories.TYPOS.getCategory(messages)); } @Override public String getId() { return "DE_PROHIBITED_COMPOUNDS"; } @Override public String getDescription() { return "Markiert wahrscheinlich falsche Komposita wie 'Lehrzeile', wenn 'Leerzeile' häufiger vorkommt."; } @Override public RuleMatch[] match(AnalyzedSentence sentence) throws IOException { List<RuleMatch> ruleMatches = new ArrayList<>(); for (AnalyzedTokenReadings readings : sentence.getTokensWithoutWhitespace()) { String word = readings.getToken(); for (Pair pair : pairs) { String variant = null; if (word.contains(pair.part1)) { variant = word.replaceFirst(pair.part1, pair.part2); } else if (word.contains(pair.part2)) { variant = word.replaceFirst(pair.part2, pair.part1); } if (variant == null) { continue; } long wordCount = lm.getCount(word); long variantCount = lm.getCount(variant); //float factor = variantCount / (float)Math.max(wordCount, 1); //System.out.println("word: " + word + " (" + wordCount + "), variant: " + variant + " (" + variantCount + "), factor: " + factor + ", pair: " + pair); if (variantCount > 0 && wordCount == 0 && !spellerRule.isMisspelled(variant)) { String msg; if (pair.part1Desc != null && pair.part2Desc != null) { msg = "Möglicher Tippfehler. " + uppercaseFirstChar(pair.part1) + ": " + pair.part1Desc + ", " + uppercaseFirstChar(pair.part2) + ": " + pair.part2Desc; } else { msg = "Möglicher Tippfehler."; } RuleMatch match = new RuleMatch(this, sentence, readings.getStartPos(), readings.getEndPos(), msg); match.setSuggestedReplacement(variant); ruleMatches.add(match); break; } } } return toRuleMatchArray(ruleMatches); } static class Pair { private final String part1; private final String part1Desc; private final String part2; private final String part2Desc; Pair(String part1, String part1Desc, String part2, String part2Desc) { this.part1 = part1; this.part1Desc = part1Desc; this.part2 = part2; this.part2Desc = part2Desc; } @Override public String toString() { return part1 + "/" + part2; } } }
languagetool-language-modules/de/src/main/java/org/languagetool/rules/de/ProhibitedCompoundRule.java
/* LanguageTool, a natural language style checker * Copyright (C) 2018 Daniel Naber (http://www.danielnaber.de) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 * USA */ package org.languagetool.rules.de; import org.languagetool.AnalyzedSentence; import org.languagetool.AnalyzedTokenReadings; import org.languagetool.JLanguageTool; import org.languagetool.databroker.ResourceDataBroker; import org.languagetool.language.GermanyGerman; import org.languagetool.languagemodel.BaseLanguageModel; import org.languagetool.languagemodel.LanguageModel; import org.languagetool.rules.*; import java.io.IOException; import java.io.InputStream; import java.util.*; import static org.languagetool.tools.StringTools.*; /** * Find compounds that might be morphologically correct but are still probably wrong, like 'Lehrzeile'. * @since 4.1 */ public class ProhibitedCompoundRule extends Rule { // have objects static for better performance (rule gets initialized for every check) private static final List<Pair> lowercasePairs = Arrays.asList( // NOTE: words here must be all-lowercase // NOTE: no need to add words from confusion_sets.txt, they will be used automatically (if starting with uppercase char) new Pair("abschluss", "Ende", "Abschuss", "Vorgang des Abschießens, z.B. mit einer Waffe"), new Pair("wieder", "erneut, wiederholt, nochmal (Wiederholung, Wiedervorlage, ...)", "wider", "gegen, entgegen (Widerwille, Widerstand, Widerspruch, ...)"), new Pair("leer", "ohne Inhalt", "lehr", "bezogen auf Ausbildung und Wissen"), new Pair("Gewerbe", "wirtschaftliche Tätigkeit", "Gewebe", "gewebter Stoff; Verbund ähnlicher Zellen"), new Pair("Schuh", "Fußbekleidung", "Schul", "auf die Schule bezogen"), new Pair("klima", "langfristige Wetterzustände", "lima", "Hauptstadt von Peru"), new Pair("modell", "vereinfachtes Abbild der Wirklichkeit", "model", "Fotomodell"), new Pair("spitze", "spitzes Ende eines Gegenstandes", "spritze", "medizinisches Instrument zur Injektion") ); private static final GermanSpellerRule spellerRule = new GermanSpellerRule(JLanguageTool.getMessageBundle(), new GermanyGerman()); private static final List<String> ignoreWords = Arrays.asList("Die", "De"); private static final List<Pair> pairs = new ArrayList<>(); static { addUpperCaseVariants(); addItemsFromConfusionSets(); } private static void addUpperCaseVariants() { for (Pair lcPair : lowercasePairs) { pairs.add(new Pair(lcPair.part1, lcPair.part1Desc, lcPair.part2, lcPair.part2Desc)); String ucPart1 = uppercaseFirstChar(lcPair.part1); String ucPart2 = uppercaseFirstChar(lcPair.part2); if (!lcPair.part1.equals(ucPart1) || !lcPair.part2.equals(ucPart2)) { pairs.add(new Pair(ucPart1, lcPair.part1Desc, ucPart2, lcPair.part2Desc)); } } } private static void addItemsFromConfusionSets() { try { ResourceDataBroker dataBroker = JLanguageTool.getDataBroker(); try (InputStream confusionSetStream = dataBroker.getFromResourceDirAsStream("/de/confusion_sets.txt")) { ConfusionSetLoader loader = new ConfusionSetLoader(); Map<String, List<ConfusionSet>> confusionSet = loader.loadConfusionSet(confusionSetStream); for (Map.Entry<String, List<ConfusionSet>> entry : confusionSet.entrySet()) { for (ConfusionSet set : entry.getValue()) { boolean allUpper = set.getSet().stream().allMatch(k -> startsWithUppercase(k.getString()) && !ignoreWords.contains(k.getString())); if (allUpper) { Set<ConfusionString> cSet = set.getSet(); if (cSet.size() != 2) { throw new RuntimeException("Got confusion set with != 2 items: " + cSet); } Iterator<ConfusionString> it = cSet.iterator(); ConfusionString part1 = it.next(); ConfusionString part2 = it.next(); pairs.add(new Pair(part1.getString(), part1.getDescription(), part2.getString(), part2.getDescription())); pairs.add(new Pair(lowercaseFirstChar(part1.getString()), part1.getDescription(), lowercaseFirstChar(part2.getString()), part2.getDescription())); } } } } } catch (IOException e) { throw new RuntimeException(e); } } private final BaseLanguageModel lm; public ProhibitedCompoundRule(ResourceBundle messages, LanguageModel lm) { this.lm = (BaseLanguageModel) Objects.requireNonNull(lm); super.setCategory(Categories.TYPOS.getCategory(messages)); } @Override public String getId() { return "DE_PROHIBITED_COMPOUNDS"; } @Override public String getDescription() { return "Markiert wahrscheinlich falsche Komposita wie 'Lehrzeile', wenn 'Leerzeile' häufiger vorkommt."; } @Override public RuleMatch[] match(AnalyzedSentence sentence) throws IOException { List<RuleMatch> ruleMatches = new ArrayList<>(); for (AnalyzedTokenReadings readings : sentence.getTokensWithoutWhitespace()) { String word = readings.getToken(); for (Pair pair : pairs) { String variant = null; if (word.contains(pair.part1)) { variant = word.replaceFirst(pair.part1, pair.part2); } else if (word.contains(pair.part2)) { variant = word.replaceFirst(pair.part2, pair.part1); } if (variant == null) { continue; } long wordCount = lm.getCount(word); long variantCount = lm.getCount(variant); //float factor = variantCount / (float)Math.max(wordCount, 1); //System.out.println("word: " + word + " (" + wordCount + "), variant: " + variant + " (" + variantCount + "), factor: " + factor + ", pair: " + pair); if (variantCount > 0 && wordCount == 0 && !spellerRule.isMisspelled(variant)) { String msg; if (pair.part1Desc != null && pair.part2Desc != null) { msg = "Möglicher Tippfehler. " + uppercaseFirstChar(pair.part1) + ": " + pair.part1Desc + ", " + uppercaseFirstChar(pair.part2) + ": " + pair.part2Desc; } else { msg = "Möglicher Tippfehler."; } RuleMatch match = new RuleMatch(this, sentence, readings.getStartPos(), readings.getEndPos(), msg); match.setSuggestedReplacement(variant); ruleMatches.add(match); break; } } } return toRuleMatchArray(ruleMatches); } static class Pair { private final String part1; private final String part1Desc; private final String part2; private final String part2Desc; Pair(String part1, String part1Desc, String part2, String part2Desc) { this.part1 = part1; this.part1Desc = part1Desc; this.part2 = part2; this.part2Desc = part2Desc; } @Override public String toString() { return part1 + "/" + part2; } } }
[de] extend with Brache/Branche
languagetool-language-modules/de/src/main/java/org/languagetool/rules/de/ProhibitedCompoundRule.java
[de] extend with Brache/Branche
Java
apache-2.0
088e702cd8ca74e801126f5095c86c8d721e1864
0
cscorley/solr-only-mirror,cscorley/solr-only-mirror,cscorley/solr-only-mirror
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.search.function; import org.apache.lucene.codecs.Codec; import org.apache.lucene.index.Norm; import org.apache.lucene.index.FieldInvertState; import org.apache.lucene.search.FieldCache; import org.apache.lucene.search.similarities.DefaultSimilarity; import org.apache.lucene.search.similarities.TFIDFSimilarity; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; import org.junit.BeforeClass; import org.junit.Test; import org.junit.Ignore; import java.io.FileOutputStream; import java.io.OutputStreamWriter; import java.io.Writer; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Random; /** * Tests some basic functionality of Solr while demonstrating good * Best Practices for using AbstractSolrTestCase */ public class TestFunctionQuery extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig-functionquery.xml","schema11.xml"); } String base = "external_foo_extf"; static long start = System.currentTimeMillis(); void makeExternalFile(String field, String contents, String charset) { String dir = h.getCore().getDataDir(); String filename = dir + "/external_" + field + "." + (start++); try { Writer out = new OutputStreamWriter(new FileOutputStream(filename), charset); out.write(contents); out.close(); } catch (Exception e) { throw new RuntimeException(e); } } void createIndex(String field, float... values) { // lrf.args.put("version","2.0"); for (float val : values) { String s = Float.toString(val); if (field!=null) assertU(adoc("id", s, field, s)); else assertU(adoc("id", s)); if (random.nextInt(100) < 20) { if (field!=null) assertU(adoc("id", s, field, s)); else assertU(adoc("id", s)); } if (random.nextInt(100) < 20) { assertU(commit()); } // System.out.println("added doc for " + val); } // assertU(optimize()); // squeeze out any possible deleted docs assertU(commit()); } // replace \0 with the field name and create a parseable string public String func(String field, String template) { StringBuilder sb = new StringBuilder("{!func}"); for (char ch : template.toCharArray()) { if (ch=='\0') { sb.append(field); continue; } sb.append(ch); } return sb.toString(); } void singleTest(String field, String funcTemplate, List<String> args, float... results) { String parseableQuery = func(field, funcTemplate); List<String> nargs = new ArrayList<String>(Arrays.asList("q", parseableQuery ,"fl", "*,score" ,"indent","on" ,"rows","100")); if (args != null) { for (String arg : args) { nargs.add(arg.replace("\0",field)); } } List<String> tests = new ArrayList<String>(); // Construct xpaths like the following: // "//doc[./float[@name='foo_pf']='10.0' and ./float[@name='score']='10.0']" for (int i=0; i<results.length; i+=2) { String xpath = "//doc[./float[@name='" + "id" + "']='" + results[i] + "' and ./float[@name='score']='" + results[i+1] + "']"; tests.add(xpath); } assertQ(req(nargs.toArray(new String[]{})) , tests.toArray(new String[]{}) ); } void singleTest(String field, String funcTemplate, float... results) { singleTest(field, funcTemplate, null, results); } void doTest(String field) { // lrf.args.put("version","2.0"); float[] vals = new float[] { 100,-4,0,10,25,5 }; createIndex(field,vals); createIndex(null, 88); // id with no value // test identity (straight field value) singleTest(field, "\0", 10,10); // test constant score singleTest(field,"1.414213", 10, 1.414213f); singleTest(field,"-1.414213", 10, -1.414213f); singleTest(field,"sum(\0,1)", 10, 11); singleTest(field,"sum(\0,\0)", 10, 20); singleTest(field,"sum(\0,\0,5)", 10, 25); singleTest(field,"sub(\0,1)", 10, 9); singleTest(field,"product(\0,1)", 10, 10); singleTest(field,"product(\0,-2,-4)", 10, 80); singleTest(field,"log(\0)",10,1, 100,2); singleTest(field,"sqrt(\0)",100,10, 25,5, 0,0); singleTest(field,"abs(\0)",10,10, -4,4); singleTest(field,"pow(\0,\0)",0,1, 5,3125); singleTest(field,"pow(\0,0.5)",100,10, 25,5, 0,0); singleTest(field,"div(1,\0)",-4,-.25f, 10,.1f, 100,.01f); singleTest(field,"div(1,1)",-4,1, 10,1); singleTest(field,"sqrt(abs(\0))",-4,2); singleTest(field,"sqrt(sum(29,\0))",-4,5); singleTest(field,"map(\0,0,0,500)",10,10, -4,-4, 0,500); singleTest(field,"map(\0,-4,5,500)",100,100, -4,500, 0,500, 5,500, 10,10, 25,25); singleTest(field,"scale(\0,-1,1)",-4,-1, 100,1, 0,-0.9230769f); singleTest(field,"scale(\0,-10,1000)",-4,-10, 100,1000, 0,28.846153f); // test that infinity doesn't mess up scale function singleTest(field,"scale(log(\0),-1000,1000)",100,1000); // test use of an ValueSourceParser plugin: nvl function singleTest(field,"nvl(\0,1)", 0, 1, 100, 100); // compose the ValueSourceParser plugin function with another function singleTest(field, "nvl(sum(0,\0),1)", 0, 1, 100, 100); // test simple embedded query singleTest(field,"query({!func v=\0})", 10, 10, 88, 0); // test default value for embedded query singleTest(field,"query({!lucene v='\0:[* TO *]'},8)", 88, 8); singleTest(field,"sum(query({!func v=\0},7.1),query({!func v=\0}))", 10, 20, 100, 200); // test with sub-queries specified by other request args singleTest(field,"query({!func v=$vv})", Arrays.asList("vv","\0"), 10, 10, 88, 0); singleTest(field,"query($vv)",Arrays.asList("vv","{!func}\0"), 10, 10, 88, 0); singleTest(field,"sum(query($v1,5),query($v1,7))", Arrays.asList("v1","\0:[* TO *]"), 88,12 ); purgeFieldCache(FieldCache.DEFAULT); // avoid FC insanity } @Test public void testFunctions() { doTest("foo_pf"); // a plain float field doTest("foo_f"); // a sortable float field doTest("foo_tf"); // a trie float field } @Test public void testExternalField() throws Exception { String field = "foo_extf"; float[] ids = {100,-4,0,10,25,5,77,23,55,-78,-45,-24,63,78,94,22,34,54321,261,-627}; createIndex(null,ids); // Unsorted field, largest first makeExternalFile(field, "54321=543210\n0=-999\n25=250","UTF-8"); // test identity (straight field value) singleTest(field, "\0", 54321, 543210, 0,-999, 25,250, 100, 1); Object orig = FileFloatSource.onlyForTesting; singleTest(field, "log(\0)"); // make sure the values were cached assertTrue(orig == FileFloatSource.onlyForTesting); singleTest(field, "sqrt(\0)"); assertTrue(orig == FileFloatSource.onlyForTesting); makeExternalFile(field, "0=1","UTF-8"); assertU(h.query("/reloadCache",lrf.makeRequest("",""))); singleTest(field, "sqrt(\0)"); assertTrue(orig != FileFloatSource.onlyForTesting); Random r = random; for (int i=0; i<10; i++) { // do more iterations for a thorough test int len = r.nextInt(ids.length+1); boolean sorted = r.nextBoolean(); // shuffle ids for (int j=0; j<ids.length; j++) { int other=r.nextInt(ids.length); float v=ids[0]; ids[0] = ids[other]; ids[other] = v; } if (sorted) { // sort only the first elements Arrays.sort(ids,0,len); } // make random values float[] vals = new float[len]; for (int j=0; j<len; j++) { vals[j] = r.nextInt(200)-100; } // make and write the external file StringBuilder sb = new StringBuilder(); for (int j=0; j<len; j++) { sb.append("" + ids[j] + "=" + vals[j]+"\n"); } makeExternalFile(field, sb.toString(),"UTF-8"); // make it visible assertU(h.query("/reloadCache",lrf.makeRequest("",""))); // test it float[] answers = new float[ids.length*2]; for (int j=0; j<len; j++) { answers[j*2] = ids[j]; answers[j*2+1] = vals[j]; } for (int j=len; j<ids.length; j++) { answers[j*2] = ids[j]; answers[j*2+1] = 1; // the default values } singleTest(field, "\0", answers); // System.out.println("Done test "+i); } purgeFieldCache(FieldCache.DEFAULT); // avoid FC insanity } @Test public void testExternalFileFieldStringKeys() throws Exception { final String extField = "foo_extfs"; final String keyField = "sfile_s"; assertU(adoc("id", "991", keyField, "AAA=AAA")); assertU(adoc("id", "992", keyField, "BBB")); assertU(adoc("id", "993", keyField, "CCC=CCC")); assertU(commit()); makeExternalFile(extField, "AAA=AAA=543210\nBBB=-8\nCCC=CCC=250","UTF-8"); singleTest(extField,"\0",991,543210,992,-8,993,250); } @Test public void testExternalFileFieldNumericKey() throws Exception { final String extField = "eff_trie"; final String keyField = "eff_ti"; assertU(adoc("id", "991", keyField, "91")); assertU(adoc("id", "992", keyField, "92")); assertU(adoc("id", "993", keyField, "93")); assertU(commit()); makeExternalFile(extField, "91=543210\n92=-8\n93=250\n=67","UTF-8"); singleTest(extField,"\0",991,543210,992,-8,993,250); } @Test public void testGeneral() throws Exception { clearIndex(); assertU(adoc("id","1", "a_tdt","2009-08-31T12:10:10.123Z", "b_tdt","2009-08-31T12:10:10.124Z")); assertU(adoc("id","2", "a_t","how now brown cow")); assertU(commit()); // create more than one segment assertU(adoc("id","3", "a_t","brown cow")); assertU(adoc("id","4")); assertU(commit()); // create more than one segment assertU(adoc("id","5")); assertU(adoc("id","6", "a_t","cow cow cow cow cow")); assertU(commit()); // test relevancy functions assertQ(req("fl","*,score","q", "{!func}numdocs()", "fq","id:6"), "//float[@name='score']='6.0'"); assertQ(req("fl","*,score","q", "{!func}maxdoc()", "fq","id:6"), "//float[@name='score']='6.0'"); assertQ(req("fl","*,score","q", "{!func}docfreq(a_t,cow)", "fq","id:6"), "//float[@name='score']='3.0'"); assertQ(req("fl","*,score","q", "{!func}docfreq('a_t','cow')", "fq","id:6"), "//float[@name='score']='3.0'"); assertQ(req("fl","*,score","q", "{!func}docfreq($field,$value)", "fq","id:6", "field","a_t", "value","cow"), "//float[@name='score']='3.0'"); assertQ(req("fl","*,score","q", "{!func}termfreq(a_t,cow)", "fq","id:6"), "//float[@name='score']='5.0'"); TFIDFSimilarity similarity = new DefaultSimilarity(); // make sure it doesn't get a NPE if no terms are present in a field. assertQ(req("fl","*,score","q", "{!func}termfreq(nofield_t,cow)", "fq","id:6"), "//float[@name='score']='0.0'"); assertQ(req("fl","*,score","q", "{!func}docfreq(nofield_t,cow)", "fq","id:6"), "//float[@name='score']='0.0'"); assertQ(req("fl","*,score","q", "{!func}idf(nofield_t,cow)", "fq","id:6"), "//float[@name='score']='" + similarity.idf(0,6) + "'"); assertQ(req("fl","*,score","q", "{!func}tf(nofield_t,cow)", "fq","id:6"), "//float[@name='score']='" + similarity.tf(0) + "'"); assertQ(req("fl","*,score","q", "{!func}idf(a_t,cow)", "fq","id:6"), "//float[@name='score']='" + similarity.idf(3,6) + "'"); assertQ(req("fl","*,score","q", "{!func}tf(a_t,cow)", "fq","id:6"), "//float[@name='score']='" + similarity.tf(5) + "'"); FieldInvertState state = new FieldInvertState(); state.setBoost(1.0f); state.setLength(4); Norm norm = new Norm(); similarity.computeNorm(state, norm); float nrm = similarity.decodeNormValue(norm.field().numericValue().byteValue()); assertQ(req("fl","*,score","q", "{!func}norm(a_t)", "fq","id:2"), "//float[@name='score']='" + nrm + "'"); // sqrt(4)==2 and is exactly representable when quantized to a byte // test that ord and rord are working on a global index basis, not just // at the segment level (since Lucene 2.9 has switched to per-segment searching) assertQ(req("fl","*,score","q", "{!func}ord(id)", "fq","id:6"), "//float[@name='score']='6.0'"); assertQ(req("fl","*,score","q", "{!func}top(ord(id))", "fq","id:6"), "//float[@name='score']='6.0'"); assertQ(req("fl","*,score","q", "{!func}rord(id)", "fq","id:1"),"//float[@name='score']='6.0'"); assertQ(req("fl","*,score","q", "{!func}top(rord(id))", "fq","id:1"),"//float[@name='score']='6.0'"); // test that we can subtract dates to millisecond precision assertQ(req("fl","*,score","q", "{!func}ms(a_tdt,b_tdt)", "fq","id:1"), "//float[@name='score']='-1.0'"); assertQ(req("fl","*,score","q", "{!func}ms(b_tdt,a_tdt)", "fq","id:1"), "//float[@name='score']='1.0'"); assertQ(req("fl","*,score","q", "{!func}ms(2009-08-31T12:10:10.125Z,2009-08-31T12:10:10.124Z)", "fq","id:1"), "//float[@name='score']='1.0'"); assertQ(req("fl","*,score","q", "{!func}ms(2009-08-31T12:10:10.124Z,a_tdt)", "fq","id:1"), "//float[@name='score']='1.0'"); assertQ(req("fl","*,score","q", "{!func}ms(2009-08-31T12:10:10.125Z,b_tdt)", "fq","id:1"), "//float[@name='score']='1.0'"); assertQ(req("fl","*,score","q", "{!func}ms(2009-08-31T12:10:10.125Z/SECOND,2009-08-31T12:10:10.124Z/SECOND)", "fq","id:1"), "//float[@name='score']='0.0'"); // test that we can specify "NOW" assertQ(req("fl","*,score","q", "{!func}ms(NOW)", "NOW","1000"), "//float[@name='score']='1000.0'"); for (int i=100; i<112; i++) { assertU(adoc("id",""+i, "text","batman")); } assertU(commit()); assertU(adoc("id","120", "text","batman superman")); // in a smaller segment assertU(adoc("id","121", "text","superman")); assertU(commit()); // superman has a higher df (thus lower idf) in one segment, but reversed in the complete index String q ="{!func}query($qq)"; String fq="id:120"; assertQ(req("fl","*,score","q", q, "qq","text:batman", "fq",fq), "//float[@name='score']<'1.0'"); assertQ(req("fl","*,score","q", q, "qq","text:superman", "fq",fq), "//float[@name='score']>'1.0'"); // test weighting through a function range query assertQ(req("fl","*,score", "fq",fq, "q", "{!frange l=1 u=10}query($qq)", "qq","text:superman"), "//*[@numFound='1']"); // test weighting through a complex function q ="{!func}sub(div(sum(0.0,product(1,query($qq))),1),0)"; assertQ(req("fl","*,score","q", q, "qq","text:batman", "fq",fq), "//float[@name='score']<'1.0'"); assertQ(req("fl","*,score","q", q, "qq","text:superman", "fq",fq), "//float[@name='score']>'1.0'"); // test full param dereferencing assertQ(req("fl","*,score","q", "{!func}add($v1,$v2)", "v1","add($v3,$v4)", "v2","1", "v3","2", "v4","5" , "fq","id:1"), "//float[@name='score']='8.0'"); // test ability to parse multiple values assertQ(req("fl","*,score","q", "{!func}dist(2,vector(1,1),$pt)", "pt","3,1" , "fq","id:1"), "//float[@name='score']='2.0'"); // test that extra stuff after a function causes an error try { assertQ(req("fl","*,score","q", "{!func}10 wow dude ignore_exception")); fail(); } catch (Exception e) { // OK } // test that sorting by function weights correctly. superman should sort higher than batman due to idf of the whole index assertQ(req("q", "*:*", "fq","id:120 OR id:121", "sort","{!func v=$sortfunc} desc", "sortfunc","query($qq)", "qq","text:(batman OR superman)") ,"*//doc[1]/float[.='120.0']" ,"*//doc[2]/float[.='121.0']" ); purgeFieldCache(FieldCache.DEFAULT); // avoid FC insanity } /** * test collection-level term stats (new in 4.x indexes) */ public void testTotalTermFreq() throws Exception { assumeFalse("PreFlex codec does not support collection-level term stats", "Lucene3x".equals(Codec.getDefault().getName())); clearIndex(); assertU(adoc("id","1", "a_tdt","2009-08-31T12:10:10.123Z", "b_tdt","2009-08-31T12:10:10.124Z")); assertU(adoc("id","2", "a_t","how now brown cow")); assertU(commit()); // create more than one segment assertU(adoc("id","3", "a_t","brown cow")); assertU(adoc("id","4")); assertU(commit()); // create more than one segment assertU(adoc("id","5")); assertU(adoc("id","6", "a_t","cow cow cow cow cow")); assertU(commit()); assertQ(req("fl","*,score","q", "{!func}totaltermfreq('a_t','cow')", "fq","id:6"), "//float[@name='score']='7.0'"); assertQ(req("fl","*,score","q", "{!func}ttf(a_t,'cow')", "fq","id:6"), "//float[@name='score']='7.0'"); assertQ(req("fl","*,score","q", "{!func}sumtotaltermfreq('a_t')", "fq","id:6"), "//float[@name='score']='11.0'"); assertQ(req("fl","*,score","q", "{!func}sttf(a_t)", "fq","id:6"), "//float[@name='score']='11.0'"); } @Test public void testSortByFunc() throws Exception { assertU(adoc("id", "1", "const_s", "xx", "x_i", "100", "1_s", "a", "x:x_i", "100", "1-1_s", "a")); assertU(adoc("id", "2", "const_s", "xx", "x_i", "300", "1_s", "c", "x:x_i", "300", "1-1_s", "c")); assertU(adoc("id", "3", "const_s", "xx", "x_i", "200", "1_s", "b", "x:x_i", "200", "1-1_s", "b")); assertU(commit()); String desc = "/response/docs==[{'x_i':300},{'x_i':200},{'x_i':100}]"; String asc = "/response/docs==[{'x_i':100},{'x_i':200},{'x_i':300}]"; String threeonetwo = "/response/docs==[{'x_i':200},{'x_i':100},{'x_i':300}]"; String q = "id:[1 TO 3]"; assertJQ(req("q",q, "fl","x_i", "sort","add(x_i,x_i) desc") ,desc ); // param sub of entire function assertJQ(req("q",q, "fl","x_i", "sort", "const_s asc, $x asc", "x","add(x_i,x_i)") ,asc ); // multiple functions assertJQ(req("q",q, "fl","x_i", "sort", "$x asc, const_s asc, $y desc", "x", "5", "y","add(x_i,x_i)") ,desc ); // multiple functions inline assertJQ(req("q",q, "fl","x_i", "sort", "add( 10 , 10 ) asc, const_s asc, add(x_i , $const) desc", "const","50") ,desc ); // test function w/ local params + func inline assertJQ(req("q",q, "fl","x_i", "sort", "const_s asc, {!key=foo}add(x_i,x_i) desc") ,desc ); assertJQ(req("q",q, "fl","x_i", "sort", "{!key=foo}add(x_i,x_i) desc, const_s asc") ,desc ); // test multiple functions w/ local params + func inline assertJQ(req("q",q, "fl","x_i", "sort", "{!key=bar}add(10,20) asc, const_s asc, {!key=foo}add(x_i,x_i) desc") ,desc ); // test multiple functions w/ local param value not inlined assertJQ(req("q",q, "fl","x_i", "sort", "{!key=bar v=$s1} asc, {!key=foo v=$s2} desc", "s1","add(3,4)", "s2","add(x_i,5)") ,desc ); // no space between inlined localparams and sort order assertJQ(req("q",q, "fl","x_i", "sort", "{!key=bar v=$s1}asc,const_s asc,{!key=foo v=$s2}desc", "s1","add(3,4)", "s2","add(x_i,5)") ,desc ); // field name that isn't a legal java Identifier // and starts with a number to trick function parser assertJQ(req("q",q, "fl","x_i", "sort", "1_s asc") ,asc ); assertJQ(req("q",q, "fl","x_i", "sort", "x:x_i desc") ,desc ); assertJQ(req("q",q, "fl","x_i", "sort", "1-1_s asc") ,asc ); // really ugly field name that isn't a java Id, and can't be // parsed as a func, but sorted fine in Solr 1.4 assertJQ(req("q",q, "fl","x_i", "sort", "[]_s asc, {!key=foo}add(x_i,x_i) desc") ,desc ); // use localparms to sort by a lucene query, then a function assertJQ(req("q",q, "fl","x_i", "sort", "{!lucene v='id:3'}desc, {!key=foo}add(x_i,x_i) asc") ,threeonetwo ); } @Test public void testDegreeRads() throws Exception { assertU(adoc("id", "1", "x_td", "0", "y_td", "0")); assertU(adoc("id", "2", "x_td", "90", "y_td", String.valueOf(Math.PI / 2))); assertU(adoc("id", "3", "x_td", "45", "y_td", String.valueOf(Math.PI / 4))); assertU(commit()); assertQ(req("fl", "*,score", "q", "{!func}rad(x_td)", "fq", "id:1"), "//float[@name='score']='0.0'"); assertQ(req("fl", "*,score", "q", "{!func}rad(x_td)", "fq", "id:2"), "//float[@name='score']='" + (float) (Math.PI / 2) + "'"); assertQ(req("fl", "*,score", "q", "{!func}rad(x_td)", "fq", "id:3"), "//float[@name='score']='" + (float) (Math.PI / 4) + "'"); assertQ(req("fl", "*,score", "q", "{!func}deg(y_td)", "fq", "id:1"), "//float[@name='score']='0.0'"); assertQ(req("fl", "*,score", "q", "{!func}deg(y_td)", "fq", "id:2"), "//float[@name='score']='90.0'"); assertQ(req("fl", "*,score", "q", "{!func}deg(y_td)", "fq", "id:3"), "//float[@name='score']='45.0'"); } @Test public void testStrDistance() throws Exception { assertU(adoc("id", "1", "x_s", "foil")); assertU(commit()); assertQ(req("fl", "*,score", "q", "{!func}strdist(x_s, 'foit', edit)", "fq", "id:1"), "//float[@name='score']='0.75'"); assertQ(req("fl", "*,score", "q", "{!func}strdist(x_s, 'foit', jw)", "fq", "id:1"), "//float[@name='score']='0.8833333'"); assertQ(req("fl", "*,score", "q", "{!func}strdist(x_s, 'foit', ngram, 2)", "fq", "id:1"), "//float[@name='score']='0.875'"); } public void dofunc(String func, double val) throws Exception { // String sval = Double.toString(val); String sval = Float.toString((float)val); assertQ(req("fl", "*,score", "defType","func", "fq","id:1", "q",func), "//float[@name='score']='" + sval + "'"); } @Test public void testFuncs() throws Exception { assertU(adoc("id", "1", "foo_d", "9")); assertU(commit()); dofunc("1.0", 1.0); dofunc("e()", Math.E); dofunc("pi()", Math.PI); dofunc("add(2,3)", 2+3); dofunc("mul(2,3)", 2*3); dofunc("rad(45)", Math.toRadians(45)); dofunc("deg(.5)", Math.toDegrees(.5)); dofunc("sqrt(9)", Math.sqrt(9)); dofunc("cbrt(8)", Math.cbrt(8)); dofunc("max(0,1)", Math.max(0,1)); dofunc("max(10,3,8,7,5,4)", Math.max(Math.max(Math.max(Math.max(Math.max(10,3),8),7),5),4)); dofunc("min(0,1)", Math.min(0,1)); dofunc("min(10,3,8,7,5,4)", Math.min(Math.min(Math.min(Math.min(Math.min(10,3),8),7),5),4)); dofunc("log(100)", Math.log10(100)); dofunc("ln(3)", Math.log(3)); dofunc("exp(1)", Math.exp(1)); dofunc("sin(.5)", Math.sin(.5)); dofunc("cos(.5)", Math.cos(.5)); dofunc("tan(.5)", Math.tan(.5)); dofunc("asin(.5)", Math.asin(.5)); dofunc("acos(.5)", Math.acos(.5)); dofunc("atan(.5)", Math.atan(.5)); dofunc("sinh(.5)", Math.sinh(.5)); dofunc("cosh(.5)", Math.cosh(.5)); dofunc("tanh(.5)", Math.tanh(.5)); dofunc("ceil(2.3)", Math.ceil(2.3)); dofunc("floor(2.3)", Math.floor(2.3)); dofunc("rint(2.3)", Math.rint(2.3)); dofunc("pow(2,0.5)", Math.pow(2,0.5)); dofunc("hypot(3,4)", Math.hypot(3,4)); dofunc("atan2(.25,.5)", Math.atan2(.25,.5)); } /** * verify that both the field("...") value source parser as well as * ExternalFileField work with esoteric field names */ @Test public void testExternalFieldValueSourceParser() { clearIndex(); String field = "CoMpleX fieldName _extf"; String fieldAsFunc = "field(\"CoMpleX fieldName _extf\")"; float[] ids = {100,-4,0,10,25,5,77,23,55,-78,-45,-24,63,78,94,22,34,54321,261,-627}; createIndex(null,ids); // Unsorted field, largest first makeExternalFile(field, "54321=543210\n0=-999\n25=250","UTF-8"); // test identity (straight field value) singleTest(fieldAsFunc, "\0", 54321, 543210, 0,-999, 25,250, 100, 1); Object orig = FileFloatSource.onlyForTesting; singleTest(fieldAsFunc, "log(\0)"); // make sure the values were cached assertTrue(orig == FileFloatSource.onlyForTesting); singleTest(fieldAsFunc, "sqrt(\0)"); assertTrue(orig == FileFloatSource.onlyForTesting); makeExternalFile(field, "0=1","UTF-8"); assertU(adoc("id", "10000")); // will get same reader if no index change assertU(commit()); singleTest(fieldAsFunc, "sqrt(\0)"); assertTrue(orig != FileFloatSource.onlyForTesting); purgeFieldCache(FieldCache.DEFAULT); // avoid FC insanity } /** * some platforms don't allow quote characters in filenames, so * in addition to testExternalFieldValueSourceParser above, test a field * name with quotes in it that does NOT use ExternalFileField * @see #testExternalFieldValueSourceParser */ @Test public void testFieldValueSourceParser() { clearIndex(); String field = "CoMpleX \" fieldName _f"; String fieldAsFunc = "field(\"CoMpleX \\\" fieldName _f\")"; float[] ids = {100,-4,0,10,25,5,77,1}; createIndex(field, ids); // test identity (straight field value) singleTest(fieldAsFunc, "\0", 100,100, -4,-4, 0,0, 10,10, 25,25, 5,5, 77,77, 1,1); singleTest(fieldAsFunc, "sqrt(\0)", 100,10, 25,5, 0,0, 1,1); singleTest(fieldAsFunc, "log(\0)", 1,0); purgeFieldCache(FieldCache.DEFAULT); // avoid FC insanity } @Test public void testBooleanFunctions() throws Exception { assertU(adoc("id", "1", "text", "hello", "foo_s","A", "foo_ti", "0", "foo_tl","0")); assertU(adoc("id", "2" , "foo_ti","10", "foo_tl","11")); assertU(commit()); // true and false functions and constants assertJQ(req("q", "id:1", "fl", "t:true(),f:false(),tt:{!func}true,ff:{!func}false") , "/response/docs/[0]=={'t':true,'f':false,'tt':true,'ff':false}"); // test that exists(query) depends on the query matching the document assertJQ(req("q", "id:1", "fl", "t:exists(query($q1)),f:exists(query($q2))", "q1","text:hello", "q2","text:there") , "/response/docs/[0]=={'t':true,'f':false}"); // test if() assertJQ(req("q", "id:1", "fl", "a1:if(true,'A','B')", "fl","b1:if(false,'A','B')") , "/response/docs/[0]=={'a1':'A', 'b1':'B'}"); // test boolean operators assertJQ(req("q", "id:1", "fl", "t1:and(true,true)", "fl","f1:and(true,false)", "fl","f2:and(false,true)", "fl","f3:and(false,false)") , "/response/docs/[0]=={'t1':true, 'f1':false, 'f2':false, 'f3':false}"); assertJQ(req("q", "id:1", "fl", "t1:or(true,true)", "fl","t2:or(true,false)", "fl","t3:or(false,true)", "fl","f1:or(false,false)") , "/response/docs/[0]=={'t1':true, 't2':true, 't3':true, 'f1':false}"); assertJQ(req("q", "id:1", "fl", "f1:xor(true,true)", "fl","t1:xor(true,false)", "fl","t2:xor(false,true)", "fl","f2:xor(false,false)") , "/response/docs/[0]=={'t1':true, 't2':true, 'f1':false, 'f2':false}"); assertJQ(req("q", "id:1", "fl", "t:not(false),f:not(true)") , "/response/docs/[0]=={'t':true, 'f':false}"); // def(), the default function that returns the first value that exists assertJQ(req("q", "id:1", "fl", "x:def(id,123.0), y:def(foo_f,234.0)") , "/response/docs/[0]=={'x':1.0, 'y':234.0}"); assertJQ(req("q", "id:1", "fl", "x:def(foo_s,'Q'), y:def(missing_s,'W')") , "/response/docs/[0]=={'x':'A', 'y':'W'}"); // test constant conversion to boolean assertJQ(req("q", "id:1", "fl", "a:not(0), b:not(1), c:not(0.0), d:not(1.1), e:not('A')") , "/response/docs/[0]=={'a':true, 'b':false, 'c':true, 'd':false, 'e':false}"); } @Test public void testPseudoFieldFunctions() throws Exception { assertU(adoc("id", "1", "text", "hello", "foo_s","A")); assertU(adoc("id", "2")); assertU(commit()); assertJQ(req("q", "id:1", "fl", "a:1,b:2.0,c:'X',d:{!func}foo_s,e:{!func}bar_s") // if exists() is false, no pseudo-field should be added , "/response/docs/[0]=={'a':1, 'b':2.0,'c':'X','d':'A'}"); } }
core/src/test/org/apache/solr/search/function/TestFunctionQuery.java
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.solr.search.function; import org.apache.lucene.codecs.Codec; import org.apache.lucene.index.FieldInvertState; import org.apache.lucene.search.FieldCache; import org.apache.lucene.search.similarities.DefaultSimilarity; import org.apache.lucene.search.similarities.TFIDFSimilarity; import org.apache.solr.SolrTestCaseJ4; import org.apache.solr.common.params.SolrParams; import org.apache.solr.common.util.NamedList; import org.junit.BeforeClass; import org.junit.Test; import org.junit.Ignore; import java.io.FileOutputStream; import java.io.OutputStreamWriter; import java.io.Writer; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Random; /** * Tests some basic functionality of Solr while demonstrating good * Best Practices for using AbstractSolrTestCase */ public class TestFunctionQuery extends SolrTestCaseJ4 { @BeforeClass public static void beforeClass() throws Exception { initCore("solrconfig-functionquery.xml","schema11.xml"); } String base = "external_foo_extf"; static long start = System.currentTimeMillis(); void makeExternalFile(String field, String contents, String charset) { String dir = h.getCore().getDataDir(); String filename = dir + "/external_" + field + "." + (start++); try { Writer out = new OutputStreamWriter(new FileOutputStream(filename), charset); out.write(contents); out.close(); } catch (Exception e) { throw new RuntimeException(e); } } void createIndex(String field, float... values) { // lrf.args.put("version","2.0"); for (float val : values) { String s = Float.toString(val); if (field!=null) assertU(adoc("id", s, field, s)); else assertU(adoc("id", s)); if (random.nextInt(100) < 20) { if (field!=null) assertU(adoc("id", s, field, s)); else assertU(adoc("id", s)); } if (random.nextInt(100) < 20) { assertU(commit()); } // System.out.println("added doc for " + val); } // assertU(optimize()); // squeeze out any possible deleted docs assertU(commit()); } // replace \0 with the field name and create a parseable string public String func(String field, String template) { StringBuilder sb = new StringBuilder("{!func}"); for (char ch : template.toCharArray()) { if (ch=='\0') { sb.append(field); continue; } sb.append(ch); } return sb.toString(); } void singleTest(String field, String funcTemplate, List<String> args, float... results) { String parseableQuery = func(field, funcTemplate); List<String> nargs = new ArrayList<String>(Arrays.asList("q", parseableQuery ,"fl", "*,score" ,"indent","on" ,"rows","100")); if (args != null) { for (String arg : args) { nargs.add(arg.replace("\0",field)); } } List<String> tests = new ArrayList<String>(); // Construct xpaths like the following: // "//doc[./float[@name='foo_pf']='10.0' and ./float[@name='score']='10.0']" for (int i=0; i<results.length; i+=2) { String xpath = "//doc[./float[@name='" + "id" + "']='" + results[i] + "' and ./float[@name='score']='" + results[i+1] + "']"; tests.add(xpath); } assertQ(req(nargs.toArray(new String[]{})) , tests.toArray(new String[]{}) ); } void singleTest(String field, String funcTemplate, float... results) { singleTest(field, funcTemplate, null, results); } void doTest(String field) { // lrf.args.put("version","2.0"); float[] vals = new float[] { 100,-4,0,10,25,5 }; createIndex(field,vals); createIndex(null, 88); // id with no value // test identity (straight field value) singleTest(field, "\0", 10,10); // test constant score singleTest(field,"1.414213", 10, 1.414213f); singleTest(field,"-1.414213", 10, -1.414213f); singleTest(field,"sum(\0,1)", 10, 11); singleTest(field,"sum(\0,\0)", 10, 20); singleTest(field,"sum(\0,\0,5)", 10, 25); singleTest(field,"sub(\0,1)", 10, 9); singleTest(field,"product(\0,1)", 10, 10); singleTest(field,"product(\0,-2,-4)", 10, 80); singleTest(field,"log(\0)",10,1, 100,2); singleTest(field,"sqrt(\0)",100,10, 25,5, 0,0); singleTest(field,"abs(\0)",10,10, -4,4); singleTest(field,"pow(\0,\0)",0,1, 5,3125); singleTest(field,"pow(\0,0.5)",100,10, 25,5, 0,0); singleTest(field,"div(1,\0)",-4,-.25f, 10,.1f, 100,.01f); singleTest(field,"div(1,1)",-4,1, 10,1); singleTest(field,"sqrt(abs(\0))",-4,2); singleTest(field,"sqrt(sum(29,\0))",-4,5); singleTest(field,"map(\0,0,0,500)",10,10, -4,-4, 0,500); singleTest(field,"map(\0,-4,5,500)",100,100, -4,500, 0,500, 5,500, 10,10, 25,25); singleTest(field,"scale(\0,-1,1)",-4,-1, 100,1, 0,-0.9230769f); singleTest(field,"scale(\0,-10,1000)",-4,-10, 100,1000, 0,28.846153f); // test that infinity doesn't mess up scale function singleTest(field,"scale(log(\0),-1000,1000)",100,1000); // test use of an ValueSourceParser plugin: nvl function singleTest(field,"nvl(\0,1)", 0, 1, 100, 100); // compose the ValueSourceParser plugin function with another function singleTest(field, "nvl(sum(0,\0),1)", 0, 1, 100, 100); // test simple embedded query singleTest(field,"query({!func v=\0})", 10, 10, 88, 0); // test default value for embedded query singleTest(field,"query({!lucene v='\0:[* TO *]'},8)", 88, 8); singleTest(field,"sum(query({!func v=\0},7.1),query({!func v=\0}))", 10, 20, 100, 200); // test with sub-queries specified by other request args singleTest(field,"query({!func v=$vv})", Arrays.asList("vv","\0"), 10, 10, 88, 0); singleTest(field,"query($vv)",Arrays.asList("vv","{!func}\0"), 10, 10, 88, 0); singleTest(field,"sum(query($v1,5),query($v1,7))", Arrays.asList("v1","\0:[* TO *]"), 88,12 ); purgeFieldCache(FieldCache.DEFAULT); // avoid FC insanity } @Test public void testFunctions() { doTest("foo_pf"); // a plain float field doTest("foo_f"); // a sortable float field doTest("foo_tf"); // a trie float field } @Test public void testExternalField() throws Exception { String field = "foo_extf"; float[] ids = {100,-4,0,10,25,5,77,23,55,-78,-45,-24,63,78,94,22,34,54321,261,-627}; createIndex(null,ids); // Unsorted field, largest first makeExternalFile(field, "54321=543210\n0=-999\n25=250","UTF-8"); // test identity (straight field value) singleTest(field, "\0", 54321, 543210, 0,-999, 25,250, 100, 1); Object orig = FileFloatSource.onlyForTesting; singleTest(field, "log(\0)"); // make sure the values were cached assertTrue(orig == FileFloatSource.onlyForTesting); singleTest(field, "sqrt(\0)"); assertTrue(orig == FileFloatSource.onlyForTesting); makeExternalFile(field, "0=1","UTF-8"); assertU(h.query("/reloadCache",lrf.makeRequest("",""))); singleTest(field, "sqrt(\0)"); assertTrue(orig != FileFloatSource.onlyForTesting); Random r = random; for (int i=0; i<10; i++) { // do more iterations for a thorough test int len = r.nextInt(ids.length+1); boolean sorted = r.nextBoolean(); // shuffle ids for (int j=0; j<ids.length; j++) { int other=r.nextInt(ids.length); float v=ids[0]; ids[0] = ids[other]; ids[other] = v; } if (sorted) { // sort only the first elements Arrays.sort(ids,0,len); } // make random values float[] vals = new float[len]; for (int j=0; j<len; j++) { vals[j] = r.nextInt(200)-100; } // make and write the external file StringBuilder sb = new StringBuilder(); for (int j=0; j<len; j++) { sb.append("" + ids[j] + "=" + vals[j]+"\n"); } makeExternalFile(field, sb.toString(),"UTF-8"); // make it visible assertU(h.query("/reloadCache",lrf.makeRequest("",""))); // test it float[] answers = new float[ids.length*2]; for (int j=0; j<len; j++) { answers[j*2] = ids[j]; answers[j*2+1] = vals[j]; } for (int j=len; j<ids.length; j++) { answers[j*2] = ids[j]; answers[j*2+1] = 1; // the default values } singleTest(field, "\0", answers); // System.out.println("Done test "+i); } purgeFieldCache(FieldCache.DEFAULT); // avoid FC insanity } @Test public void testExternalFileFieldStringKeys() throws Exception { final String extField = "foo_extfs"; final String keyField = "sfile_s"; assertU(adoc("id", "991", keyField, "AAA=AAA")); assertU(adoc("id", "992", keyField, "BBB")); assertU(adoc("id", "993", keyField, "CCC=CCC")); assertU(commit()); makeExternalFile(extField, "AAA=AAA=543210\nBBB=-8\nCCC=CCC=250","UTF-8"); singleTest(extField,"\0",991,543210,992,-8,993,250); } @Test public void testExternalFileFieldNumericKey() throws Exception { final String extField = "eff_trie"; final String keyField = "eff_ti"; assertU(adoc("id", "991", keyField, "91")); assertU(adoc("id", "992", keyField, "92")); assertU(adoc("id", "993", keyField, "93")); assertU(commit()); makeExternalFile(extField, "91=543210\n92=-8\n93=250\n=67","UTF-8"); singleTest(extField,"\0",991,543210,992,-8,993,250); } @Test public void testGeneral() throws Exception { clearIndex(); assertU(adoc("id","1", "a_tdt","2009-08-31T12:10:10.123Z", "b_tdt","2009-08-31T12:10:10.124Z")); assertU(adoc("id","2", "a_t","how now brown cow")); assertU(commit()); // create more than one segment assertU(adoc("id","3", "a_t","brown cow")); assertU(adoc("id","4")); assertU(commit()); // create more than one segment assertU(adoc("id","5")); assertU(adoc("id","6", "a_t","cow cow cow cow cow")); assertU(commit()); // test relevancy functions assertQ(req("fl","*,score","q", "{!func}numdocs()", "fq","id:6"), "//float[@name='score']='6.0'"); assertQ(req("fl","*,score","q", "{!func}maxdoc()", "fq","id:6"), "//float[@name='score']='6.0'"); assertQ(req("fl","*,score","q", "{!func}docfreq(a_t,cow)", "fq","id:6"), "//float[@name='score']='3.0'"); assertQ(req("fl","*,score","q", "{!func}docfreq('a_t','cow')", "fq","id:6"), "//float[@name='score']='3.0'"); assertQ(req("fl","*,score","q", "{!func}docfreq($field,$value)", "fq","id:6", "field","a_t", "value","cow"), "//float[@name='score']='3.0'"); assertQ(req("fl","*,score","q", "{!func}termfreq(a_t,cow)", "fq","id:6"), "//float[@name='score']='5.0'"); TFIDFSimilarity similarity = new DefaultSimilarity(); // make sure it doesn't get a NPE if no terms are present in a field. assertQ(req("fl","*,score","q", "{!func}termfreq(nofield_t,cow)", "fq","id:6"), "//float[@name='score']='0.0'"); assertQ(req("fl","*,score","q", "{!func}docfreq(nofield_t,cow)", "fq","id:6"), "//float[@name='score']='0.0'"); assertQ(req("fl","*,score","q", "{!func}idf(nofield_t,cow)", "fq","id:6"), "//float[@name='score']='" + similarity.idf(0,6) + "'"); assertQ(req("fl","*,score","q", "{!func}tf(nofield_t,cow)", "fq","id:6"), "//float[@name='score']='" + similarity.tf(0) + "'"); assertQ(req("fl","*,score","q", "{!func}idf(a_t,cow)", "fq","id:6"), "//float[@name='score']='" + similarity.idf(3,6) + "'"); assertQ(req("fl","*,score","q", "{!func}tf(a_t,cow)", "fq","id:6"), "//float[@name='score']='" + similarity.tf(5) + "'"); FieldInvertState state = new FieldInvertState(); state.setBoost(1.0f); state.setLength(4); assertQ(req("fl","*,score","q", "{!func}norm(a_t)", "fq","id:2"), "//float[@name='score']='" + similarity.decodeNormValue(similarity.computeNorm(state)) + "'"); // sqrt(4)==2 and is exactly representable when quantized to a byte // test that ord and rord are working on a global index basis, not just // at the segment level (since Lucene 2.9 has switched to per-segment searching) assertQ(req("fl","*,score","q", "{!func}ord(id)", "fq","id:6"), "//float[@name='score']='6.0'"); assertQ(req("fl","*,score","q", "{!func}top(ord(id))", "fq","id:6"), "//float[@name='score']='6.0'"); assertQ(req("fl","*,score","q", "{!func}rord(id)", "fq","id:1"),"//float[@name='score']='6.0'"); assertQ(req("fl","*,score","q", "{!func}top(rord(id))", "fq","id:1"),"//float[@name='score']='6.0'"); // test that we can subtract dates to millisecond precision assertQ(req("fl","*,score","q", "{!func}ms(a_tdt,b_tdt)", "fq","id:1"), "//float[@name='score']='-1.0'"); assertQ(req("fl","*,score","q", "{!func}ms(b_tdt,a_tdt)", "fq","id:1"), "//float[@name='score']='1.0'"); assertQ(req("fl","*,score","q", "{!func}ms(2009-08-31T12:10:10.125Z,2009-08-31T12:10:10.124Z)", "fq","id:1"), "//float[@name='score']='1.0'"); assertQ(req("fl","*,score","q", "{!func}ms(2009-08-31T12:10:10.124Z,a_tdt)", "fq","id:1"), "//float[@name='score']='1.0'"); assertQ(req("fl","*,score","q", "{!func}ms(2009-08-31T12:10:10.125Z,b_tdt)", "fq","id:1"), "//float[@name='score']='1.0'"); assertQ(req("fl","*,score","q", "{!func}ms(2009-08-31T12:10:10.125Z/SECOND,2009-08-31T12:10:10.124Z/SECOND)", "fq","id:1"), "//float[@name='score']='0.0'"); // test that we can specify "NOW" assertQ(req("fl","*,score","q", "{!func}ms(NOW)", "NOW","1000"), "//float[@name='score']='1000.0'"); for (int i=100; i<112; i++) { assertU(adoc("id",""+i, "text","batman")); } assertU(commit()); assertU(adoc("id","120", "text","batman superman")); // in a smaller segment assertU(adoc("id","121", "text","superman")); assertU(commit()); // superman has a higher df (thus lower idf) in one segment, but reversed in the complete index String q ="{!func}query($qq)"; String fq="id:120"; assertQ(req("fl","*,score","q", q, "qq","text:batman", "fq",fq), "//float[@name='score']<'1.0'"); assertQ(req("fl","*,score","q", q, "qq","text:superman", "fq",fq), "//float[@name='score']>'1.0'"); // test weighting through a function range query assertQ(req("fl","*,score", "fq",fq, "q", "{!frange l=1 u=10}query($qq)", "qq","text:superman"), "//*[@numFound='1']"); // test weighting through a complex function q ="{!func}sub(div(sum(0.0,product(1,query($qq))),1),0)"; assertQ(req("fl","*,score","q", q, "qq","text:batman", "fq",fq), "//float[@name='score']<'1.0'"); assertQ(req("fl","*,score","q", q, "qq","text:superman", "fq",fq), "//float[@name='score']>'1.0'"); // test full param dereferencing assertQ(req("fl","*,score","q", "{!func}add($v1,$v2)", "v1","add($v3,$v4)", "v2","1", "v3","2", "v4","5" , "fq","id:1"), "//float[@name='score']='8.0'"); // test ability to parse multiple values assertQ(req("fl","*,score","q", "{!func}dist(2,vector(1,1),$pt)", "pt","3,1" , "fq","id:1"), "//float[@name='score']='2.0'"); // test that extra stuff after a function causes an error try { assertQ(req("fl","*,score","q", "{!func}10 wow dude ignore_exception")); fail(); } catch (Exception e) { // OK } // test that sorting by function weights correctly. superman should sort higher than batman due to idf of the whole index assertQ(req("q", "*:*", "fq","id:120 OR id:121", "sort","{!func v=$sortfunc} desc", "sortfunc","query($qq)", "qq","text:(batman OR superman)") ,"*//doc[1]/float[.='120.0']" ,"*//doc[2]/float[.='121.0']" ); purgeFieldCache(FieldCache.DEFAULT); // avoid FC insanity } /** * test collection-level term stats (new in 4.x indexes) */ public void testTotalTermFreq() throws Exception { assumeFalse("PreFlex codec does not support collection-level term stats", "Lucene3x".equals(Codec.getDefault().getName())); clearIndex(); assertU(adoc("id","1", "a_tdt","2009-08-31T12:10:10.123Z", "b_tdt","2009-08-31T12:10:10.124Z")); assertU(adoc("id","2", "a_t","how now brown cow")); assertU(commit()); // create more than one segment assertU(adoc("id","3", "a_t","brown cow")); assertU(adoc("id","4")); assertU(commit()); // create more than one segment assertU(adoc("id","5")); assertU(adoc("id","6", "a_t","cow cow cow cow cow")); assertU(commit()); assertQ(req("fl","*,score","q", "{!func}totaltermfreq('a_t','cow')", "fq","id:6"), "//float[@name='score']='7.0'"); assertQ(req("fl","*,score","q", "{!func}ttf(a_t,'cow')", "fq","id:6"), "//float[@name='score']='7.0'"); assertQ(req("fl","*,score","q", "{!func}sumtotaltermfreq('a_t')", "fq","id:6"), "//float[@name='score']='11.0'"); assertQ(req("fl","*,score","q", "{!func}sttf(a_t)", "fq","id:6"), "//float[@name='score']='11.0'"); } @Test public void testSortByFunc() throws Exception { assertU(adoc("id", "1", "const_s", "xx", "x_i", "100", "1_s", "a", "x:x_i", "100", "1-1_s", "a")); assertU(adoc("id", "2", "const_s", "xx", "x_i", "300", "1_s", "c", "x:x_i", "300", "1-1_s", "c")); assertU(adoc("id", "3", "const_s", "xx", "x_i", "200", "1_s", "b", "x:x_i", "200", "1-1_s", "b")); assertU(commit()); String desc = "/response/docs==[{'x_i':300},{'x_i':200},{'x_i':100}]"; String asc = "/response/docs==[{'x_i':100},{'x_i':200},{'x_i':300}]"; String threeonetwo = "/response/docs==[{'x_i':200},{'x_i':100},{'x_i':300}]"; String q = "id:[1 TO 3]"; assertJQ(req("q",q, "fl","x_i", "sort","add(x_i,x_i) desc") ,desc ); // param sub of entire function assertJQ(req("q",q, "fl","x_i", "sort", "const_s asc, $x asc", "x","add(x_i,x_i)") ,asc ); // multiple functions assertJQ(req("q",q, "fl","x_i", "sort", "$x asc, const_s asc, $y desc", "x", "5", "y","add(x_i,x_i)") ,desc ); // multiple functions inline assertJQ(req("q",q, "fl","x_i", "sort", "add( 10 , 10 ) asc, const_s asc, add(x_i , $const) desc", "const","50") ,desc ); // test function w/ local params + func inline assertJQ(req("q",q, "fl","x_i", "sort", "const_s asc, {!key=foo}add(x_i,x_i) desc") ,desc ); assertJQ(req("q",q, "fl","x_i", "sort", "{!key=foo}add(x_i,x_i) desc, const_s asc") ,desc ); // test multiple functions w/ local params + func inline assertJQ(req("q",q, "fl","x_i", "sort", "{!key=bar}add(10,20) asc, const_s asc, {!key=foo}add(x_i,x_i) desc") ,desc ); // test multiple functions w/ local param value not inlined assertJQ(req("q",q, "fl","x_i", "sort", "{!key=bar v=$s1} asc, {!key=foo v=$s2} desc", "s1","add(3,4)", "s2","add(x_i,5)") ,desc ); // no space between inlined localparams and sort order assertJQ(req("q",q, "fl","x_i", "sort", "{!key=bar v=$s1}asc,const_s asc,{!key=foo v=$s2}desc", "s1","add(3,4)", "s2","add(x_i,5)") ,desc ); // field name that isn't a legal java Identifier // and starts with a number to trick function parser assertJQ(req("q",q, "fl","x_i", "sort", "1_s asc") ,asc ); assertJQ(req("q",q, "fl","x_i", "sort", "x:x_i desc") ,desc ); assertJQ(req("q",q, "fl","x_i", "sort", "1-1_s asc") ,asc ); // really ugly field name that isn't a java Id, and can't be // parsed as a func, but sorted fine in Solr 1.4 assertJQ(req("q",q, "fl","x_i", "sort", "[]_s asc, {!key=foo}add(x_i,x_i) desc") ,desc ); // use localparms to sort by a lucene query, then a function assertJQ(req("q",q, "fl","x_i", "sort", "{!lucene v='id:3'}desc, {!key=foo}add(x_i,x_i) asc") ,threeonetwo ); } @Test public void testDegreeRads() throws Exception { assertU(adoc("id", "1", "x_td", "0", "y_td", "0")); assertU(adoc("id", "2", "x_td", "90", "y_td", String.valueOf(Math.PI / 2))); assertU(adoc("id", "3", "x_td", "45", "y_td", String.valueOf(Math.PI / 4))); assertU(commit()); assertQ(req("fl", "*,score", "q", "{!func}rad(x_td)", "fq", "id:1"), "//float[@name='score']='0.0'"); assertQ(req("fl", "*,score", "q", "{!func}rad(x_td)", "fq", "id:2"), "//float[@name='score']='" + (float) (Math.PI / 2) + "'"); assertQ(req("fl", "*,score", "q", "{!func}rad(x_td)", "fq", "id:3"), "//float[@name='score']='" + (float) (Math.PI / 4) + "'"); assertQ(req("fl", "*,score", "q", "{!func}deg(y_td)", "fq", "id:1"), "//float[@name='score']='0.0'"); assertQ(req("fl", "*,score", "q", "{!func}deg(y_td)", "fq", "id:2"), "//float[@name='score']='90.0'"); assertQ(req("fl", "*,score", "q", "{!func}deg(y_td)", "fq", "id:3"), "//float[@name='score']='45.0'"); } @Test public void testStrDistance() throws Exception { assertU(adoc("id", "1", "x_s", "foil")); assertU(commit()); assertQ(req("fl", "*,score", "q", "{!func}strdist(x_s, 'foit', edit)", "fq", "id:1"), "//float[@name='score']='0.75'"); assertQ(req("fl", "*,score", "q", "{!func}strdist(x_s, 'foit', jw)", "fq", "id:1"), "//float[@name='score']='0.8833333'"); assertQ(req("fl", "*,score", "q", "{!func}strdist(x_s, 'foit', ngram, 2)", "fq", "id:1"), "//float[@name='score']='0.875'"); } public void dofunc(String func, double val) throws Exception { // String sval = Double.toString(val); String sval = Float.toString((float)val); assertQ(req("fl", "*,score", "defType","func", "fq","id:1", "q",func), "//float[@name='score']='" + sval + "'"); } @Test public void testFuncs() throws Exception { assertU(adoc("id", "1", "foo_d", "9")); assertU(commit()); dofunc("1.0", 1.0); dofunc("e()", Math.E); dofunc("pi()", Math.PI); dofunc("add(2,3)", 2+3); dofunc("mul(2,3)", 2*3); dofunc("rad(45)", Math.toRadians(45)); dofunc("deg(.5)", Math.toDegrees(.5)); dofunc("sqrt(9)", Math.sqrt(9)); dofunc("cbrt(8)", Math.cbrt(8)); dofunc("max(0,1)", Math.max(0,1)); dofunc("max(10,3,8,7,5,4)", Math.max(Math.max(Math.max(Math.max(Math.max(10,3),8),7),5),4)); dofunc("min(0,1)", Math.min(0,1)); dofunc("min(10,3,8,7,5,4)", Math.min(Math.min(Math.min(Math.min(Math.min(10,3),8),7),5),4)); dofunc("log(100)", Math.log10(100)); dofunc("ln(3)", Math.log(3)); dofunc("exp(1)", Math.exp(1)); dofunc("sin(.5)", Math.sin(.5)); dofunc("cos(.5)", Math.cos(.5)); dofunc("tan(.5)", Math.tan(.5)); dofunc("asin(.5)", Math.asin(.5)); dofunc("acos(.5)", Math.acos(.5)); dofunc("atan(.5)", Math.atan(.5)); dofunc("sinh(.5)", Math.sinh(.5)); dofunc("cosh(.5)", Math.cosh(.5)); dofunc("tanh(.5)", Math.tanh(.5)); dofunc("ceil(2.3)", Math.ceil(2.3)); dofunc("floor(2.3)", Math.floor(2.3)); dofunc("rint(2.3)", Math.rint(2.3)); dofunc("pow(2,0.5)", Math.pow(2,0.5)); dofunc("hypot(3,4)", Math.hypot(3,4)); dofunc("atan2(.25,.5)", Math.atan2(.25,.5)); } /** * verify that both the field("...") value source parser as well as * ExternalFileField work with esoteric field names */ @Test public void testExternalFieldValueSourceParser() { clearIndex(); String field = "CoMpleX fieldName _extf"; String fieldAsFunc = "field(\"CoMpleX fieldName _extf\")"; float[] ids = {100,-4,0,10,25,5,77,23,55,-78,-45,-24,63,78,94,22,34,54321,261,-627}; createIndex(null,ids); // Unsorted field, largest first makeExternalFile(field, "54321=543210\n0=-999\n25=250","UTF-8"); // test identity (straight field value) singleTest(fieldAsFunc, "\0", 54321, 543210, 0,-999, 25,250, 100, 1); Object orig = FileFloatSource.onlyForTesting; singleTest(fieldAsFunc, "log(\0)"); // make sure the values were cached assertTrue(orig == FileFloatSource.onlyForTesting); singleTest(fieldAsFunc, "sqrt(\0)"); assertTrue(orig == FileFloatSource.onlyForTesting); makeExternalFile(field, "0=1","UTF-8"); assertU(adoc("id", "10000")); // will get same reader if no index change assertU(commit()); singleTest(fieldAsFunc, "sqrt(\0)"); assertTrue(orig != FileFloatSource.onlyForTesting); purgeFieldCache(FieldCache.DEFAULT); // avoid FC insanity } /** * some platforms don't allow quote characters in filenames, so * in addition to testExternalFieldValueSourceParser above, test a field * name with quotes in it that does NOT use ExternalFileField * @see #testExternalFieldValueSourceParser */ @Test public void testFieldValueSourceParser() { clearIndex(); String field = "CoMpleX \" fieldName _f"; String fieldAsFunc = "field(\"CoMpleX \\\" fieldName _f\")"; float[] ids = {100,-4,0,10,25,5,77,1}; createIndex(field, ids); // test identity (straight field value) singleTest(fieldAsFunc, "\0", 100,100, -4,-4, 0,0, 10,10, 25,25, 5,5, 77,77, 1,1); singleTest(fieldAsFunc, "sqrt(\0)", 100,10, 25,5, 0,0, 1,1); singleTest(fieldAsFunc, "log(\0)", 1,0); purgeFieldCache(FieldCache.DEFAULT); // avoid FC insanity } @Test public void testBooleanFunctions() throws Exception { assertU(adoc("id", "1", "text", "hello", "foo_s","A", "foo_ti", "0", "foo_tl","0")); assertU(adoc("id", "2" , "foo_ti","10", "foo_tl","11")); assertU(commit()); // true and false functions and constants assertJQ(req("q", "id:1", "fl", "t:true(),f:false(),tt:{!func}true,ff:{!func}false") , "/response/docs/[0]=={'t':true,'f':false,'tt':true,'ff':false}"); // test that exists(query) depends on the query matching the document assertJQ(req("q", "id:1", "fl", "t:exists(query($q1)),f:exists(query($q2))", "q1","text:hello", "q2","text:there") , "/response/docs/[0]=={'t':true,'f':false}"); // test if() assertJQ(req("q", "id:1", "fl", "a1:if(true,'A','B')", "fl","b1:if(false,'A','B')") , "/response/docs/[0]=={'a1':'A', 'b1':'B'}"); // test boolean operators assertJQ(req("q", "id:1", "fl", "t1:and(true,true)", "fl","f1:and(true,false)", "fl","f2:and(false,true)", "fl","f3:and(false,false)") , "/response/docs/[0]=={'t1':true, 'f1':false, 'f2':false, 'f3':false}"); assertJQ(req("q", "id:1", "fl", "t1:or(true,true)", "fl","t2:or(true,false)", "fl","t3:or(false,true)", "fl","f1:or(false,false)") , "/response/docs/[0]=={'t1':true, 't2':true, 't3':true, 'f1':false}"); assertJQ(req("q", "id:1", "fl", "f1:xor(true,true)", "fl","t1:xor(true,false)", "fl","t2:xor(false,true)", "fl","f2:xor(false,false)") , "/response/docs/[0]=={'t1':true, 't2':true, 'f1':false, 'f2':false}"); assertJQ(req("q", "id:1", "fl", "t:not(false),f:not(true)") , "/response/docs/[0]=={'t':true, 'f':false}"); // def(), the default function that returns the first value that exists assertJQ(req("q", "id:1", "fl", "x:def(id,123.0), y:def(foo_f,234.0)") , "/response/docs/[0]=={'x':1.0, 'y':234.0}"); assertJQ(req("q", "id:1", "fl", "x:def(foo_s,'Q'), y:def(missing_s,'W')") , "/response/docs/[0]=={'x':'A', 'y':'W'}"); // test constant conversion to boolean assertJQ(req("q", "id:1", "fl", "a:not(0), b:not(1), c:not(0.0), d:not(1.1), e:not('A')") , "/response/docs/[0]=={'a':true, 'b':false, 'c':true, 'd':false, 'e':false}"); } @Test public void testPseudoFieldFunctions() throws Exception { assertU(adoc("id", "1", "text", "hello", "foo_s","A")); assertU(adoc("id", "2")); assertU(commit()); assertJQ(req("q", "id:1", "fl", "a:1,b:2.0,c:'X',d:{!func}foo_s,e:{!func}bar_s") // if exists() is false, no pseudo-field should be added , "/response/docs/[0]=={'a':1, 'b':2.0,'c':'X','d':'A'}"); } }
LUCENE-3687: Allow similarity to encode norms other than a single byte git-svn-id: 308d55f399f3bd9aa0560a10e81a003040006c48@1232014 13f79535-47bb-0310-9956-ffa450edef68
core/src/test/org/apache/solr/search/function/TestFunctionQuery.java
LUCENE-3687: Allow similarity to encode norms other than a single byte
Java
apache-2.0
a582ca6018e503d6362ef942018d51211e5ebab5
0
tufangorel/hazelcast,emrahkocaman/hazelcast,dsukhoroslov/hazelcast,mdogan/hazelcast,mdogan/hazelcast,dbrimley/hazelcast,lmjacksoniii/hazelcast,mesutcelik/hazelcast,lmjacksoniii/hazelcast,juanavelez/hazelcast,mesutcelik/hazelcast,emre-aydin/hazelcast,Donnerbart/hazelcast,dbrimley/hazelcast,dbrimley/hazelcast,emrahkocaman/hazelcast,tufangorel/hazelcast,emre-aydin/hazelcast,tombujok/hazelcast,tkountis/hazelcast,Donnerbart/hazelcast,tkountis/hazelcast,emre-aydin/hazelcast,juanavelez/hazelcast,tkountis/hazelcast,dsukhoroslov/hazelcast,mdogan/hazelcast,mesutcelik/hazelcast,tombujok/hazelcast,Donnerbart/hazelcast,tufangorel/hazelcast
/* * Copyright (c) 2008-2013, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.map.proxy; import com.hazelcast.concurrent.lock.proxy.LockProxySupport; import com.hazelcast.config.EntryListenerConfig; import com.hazelcast.config.MapConfig; import com.hazelcast.config.MapIndexConfig; import com.hazelcast.core.EntryListener; import com.hazelcast.core.EntryView; import com.hazelcast.core.HazelcastInstanceAware; import com.hazelcast.core.Member; import com.hazelcast.instance.MemberImpl; import com.hazelcast.map.*; import com.hazelcast.map.operation.*; import com.hazelcast.monitor.LocalMapStats; import com.hazelcast.monitor.impl.LocalMapStatsImpl; import com.hazelcast.nio.ClassLoaderUtil; import com.hazelcast.nio.serialization.Data; import com.hazelcast.partition.PartitionService; import com.hazelcast.partition.PartitionView; import com.hazelcast.query.Predicate; import com.hazelcast.spi.*; import com.hazelcast.spi.impl.BinaryOperationFactory; import com.hazelcast.util.ExceptionUtil; import com.hazelcast.util.IterationType; import com.hazelcast.util.QueryResultSet; import com.hazelcast.util.ThreadUtil; import java.util.*; import java.util.Map.Entry; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import static com.hazelcast.map.MapService.SERVICE_NAME; /** * @author enesakar 1/17/13 */ abstract class MapProxySupport extends AbstractDistributedObject<MapService> { protected static final String NULL_KEY_IS_NOT_ALLOWED = "Null key is not allowed!"; protected static final String NULL_VALUE_IS_NOT_ALLOWED = "Null value is not allowed!"; protected final String name; protected final MapConfig mapConfig; protected final LocalMapStatsImpl localMapStats; protected final LockProxySupport lockSupport; protected MapProxySupport(final String name, final MapService service, NodeEngine nodeEngine) { super(nodeEngine, service); this.name = name; mapConfig = service.getMapContainer(name).getMapConfig(); localMapStats = service.getLocalMapStatsImpl(name); lockSupport = new LockProxySupport(new DefaultObjectNamespace(MapService.SERVICE_NAME, name)); initializeListeners(nodeEngine); initializeIndexes(); } private void initializeIndexes() { for (MapIndexConfig index : mapConfig.getMapIndexConfigs()) { if (index.getAttribute() != null) { addIndex(index.getAttribute(), index.isOrdered()); } } } private void initializeListeners(NodeEngine nodeEngine) { List<EntryListenerConfig> listenerConfigs = mapConfig.getEntryListenerConfigs(); for (EntryListenerConfig listenerConfig : listenerConfigs) { EntryListener listener = null; if (listenerConfig.getImplementation() != null) { listener = listenerConfig.getImplementation(); } else if (listenerConfig.getClassName() != null) { try { listener = ClassLoaderUtil.newInstance(nodeEngine.getConfigClassLoader(), listenerConfig.getClassName()); } catch (Exception e) { throw ExceptionUtil.rethrow(e); } } if (listener != null) { if (listener instanceof HazelcastInstanceAware) { ((HazelcastInstanceAware) listener).setHazelcastInstance(nodeEngine.getHazelcastInstance()); } if (listenerConfig.isLocal()) { addLocalEntryListener(listener); } else { addEntryListenerInternal(listener, null, listenerConfig.isIncludeValue()); } } } } // this operation returns the object in data format except it is got from near-cache and near-cache memory format is object. protected Object getInternal(Data key) { final MapService mapService = getService(); final boolean nearCacheEnabled = mapConfig.isNearCacheEnabled(); if (nearCacheEnabled) { Object cached = mapService.getFromNearCache(name, key); if (cached != null) { mapService.interceptAfterGet(name, cached); return cached; } } // todo action for read-backup true is not well tested. if (mapConfig.isReadBackupData()) { int backupCount = mapConfig.getTotalBackupCount(); PartitionService partitionService = mapService.getNodeEngine().getPartitionService(); for (int i = 0; i <= backupCount; i++) { int partitionId = partitionService.getPartitionId(key); PartitionView partition = partitionService.getPartition(partitionId); if (getNodeEngine().getThisAddress().equals(partition.getReplicaAddress(i))) { Object val = mapService.getPartitionContainer(partitionId).getRecordStore(name).get(key); if (val != null) { mapService.interceptAfterGet(name, val); return val; } } } } GetOperation operation = new GetOperation(name, key); Data result = (Data) invokeOperation(key, operation); if (nearCacheEnabled) { final NodeEngine nodeEngine = getNodeEngine(); int partitionId = nodeEngine.getPartitionService().getPartitionId(key); if (!nodeEngine.getPartitionService().getPartitionOwner(partitionId).equals(nodeEngine.getClusterService().getThisAddress())) { mapService.putNearCache(name, key, result); } } return result; } protected Future<Data> getAsyncInternal(final Data key) { final NodeEngine nodeEngine = getNodeEngine(); int partitionId = nodeEngine.getPartitionService().getPartitionId(key); GetOperation operation = new GetOperation(name, key); try { Invocation invocation = nodeEngine.getOperationService().createInvocationBuilder(SERVICE_NAME, operation, partitionId) .build(); return invocation.invoke(); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } protected Data putInternal(final Data key, final Data value, final long ttl, final TimeUnit timeunit) { PutOperation operation = new PutOperation(name, key, value, getTimeInMillis(ttl, timeunit)); return (Data) invokeOperation(key, operation); } protected boolean tryPutInternal(final Data key, final Data value, final long timeout, final TimeUnit timeunit) { TryPutOperation operation = new TryPutOperation(name, key, value, getTimeInMillis(timeout, timeunit)); return (Boolean) invokeOperation(key, operation); } protected Data putIfAbsentInternal(final Data key, final Data value, final long ttl, final TimeUnit timeunit) { PutIfAbsentOperation operation = new PutIfAbsentOperation(name, key, value, getTimeInMillis(ttl, timeunit)); return (Data) invokeOperation(key, operation); } protected void putTransientInternal(final Data key, final Data value, final long ttl, final TimeUnit timeunit) { PutTransientOperation operation = new PutTransientOperation(name, key, value, getTimeInMillis(ttl, timeunit)); invokeOperation(key, operation); } private Object invokeOperation(Data key, KeyBasedMapOperation operation) { final NodeEngine nodeEngine = getNodeEngine(); int partitionId = nodeEngine.getPartitionService().getPartitionId(key); operation.setThreadId(ThreadUtil.getThreadId()); try { Invocation invocation = nodeEngine.getOperationService().createInvocationBuilder(SERVICE_NAME, operation, partitionId) .build(); Future f; Object o; if (mapConfig.isStatisticsEnabled()) { long time = System.currentTimeMillis(); f = invocation.invoke(); o = f.get(); if (operation instanceof BasePutOperation) localMapStats.incrementPuts(System.currentTimeMillis() - time); else if (operation instanceof BaseRemoveOperation) localMapStats.incrementRemoves(System.currentTimeMillis() - time); else if (operation instanceof GetOperation) localMapStats.incrementGets(System.currentTimeMillis() - time); } else { f = invocation.invoke(); o = f.get(); } return o; } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } protected Future<Data> putAsyncInternal(final Data key, final Data value, final long ttl, final TimeUnit timeunit) { final NodeEngine nodeEngine = getNodeEngine(); int partitionId = nodeEngine.getPartitionService().getPartitionId(key); PutOperation operation = new PutOperation(name, key, value, getTimeInMillis(ttl, timeunit)); operation.setThreadId(ThreadUtil.getThreadId()); try { Invocation invocation = nodeEngine.getOperationService().createInvocationBuilder(SERVICE_NAME, operation, partitionId) .build(); return invocation.invoke(); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } protected boolean replaceInternal(final Data key, final Data oldValue, final Data newValue) { ReplaceIfSameOperation operation = new ReplaceIfSameOperation(name, key, oldValue, newValue); return (Boolean) invokeOperation(key, operation); } protected Data replaceInternal(final Data key, final Data value) { ReplaceOperation operation = new ReplaceOperation(name, key, value); return (Data) invokeOperation(key, operation); } protected void setInternal(final Data key, final Data value, final long ttl, final TimeUnit timeunit) { SetOperation operation = new SetOperation(name, key, value, timeunit.toMillis(ttl)); invokeOperation(key, operation); } protected boolean evictInternal(final Data key) { EvictOperation operation = new EvictOperation(name, key, false); return (Boolean) invokeOperation(key, operation); } protected Data removeInternal(Data key) { RemoveOperation operation = new RemoveOperation(name, key); return (Data) invokeOperation(key, operation); } protected void deleteInternal(Data key) { RemoveOperation operation = new RemoveOperation(name, key); invokeOperation(key, operation); } protected boolean removeInternal(final Data key, final Data value) { RemoveIfSameOperation operation = new RemoveIfSameOperation(name, key, value); return (Boolean) invokeOperation(key, operation); } protected boolean tryRemoveInternal(final Data key, final long timeout, final TimeUnit timeunit) { TryRemoveOperation operation = new TryRemoveOperation(name, key, getTimeInMillis(timeout, timeunit)); return (Boolean) invokeOperation(key, operation); } protected Future<Data> removeAsyncInternal(final Data key) { final NodeEngine nodeEngine = getNodeEngine(); int partitionId = nodeEngine.getPartitionService().getPartitionId(key); RemoveOperation operation = new RemoveOperation(name, key); operation.setThreadId(ThreadUtil.getThreadId()); try { Invocation invocation = nodeEngine.getOperationService().createInvocationBuilder(SERVICE_NAME, operation, partitionId) .build(); return invocation.invoke(); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } protected boolean containsKeyInternal(Data key) { // TODO: containsKey should check near-cache first! final NodeEngine nodeEngine = getNodeEngine(); int partitionId = nodeEngine.getPartitionService().getPartitionId(key); ContainsKeyOperation containsKeyOperation = new ContainsKeyOperation(name, key); containsKeyOperation.setServiceName(SERVICE_NAME); try { Invocation invocation = nodeEngine.getOperationService().createInvocationBuilder(SERVICE_NAME, containsKeyOperation, partitionId).build(); Future f = invocation.invoke(); return (Boolean) getService().toObject(f.get()); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } public int size() { final NodeEngine nodeEngine = getNodeEngine(); try { Map<Integer, Object> results = nodeEngine.getOperationService() .invokeOnAllPartitions(SERVICE_NAME, new SizeOperationFactory(name)); int total = 0; for (Object result : results.values()) { Integer size = (Integer) getService().toObject(result); total += size; } return total; } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } public boolean containsValueInternal(Data dataValue) { final NodeEngine nodeEngine = getNodeEngine(); try { Map<Integer, Object> results = nodeEngine.getOperationService() .invokeOnAllPartitions(SERVICE_NAME, new ContainsValueOperationFactory(name, dataValue)); for (Object result : results.values()) { Boolean contains = (Boolean) getService().toObject(result); if (contains) return true; } return false; } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } public boolean isEmpty() { final NodeEngine nodeEngine = getNodeEngine(); try { Map<Integer, Object> results = nodeEngine.getOperationService() .invokeOnAllPartitions(SERVICE_NAME, new BinaryOperationFactory(new MapIsEmptyOperation(name), nodeEngine)); for (Object result : results.values()) { if (!(Boolean) getService().toObject(result)) return false; } return true; } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } protected Map<Data, Data> getAllDataInternal(final Set<Data> keys) { return null; } protected Map<Object, Object> getAllObjectInternal(final Set<Data> keys) { final NodeEngine nodeEngine = getNodeEngine(); Map<Object, Object> result = new HashMap<Object, Object>(); Map<Integer, Object> responses = null; try { responses = nodeEngine.getOperationService() .invokeOnAllPartitions(SERVICE_NAME, new MapGetAllOperationFactory(name, keys)); for (Object response : responses.values()) { Set<Map.Entry<Data, Data>> entries = ((MapEntrySet) getService().toObject(response)).getEntrySet(); for (Entry<Data, Data> entry : entries) { result.put(getService().toObject(entry.getKey()), getService().toObject(entry.getValue())); } } } catch (Exception e) { throw ExceptionUtil.rethrow(e); } return result; } protected void putAllInternal(final Map<? extends Object, ? extends Object> entries) { final NodeEngine nodeEngine = getNodeEngine(); final MapService mapService = getService(); int factor = 3; PartitionService partitionService = nodeEngine.getPartitionService(); OperationService operationService = nodeEngine.getOperationService(); int partitionCount = partitionService.getPartitionCount(); boolean tooManyEntries = entries.size() > (partitionCount * factor); try { if (tooManyEntries) { List<Future> flist = new LinkedList<Future>(); Map<Integer, MapEntrySet> entryMap = new HashMap<Integer, MapEntrySet>(); for (Entry entry : entries.entrySet()) { int partitionId = partitionService.getPartitionId(entry.getKey()); if(!entryMap.containsKey(partitionId)) { entryMap.put(partitionId, new MapEntrySet()); } entryMap.get(partitionId).add(new AbstractMap.SimpleImmutableEntry<Data,Data>(mapService.toData(entry.getKey()), mapService.toData(entry.getValue()))); } for (Integer partitionId : entryMap.keySet()) { PutAllOperation op = new PutAllOperation(name, entryMap.get(partitionId)); op.setPartitionId(partitionId); flist.add(operationService.createInvocationBuilder(SERVICE_NAME, op, partitionId).build().invoke()); } for (Future future : flist) { future.get(); } } else { for (Entry entry : entries.entrySet()) { if(entry.getValue() == null){ throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED); } putInternal(mapService.toData(entry.getKey()), mapService.toData(entry.getValue()), -1, TimeUnit.SECONDS); } } } catch (Exception e) { throw ExceptionUtil.rethrow(e); } } protected Set<Data> keySetInternal() { final NodeEngine nodeEngine = getNodeEngine(); try { Map<Integer, Object> results = nodeEngine.getOperationService() .invokeOnAllPartitions(SERVICE_NAME, new BinaryOperationFactory(new MapKeySetOperation(name), nodeEngine)); Set<Data> keySet = new HashSet<Data>(); for (Object result : results.values()) { Set keys = ((MapKeySet) getService().toObject(result)).getKeySet(); keySet.addAll(keys); } return keySet; } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } protected Set<Data> localKeySetInternal() { final NodeEngine nodeEngine = getNodeEngine(); try { Map<Integer, Object> results = nodeEngine.getOperationService() .invokeOnTargetPartitions(SERVICE_NAME, new BinaryOperationFactory(new MapKeySetOperation(name), nodeEngine), nodeEngine.getThisAddress()); Set<Data> keySet = new HashSet<Data>(); for (Object result : results.values()) { Set keys = ((MapKeySet) getService().toObject(result)).getKeySet(); keySet.addAll(keys); } return keySet; } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } public void flush() { final NodeEngine nodeEngine = getNodeEngine(); try { // todo add a feature to mancenter to sync cache to db completely nodeEngine.getOperationService() .invokeOnAllPartitions(SERVICE_NAME, new BinaryOperationFactory(new MapFlushOperation(name), nodeEngine)); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } protected Collection<Data> valuesInternal() { final NodeEngine nodeEngine = getNodeEngine(); try { Map<Integer, Object> results = nodeEngine.getOperationService() .invokeOnAllPartitions(SERVICE_NAME, new BinaryOperationFactory(new MapValuesOperation(name), nodeEngine)); List<Data> values = new ArrayList<Data>(); for (Object result : results.values()) { values.addAll(((MapValueCollection) getService().toObject(result)).getValues()); } return values; } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } public void clearInternal() { final NodeEngine nodeEngine = getNodeEngine(); try { ClearOperation clearOperation = new ClearOperation(name); clearOperation.setServiceName(SERVICE_NAME); nodeEngine.getOperationService() .invokeOnAllPartitions(SERVICE_NAME, new BinaryOperationFactory(clearOperation, nodeEngine)); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } public String addMapInterceptorInternal(MapInterceptor interceptor) { final NodeEngine nodeEngine = getNodeEngine(); final MapService mapService = getService(); String id = mapService.addInterceptor(name, interceptor); AddInterceptorOperation operation = new AddInterceptorOperation(id, interceptor, name); Collection<MemberImpl> members = nodeEngine.getClusterService().getMemberList(); for (MemberImpl member : members) { try { if (member.localMember()) continue; Invocation invocation = nodeEngine.getOperationService() .createInvocationBuilder(SERVICE_NAME, operation, member.getAddress()).build(); invocation.invoke().get(); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } return id; } public void removeMapInterceptorInternal(String id) { final NodeEngine nodeEngine = getNodeEngine(); final MapService mapService = getService(); mapService.removeInterceptor(name, id); RemoveInterceptorOperation operation = new RemoveInterceptorOperation(name, id); Collection<MemberImpl> members = nodeEngine.getClusterService().getMemberList(); for (Member member : members) { try { if (member.localMember()) continue; MemberImpl memberImpl = (MemberImpl) member; Invocation invocation = nodeEngine.getOperationService().createInvocationBuilder(SERVICE_NAME, operation, memberImpl.getAddress()).build(); invocation.invoke().get(); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } } public String addLocalEntryListener(final EntryListener listener) { final MapService mapService = getService(); return mapService.addLocalEventListener(listener, name); } protected String addEntryListenerInternal(final EntryListener listener, final Data key, final boolean includeValue) { EventFilter eventFilter = new EntryEventFilter(includeValue, key); final MapService mapService = getService(); return mapService.addEventListener(listener, eventFilter, name); } protected String addEntryListenerInternal(EntryListener listener, Predicate predicate, final Data key, final boolean includeValue) { EventFilter eventFilter = new QueryEventFilter(includeValue, key, predicate); final MapService mapService = getService(); return mapService.addEventListener(listener, eventFilter, name); } protected boolean removeEntryListenerInternal(String id) { final MapService mapService = getService(); return mapService.removeEventListener(name, id); } protected EntryView getEntryViewInternal(final Data key) { final NodeEngine nodeEngine = getNodeEngine(); int partitionId = nodeEngine.getPartitionService().getPartitionId(key); GetEntryViewOperation getEntryViewOperation = new GetEntryViewOperation(name, key); getEntryViewOperation.setServiceName(SERVICE_NAME); try { Invocation invocation = nodeEngine.getOperationService().createInvocationBuilder(SERVICE_NAME, getEntryViewOperation, partitionId).build(); Future f = invocation.invoke(); Object o = getService().toObject(f.get()); return (EntryView) o; } catch (Throwable t) { throw new RuntimeException(t); } } protected Set<Entry<Data, Data>> entrySetInternal() { final NodeEngine nodeEngine = getNodeEngine(); try { Map<Integer, Object> results = nodeEngine.getOperationService() .invokeOnAllPartitions(SERVICE_NAME, new BinaryOperationFactory(new MapEntrySetOperation(name), nodeEngine)); Set<Entry<Data, Data>> entrySet = new HashSet<Entry<Data, Data>>(); for (Object result : results.values()) { Set entries = ((MapEntrySet) getService().toObject(result)).getEntrySet(); if (entries != null) entrySet.addAll(entries); } return entrySet; } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } public Data executeOnKeyInternal(Data key, EntryProcessor entryProcessor) { final NodeEngine nodeEngine = getNodeEngine(); int partitionId = nodeEngine.getPartitionService().getPartitionId(key); EntryOperation operation = new EntryOperation(name, key, entryProcessor); operation.setThreadId(ThreadUtil.getThreadId()); try { Invocation invocation = nodeEngine.getOperationService().createInvocationBuilder(SERVICE_NAME, operation, partitionId) .build(); Future future = invocation.invoke(); return (Data) future.get(); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } public Map executeOnEntries(EntryProcessor entryProcessor) { Map result = new HashMap(); try { NodeEngine nodeEngine = getNodeEngine(); PartitionWideEntryOperation operation = new PartitionWideEntryOperation(name, entryProcessor); operation.setServiceName(SERVICE_NAME); Map<Integer, Object> results = nodeEngine.getOperationService() .invokeOnAllPartitions(SERVICE_NAME, new BinaryOperationFactory(operation, nodeEngine)); for (Object o : results.values()) { if (o != null) { final MapService service = getService(); final MapEntrySet mapEntrySet = (MapEntrySet) o; for (Entry<Data, Data> entry : mapEntrySet.getEntrySet()) { result.put(service.toObject(entry.getKey()), service.toObject(entry.getValue())); } } } } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } return result; } protected Set queryLocal(final Predicate predicate, final IterationType iterationType, final boolean dataResult) { final NodeEngine nodeEngine = getNodeEngine(); OperationService operationService = nodeEngine.getOperationService(); List<Integer> partitionIds = nodeEngine.getPartitionService().getMemberPartitions(nodeEngine.getThisAddress()); QueryResultSet result = new QueryResultSet(nodeEngine.getSerializationService(), iterationType, dataResult); List<Integer> returnedPartitionIds = new ArrayList<Integer>(); try { Invocation invocation = operationService .createInvocationBuilder(SERVICE_NAME, new QueryOperation(name, predicate), nodeEngine.getThisAddress()).build(); Future future = invocation.invoke(); QueryResult queryResult = (QueryResult) future.get(); if (queryResult != null) { returnedPartitionIds = queryResult.getPartitionIds(); result.addAll(queryResult.getResult()); } if (returnedPartitionIds.size() == partitionIds.size()) { return result; } List<Integer> missingList = new ArrayList<Integer>(); for (Integer partitionId : partitionIds) { if (!returnedPartitionIds.contains(partitionId)) missingList.add(partitionId); } List<Future> futures = new ArrayList<Future>(missingList.size()); for (Integer pid : missingList) { QueryPartitionOperation queryPartitionOperation = new QueryPartitionOperation(name, predicate); queryPartitionOperation.setPartitionId(pid); try { Future f = operationService.createInvocationBuilder(SERVICE_NAME, queryPartitionOperation, pid).build().invoke(); futures.add(f); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } for (Future f : futures) { QueryResult qResult = (QueryResult) f.get(); result.addAll(qResult.getResult()); } } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } return result; } protected Set query(final Predicate predicate, final IterationType iterationType, final boolean dataResult) { final NodeEngine nodeEngine = getNodeEngine(); OperationService operationService = nodeEngine.getOperationService(); Collection<MemberImpl> members = nodeEngine.getClusterService().getMemberList(); int partitionCount = nodeEngine.getPartitionService().getPartitionCount(); Set<Integer> plist = new HashSet<Integer>(partitionCount); QueryResultSet result = new QueryResultSet(nodeEngine.getSerializationService(), iterationType, dataResult); try { List<Future> flist = new ArrayList<Future>(); for (MemberImpl member : members) { Invocation invocation = operationService .createInvocationBuilder(SERVICE_NAME, new QueryOperation(name, predicate), member.getAddress()).build(); Future future = invocation.invoke(); flist.add(future); } for (Future future : flist) { QueryResult queryResult = (QueryResult) future.get(); if (queryResult != null) { final List<Integer> partitionIds = queryResult.getPartitionIds(); if (partitionIds != null) { plist.addAll(partitionIds); result.addAll(queryResult.getResult()); } } } if (plist.size() == partitionCount) { return result; } List<Integer> missingList = new ArrayList<Integer>(); for (int i = 0; i < partitionCount; i++) { if (!plist.contains(i)) { missingList.add(i); } } List<Future> futures = new ArrayList<Future>(missingList.size()); for (Integer pid : missingList) { QueryPartitionOperation queryPartitionOperation = new QueryPartitionOperation(name, predicate); queryPartitionOperation.setPartitionId(pid); try { Future f = operationService.createInvocationBuilder(SERVICE_NAME, queryPartitionOperation, pid).build().invoke(); futures.add(f); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } for (Future future : futures) { QueryResult queryResult = (QueryResult) future.get(); result.addAll(queryResult.getResult()); } } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } return result; } public void addIndex(final String attribute, final boolean ordered) { final NodeEngine nodeEngine = getNodeEngine(); if (attribute == null) throw new IllegalArgumentException("attribute name cannot be null"); try { AddIndexOperation addIndexOperation = new AddIndexOperation(name, attribute, ordered); Map<Integer, Object> results = nodeEngine.getOperationService() .invokeOnAllPartitions(SERVICE_NAME, new BinaryOperationFactory(addIndexOperation, nodeEngine)); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } public LocalMapStats getLocalMapStats() { return getService().createLocalMapStats(name); } protected long getTimeInMillis(final long time, final TimeUnit timeunit) { return timeunit != null ? timeunit.toMillis(time) : time; } public final Object getId() { return name; } public final String getName() { return name; } public final String getServiceName() { return SERVICE_NAME; } }
hazelcast/src/main/java/com/hazelcast/map/proxy/MapProxySupport.java
/* * Copyright (c) 2008-2013, Hazelcast, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.hazelcast.map.proxy; import com.hazelcast.concurrent.lock.proxy.LockProxySupport; import com.hazelcast.config.EntryListenerConfig; import com.hazelcast.config.MapConfig; import com.hazelcast.config.MapIndexConfig; import com.hazelcast.core.EntryListener; import com.hazelcast.core.EntryView; import com.hazelcast.core.HazelcastInstanceAware; import com.hazelcast.core.Member; import com.hazelcast.instance.MemberImpl; import com.hazelcast.map.*; import com.hazelcast.map.operation.*; import com.hazelcast.monitor.LocalMapStats; import com.hazelcast.monitor.impl.LocalMapStatsImpl; import com.hazelcast.nio.ClassLoaderUtil; import com.hazelcast.nio.serialization.Data; import com.hazelcast.partition.PartitionService; import com.hazelcast.partition.PartitionView; import com.hazelcast.query.Predicate; import com.hazelcast.spi.*; import com.hazelcast.spi.impl.BinaryOperationFactory; import com.hazelcast.util.ExceptionUtil; import com.hazelcast.util.IterationType; import com.hazelcast.util.QueryResultSet; import com.hazelcast.util.ThreadUtil; import java.util.*; import java.util.Map.Entry; import java.util.concurrent.Future; import java.util.concurrent.TimeUnit; import static com.hazelcast.map.MapService.SERVICE_NAME; /** * @author enesakar 1/17/13 */ abstract class MapProxySupport extends AbstractDistributedObject<MapService> { protected static final String NULL_KEY_IS_NOT_ALLOWED = "Null key is not allowed!"; protected static final String NULL_VALUE_IS_NOT_ALLOWED = "Null value is not allowed!"; protected final String name; protected final MapConfig mapConfig; protected final LocalMapStatsImpl localMapStats; protected final LockProxySupport lockSupport; protected MapProxySupport(final String name, final MapService service, NodeEngine nodeEngine) { super(nodeEngine, service); this.name = name; mapConfig = service.getMapContainer(name).getMapConfig(); localMapStats = service.getLocalMapStatsImpl(name); lockSupport = new LockProxySupport(new DefaultObjectNamespace(MapService.SERVICE_NAME, name)); initializeListeners(nodeEngine); initializeIndexes(); } private void initializeIndexes() { for (MapIndexConfig index : mapConfig.getMapIndexConfigs()) { if (index.getAttribute() != null) { addIndex(index.getAttribute(), index.isOrdered()); } } } private void initializeListeners(NodeEngine nodeEngine) { List<EntryListenerConfig> listenerConfigs = mapConfig.getEntryListenerConfigs(); for (EntryListenerConfig listenerConfig : listenerConfigs) { EntryListener listener = null; if (listenerConfig.getImplementation() != null) { listener = listenerConfig.getImplementation(); } else if (listenerConfig.getClassName() != null) { try { listener = ClassLoaderUtil.newInstance(nodeEngine.getConfigClassLoader(), listenerConfig.getClassName()); } catch (Exception e) { throw ExceptionUtil.rethrow(e); } } if (listener != null) { if (listener instanceof HazelcastInstanceAware) { ((HazelcastInstanceAware) listener).setHazelcastInstance(nodeEngine.getHazelcastInstance()); } if (listenerConfig.isLocal()) { addLocalEntryListener(listener); } else { addEntryListenerInternal(listener, null, listenerConfig.isIncludeValue()); } } } } // this operation returns the object in data format except it is got from near-cache and near-cache memory format is object. protected Object getInternal(Data key) { final MapService mapService = getService(); final boolean nearCacheEnabled = mapConfig.isNearCacheEnabled(); if (nearCacheEnabled) { Object cached = mapService.getFromNearCache(name, key); if (cached != null) { mapService.interceptAfterGet(name, cached); return cached; } } // todo action for read-backup true is not well tested. if (mapConfig.isReadBackupData()) { int backupCount = mapConfig.getTotalBackupCount(); PartitionService partitionService = mapService.getNodeEngine().getPartitionService(); for (int i = 0; i <= backupCount; i++) { int partitionId = partitionService.getPartitionId(key); PartitionView partition = partitionService.getPartition(partitionId); if (partition.getReplicaAddress(i).equals(getNodeEngine().getThisAddress())) { Object val = mapService.getPartitionContainer(partitionId).getRecordStore(name).get(key); if (val != null) { mapService.interceptAfterGet(name, val); return val; } } } } GetOperation operation = new GetOperation(name, key); Data result = (Data) invokeOperation(key, operation); if (nearCacheEnabled) { final NodeEngine nodeEngine = getNodeEngine(); int partitionId = nodeEngine.getPartitionService().getPartitionId(key); if (!nodeEngine.getPartitionService().getPartitionOwner(partitionId).equals(nodeEngine.getClusterService().getThisAddress())) { mapService.putNearCache(name, key, result); } } return result; } protected Future<Data> getAsyncInternal(final Data key) { final NodeEngine nodeEngine = getNodeEngine(); int partitionId = nodeEngine.getPartitionService().getPartitionId(key); GetOperation operation = new GetOperation(name, key); try { Invocation invocation = nodeEngine.getOperationService().createInvocationBuilder(SERVICE_NAME, operation, partitionId) .build(); return invocation.invoke(); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } protected Data putInternal(final Data key, final Data value, final long ttl, final TimeUnit timeunit) { PutOperation operation = new PutOperation(name, key, value, getTimeInMillis(ttl, timeunit)); return (Data) invokeOperation(key, operation); } protected boolean tryPutInternal(final Data key, final Data value, final long timeout, final TimeUnit timeunit) { TryPutOperation operation = new TryPutOperation(name, key, value, getTimeInMillis(timeout, timeunit)); return (Boolean) invokeOperation(key, operation); } protected Data putIfAbsentInternal(final Data key, final Data value, final long ttl, final TimeUnit timeunit) { PutIfAbsentOperation operation = new PutIfAbsentOperation(name, key, value, getTimeInMillis(ttl, timeunit)); return (Data) invokeOperation(key, operation); } protected void putTransientInternal(final Data key, final Data value, final long ttl, final TimeUnit timeunit) { PutTransientOperation operation = new PutTransientOperation(name, key, value, getTimeInMillis(ttl, timeunit)); invokeOperation(key, operation); } private Object invokeOperation(Data key, KeyBasedMapOperation operation) { final NodeEngine nodeEngine = getNodeEngine(); int partitionId = nodeEngine.getPartitionService().getPartitionId(key); operation.setThreadId(ThreadUtil.getThreadId()); try { Invocation invocation = nodeEngine.getOperationService().createInvocationBuilder(SERVICE_NAME, operation, partitionId) .build(); Future f; Object o; if (mapConfig.isStatisticsEnabled()) { long time = System.currentTimeMillis(); f = invocation.invoke(); o = f.get(); if (operation instanceof BasePutOperation) localMapStats.incrementPuts(System.currentTimeMillis() - time); else if (operation instanceof BaseRemoveOperation) localMapStats.incrementRemoves(System.currentTimeMillis() - time); else if (operation instanceof GetOperation) localMapStats.incrementGets(System.currentTimeMillis() - time); } else { f = invocation.invoke(); o = f.get(); } return o; } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } protected Future<Data> putAsyncInternal(final Data key, final Data value, final long ttl, final TimeUnit timeunit) { final NodeEngine nodeEngine = getNodeEngine(); int partitionId = nodeEngine.getPartitionService().getPartitionId(key); PutOperation operation = new PutOperation(name, key, value, getTimeInMillis(ttl, timeunit)); operation.setThreadId(ThreadUtil.getThreadId()); try { Invocation invocation = nodeEngine.getOperationService().createInvocationBuilder(SERVICE_NAME, operation, partitionId) .build(); return invocation.invoke(); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } protected boolean replaceInternal(final Data key, final Data oldValue, final Data newValue) { ReplaceIfSameOperation operation = new ReplaceIfSameOperation(name, key, oldValue, newValue); return (Boolean) invokeOperation(key, operation); } protected Data replaceInternal(final Data key, final Data value) { ReplaceOperation operation = new ReplaceOperation(name, key, value); return (Data) invokeOperation(key, operation); } protected void setInternal(final Data key, final Data value, final long ttl, final TimeUnit timeunit) { SetOperation operation = new SetOperation(name, key, value, timeunit.toMillis(ttl)); invokeOperation(key, operation); } protected boolean evictInternal(final Data key) { EvictOperation operation = new EvictOperation(name, key, false); return (Boolean) invokeOperation(key, operation); } protected Data removeInternal(Data key) { RemoveOperation operation = new RemoveOperation(name, key); return (Data) invokeOperation(key, operation); } protected void deleteInternal(Data key) { RemoveOperation operation = new RemoveOperation(name, key); invokeOperation(key, operation); } protected boolean removeInternal(final Data key, final Data value) { RemoveIfSameOperation operation = new RemoveIfSameOperation(name, key, value); return (Boolean) invokeOperation(key, operation); } protected boolean tryRemoveInternal(final Data key, final long timeout, final TimeUnit timeunit) { TryRemoveOperation operation = new TryRemoveOperation(name, key, getTimeInMillis(timeout, timeunit)); return (Boolean) invokeOperation(key, operation); } protected Future<Data> removeAsyncInternal(final Data key) { final NodeEngine nodeEngine = getNodeEngine(); int partitionId = nodeEngine.getPartitionService().getPartitionId(key); RemoveOperation operation = new RemoveOperation(name, key); operation.setThreadId(ThreadUtil.getThreadId()); try { Invocation invocation = nodeEngine.getOperationService().createInvocationBuilder(SERVICE_NAME, operation, partitionId) .build(); return invocation.invoke(); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } protected boolean containsKeyInternal(Data key) { // TODO: containsKey should check near-cache first! final NodeEngine nodeEngine = getNodeEngine(); int partitionId = nodeEngine.getPartitionService().getPartitionId(key); ContainsKeyOperation containsKeyOperation = new ContainsKeyOperation(name, key); containsKeyOperation.setServiceName(SERVICE_NAME); try { Invocation invocation = nodeEngine.getOperationService().createInvocationBuilder(SERVICE_NAME, containsKeyOperation, partitionId).build(); Future f = invocation.invoke(); return (Boolean) getService().toObject(f.get()); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } public int size() { final NodeEngine nodeEngine = getNodeEngine(); try { Map<Integer, Object> results = nodeEngine.getOperationService() .invokeOnAllPartitions(SERVICE_NAME, new SizeOperationFactory(name)); int total = 0; for (Object result : results.values()) { Integer size = (Integer) getService().toObject(result); total += size; } return total; } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } public boolean containsValueInternal(Data dataValue) { final NodeEngine nodeEngine = getNodeEngine(); try { Map<Integer, Object> results = nodeEngine.getOperationService() .invokeOnAllPartitions(SERVICE_NAME, new ContainsValueOperationFactory(name, dataValue)); for (Object result : results.values()) { Boolean contains = (Boolean) getService().toObject(result); if (contains) return true; } return false; } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } public boolean isEmpty() { final NodeEngine nodeEngine = getNodeEngine(); try { Map<Integer, Object> results = nodeEngine.getOperationService() .invokeOnAllPartitions(SERVICE_NAME, new BinaryOperationFactory(new MapIsEmptyOperation(name), nodeEngine)); for (Object result : results.values()) { if (!(Boolean) getService().toObject(result)) return false; } return true; } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } protected Map<Data, Data> getAllDataInternal(final Set<Data> keys) { return null; } protected Map<Object, Object> getAllObjectInternal(final Set<Data> keys) { final NodeEngine nodeEngine = getNodeEngine(); Map<Object, Object> result = new HashMap<Object, Object>(); Map<Integer, Object> responses = null; try { responses = nodeEngine.getOperationService() .invokeOnAllPartitions(SERVICE_NAME, new MapGetAllOperationFactory(name, keys)); for (Object response : responses.values()) { Set<Map.Entry<Data, Data>> entries = ((MapEntrySet) getService().toObject(response)).getEntrySet(); for (Entry<Data, Data> entry : entries) { result.put(getService().toObject(entry.getKey()), getService().toObject(entry.getValue())); } } } catch (Exception e) { throw ExceptionUtil.rethrow(e); } return result; } protected void putAllInternal(final Map<? extends Object, ? extends Object> entries) { final NodeEngine nodeEngine = getNodeEngine(); final MapService mapService = getService(); int factor = 3; PartitionService partitionService = nodeEngine.getPartitionService(); OperationService operationService = nodeEngine.getOperationService(); int partitionCount = partitionService.getPartitionCount(); boolean tooManyEntries = entries.size() > (partitionCount * factor); try { if (tooManyEntries) { List<Future> flist = new LinkedList<Future>(); Map<Integer, MapEntrySet> entryMap = new HashMap<Integer, MapEntrySet>(); for (Entry entry : entries.entrySet()) { int partitionId = partitionService.getPartitionId(entry.getKey()); if(!entryMap.containsKey(partitionId)) { entryMap.put(partitionId, new MapEntrySet()); } entryMap.get(partitionId).add(new AbstractMap.SimpleImmutableEntry<Data,Data>(mapService.toData(entry.getKey()), mapService.toData(entry.getValue()))); } for (Integer partitionId : entryMap.keySet()) { PutAllOperation op = new PutAllOperation(name, entryMap.get(partitionId)); op.setPartitionId(partitionId); flist.add(operationService.createInvocationBuilder(SERVICE_NAME, op, partitionId).build().invoke()); } for (Future future : flist) { future.get(); } } else { for (Entry entry : entries.entrySet()) { if(entry.getValue() == null){ throw new NullPointerException(NULL_VALUE_IS_NOT_ALLOWED); } putInternal(mapService.toData(entry.getKey()), mapService.toData(entry.getValue()), -1, TimeUnit.SECONDS); } } } catch (Exception e) { throw ExceptionUtil.rethrow(e); } } protected Set<Data> keySetInternal() { final NodeEngine nodeEngine = getNodeEngine(); try { Map<Integer, Object> results = nodeEngine.getOperationService() .invokeOnAllPartitions(SERVICE_NAME, new BinaryOperationFactory(new MapKeySetOperation(name), nodeEngine)); Set<Data> keySet = new HashSet<Data>(); for (Object result : results.values()) { Set keys = ((MapKeySet) getService().toObject(result)).getKeySet(); keySet.addAll(keys); } return keySet; } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } protected Set<Data> localKeySetInternal() { final NodeEngine nodeEngine = getNodeEngine(); try { Map<Integer, Object> results = nodeEngine.getOperationService() .invokeOnTargetPartitions(SERVICE_NAME, new BinaryOperationFactory(new MapKeySetOperation(name), nodeEngine), nodeEngine.getThisAddress()); Set<Data> keySet = new HashSet<Data>(); for (Object result : results.values()) { Set keys = ((MapKeySet) getService().toObject(result)).getKeySet(); keySet.addAll(keys); } return keySet; } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } public void flush() { final NodeEngine nodeEngine = getNodeEngine(); try { // todo add a feature to mancenter to sync cache to db completely nodeEngine.getOperationService() .invokeOnAllPartitions(SERVICE_NAME, new BinaryOperationFactory(new MapFlushOperation(name), nodeEngine)); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } protected Collection<Data> valuesInternal() { final NodeEngine nodeEngine = getNodeEngine(); try { Map<Integer, Object> results = nodeEngine.getOperationService() .invokeOnAllPartitions(SERVICE_NAME, new BinaryOperationFactory(new MapValuesOperation(name), nodeEngine)); List<Data> values = new ArrayList<Data>(); for (Object result : results.values()) { values.addAll(((MapValueCollection) getService().toObject(result)).getValues()); } return values; } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } public void clearInternal() { final NodeEngine nodeEngine = getNodeEngine(); try { ClearOperation clearOperation = new ClearOperation(name); clearOperation.setServiceName(SERVICE_NAME); nodeEngine.getOperationService() .invokeOnAllPartitions(SERVICE_NAME, new BinaryOperationFactory(clearOperation, nodeEngine)); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } public String addMapInterceptorInternal(MapInterceptor interceptor) { final NodeEngine nodeEngine = getNodeEngine(); final MapService mapService = getService(); String id = mapService.addInterceptor(name, interceptor); AddInterceptorOperation operation = new AddInterceptorOperation(id, interceptor, name); Collection<MemberImpl> members = nodeEngine.getClusterService().getMemberList(); for (MemberImpl member : members) { try { if (member.localMember()) continue; Invocation invocation = nodeEngine.getOperationService() .createInvocationBuilder(SERVICE_NAME, operation, member.getAddress()).build(); invocation.invoke().get(); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } return id; } public void removeMapInterceptorInternal(String id) { final NodeEngine nodeEngine = getNodeEngine(); final MapService mapService = getService(); mapService.removeInterceptor(name, id); RemoveInterceptorOperation operation = new RemoveInterceptorOperation(name, id); Collection<MemberImpl> members = nodeEngine.getClusterService().getMemberList(); for (Member member : members) { try { if (member.localMember()) continue; MemberImpl memberImpl = (MemberImpl) member; Invocation invocation = nodeEngine.getOperationService().createInvocationBuilder(SERVICE_NAME, operation, memberImpl.getAddress()).build(); invocation.invoke().get(); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } } public String addLocalEntryListener(final EntryListener listener) { final MapService mapService = getService(); return mapService.addLocalEventListener(listener, name); } protected String addEntryListenerInternal(final EntryListener listener, final Data key, final boolean includeValue) { EventFilter eventFilter = new EntryEventFilter(includeValue, key); final MapService mapService = getService(); return mapService.addEventListener(listener, eventFilter, name); } protected String addEntryListenerInternal(EntryListener listener, Predicate predicate, final Data key, final boolean includeValue) { EventFilter eventFilter = new QueryEventFilter(includeValue, key, predicate); final MapService mapService = getService(); return mapService.addEventListener(listener, eventFilter, name); } protected boolean removeEntryListenerInternal(String id) { final MapService mapService = getService(); return mapService.removeEventListener(name, id); } protected EntryView getEntryViewInternal(final Data key) { final NodeEngine nodeEngine = getNodeEngine(); int partitionId = nodeEngine.getPartitionService().getPartitionId(key); GetEntryViewOperation getEntryViewOperation = new GetEntryViewOperation(name, key); getEntryViewOperation.setServiceName(SERVICE_NAME); try { Invocation invocation = nodeEngine.getOperationService().createInvocationBuilder(SERVICE_NAME, getEntryViewOperation, partitionId).build(); Future f = invocation.invoke(); Object o = getService().toObject(f.get()); return (EntryView) o; } catch (Throwable t) { throw new RuntimeException(t); } } protected Set<Entry<Data, Data>> entrySetInternal() { final NodeEngine nodeEngine = getNodeEngine(); try { Map<Integer, Object> results = nodeEngine.getOperationService() .invokeOnAllPartitions(SERVICE_NAME, new BinaryOperationFactory(new MapEntrySetOperation(name), nodeEngine)); Set<Entry<Data, Data>> entrySet = new HashSet<Entry<Data, Data>>(); for (Object result : results.values()) { Set entries = ((MapEntrySet) getService().toObject(result)).getEntrySet(); if (entries != null) entrySet.addAll(entries); } return entrySet; } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } public Data executeOnKeyInternal(Data key, EntryProcessor entryProcessor) { final NodeEngine nodeEngine = getNodeEngine(); int partitionId = nodeEngine.getPartitionService().getPartitionId(key); EntryOperation operation = new EntryOperation(name, key, entryProcessor); operation.setThreadId(ThreadUtil.getThreadId()); try { Invocation invocation = nodeEngine.getOperationService().createInvocationBuilder(SERVICE_NAME, operation, partitionId) .build(); Future future = invocation.invoke(); return (Data) future.get(); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } public Map executeOnEntries(EntryProcessor entryProcessor) { Map result = new HashMap(); try { NodeEngine nodeEngine = getNodeEngine(); PartitionWideEntryOperation operation = new PartitionWideEntryOperation(name, entryProcessor); operation.setServiceName(SERVICE_NAME); Map<Integer, Object> results = nodeEngine.getOperationService() .invokeOnAllPartitions(SERVICE_NAME, new BinaryOperationFactory(operation, nodeEngine)); for (Object o : results.values()) { if (o != null) { final MapService service = getService(); final MapEntrySet mapEntrySet = (MapEntrySet) o; for (Entry<Data, Data> entry : mapEntrySet.getEntrySet()) { result.put(service.toObject(entry.getKey()), service.toObject(entry.getValue())); } } } } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } return result; } protected Set queryLocal(final Predicate predicate, final IterationType iterationType, final boolean dataResult) { final NodeEngine nodeEngine = getNodeEngine(); OperationService operationService = nodeEngine.getOperationService(); List<Integer> partitionIds = nodeEngine.getPartitionService().getMemberPartitions(nodeEngine.getThisAddress()); QueryResultSet result = new QueryResultSet(nodeEngine.getSerializationService(), iterationType, dataResult); List<Integer> returnedPartitionIds = new ArrayList<Integer>(); try { Invocation invocation = operationService .createInvocationBuilder(SERVICE_NAME, new QueryOperation(name, predicate), nodeEngine.getThisAddress()).build(); Future future = invocation.invoke(); QueryResult queryResult = (QueryResult) future.get(); if (queryResult != null) { returnedPartitionIds = queryResult.getPartitionIds(); result.addAll(queryResult.getResult()); } if (returnedPartitionIds.size() == partitionIds.size()) { return result; } List<Integer> missingList = new ArrayList<Integer>(); for (Integer partitionId : partitionIds) { if (!returnedPartitionIds.contains(partitionId)) missingList.add(partitionId); } List<Future> futures = new ArrayList<Future>(missingList.size()); for (Integer pid : missingList) { QueryPartitionOperation queryPartitionOperation = new QueryPartitionOperation(name, predicate); queryPartitionOperation.setPartitionId(pid); try { Future f = operationService.createInvocationBuilder(SERVICE_NAME, queryPartitionOperation, pid).build().invoke(); futures.add(f); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } for (Future f : futures) { QueryResult qResult = (QueryResult) f.get(); result.addAll(qResult.getResult()); } } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } return result; } protected Set query(final Predicate predicate, final IterationType iterationType, final boolean dataResult) { final NodeEngine nodeEngine = getNodeEngine(); OperationService operationService = nodeEngine.getOperationService(); Collection<MemberImpl> members = nodeEngine.getClusterService().getMemberList(); int partitionCount = nodeEngine.getPartitionService().getPartitionCount(); Set<Integer> plist = new HashSet<Integer>(partitionCount); QueryResultSet result = new QueryResultSet(nodeEngine.getSerializationService(), iterationType, dataResult); try { List<Future> flist = new ArrayList<Future>(); for (MemberImpl member : members) { Invocation invocation = operationService .createInvocationBuilder(SERVICE_NAME, new QueryOperation(name, predicate), member.getAddress()).build(); Future future = invocation.invoke(); flist.add(future); } for (Future future : flist) { QueryResult queryResult = (QueryResult) future.get(); if (queryResult != null) { final List<Integer> partitionIds = queryResult.getPartitionIds(); if (partitionIds != null) { plist.addAll(partitionIds); result.addAll(queryResult.getResult()); } } } if (plist.size() == partitionCount) { return result; } List<Integer> missingList = new ArrayList<Integer>(); for (int i = 0; i < partitionCount; i++) { if (!plist.contains(i)) { missingList.add(i); } } List<Future> futures = new ArrayList<Future>(missingList.size()); for (Integer pid : missingList) { QueryPartitionOperation queryPartitionOperation = new QueryPartitionOperation(name, predicate); queryPartitionOperation.setPartitionId(pid); try { Future f = operationService.createInvocationBuilder(SERVICE_NAME, queryPartitionOperation, pid).build().invoke(); futures.add(f); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } for (Future future : futures) { QueryResult queryResult = (QueryResult) future.get(); result.addAll(queryResult.getResult()); } } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } return result; } public void addIndex(final String attribute, final boolean ordered) { final NodeEngine nodeEngine = getNodeEngine(); if (attribute == null) throw new IllegalArgumentException("attribute name cannot be null"); try { AddIndexOperation addIndexOperation = new AddIndexOperation(name, attribute, ordered); Map<Integer, Object> results = nodeEngine.getOperationService() .invokeOnAllPartitions(SERVICE_NAME, new BinaryOperationFactory(addIndexOperation, nodeEngine)); } catch (Throwable t) { throw ExceptionUtil.rethrow(t); } } public LocalMapStats getLocalMapStats() { return getService().createLocalMapStats(name); } protected long getTimeInMillis(final long time, final TimeUnit timeunit) { return timeunit != null ? timeunit.toMillis(time) : time; } public final Object getId() { return name; } public final String getName() { return name; } public final String getServiceName() { return SERVICE_NAME; } }
Fixed read-backup-data NPE.
hazelcast/src/main/java/com/hazelcast/map/proxy/MapProxySupport.java
Fixed read-backup-data NPE.
Java
apache-2.0
e69754adb570a58bc9fc83d1f49633fd32f7dd55
0
apache/fop,chunlinyao/fop,chunlinyao/fop,chunlinyao/fop,apache/fop,chunlinyao/fop,apache/fop,chunlinyao/fop,apache/fop,apache/fop
/* * $Id$ * Copyright (C) 2001 The Apache Software Foundation. All rights reserved. * For details on use and redistribution please refer to the * LICENSE file included with these sources. */ package org.apache.fop.image; // Java import java.net.URL; import java.awt.image.ImageProducer; import java.awt.image.ColorModel; import java.awt.image.IndexColorModel; import java.awt.color.ColorSpace; // Jimi import com.sun.jimi.core.*; // FOP import org.apache.fop.pdf.PDFColor; import org.apache.fop.image.analyser.ImageReader; /** * FopImage object for several images types, using Jimi. * See Jimi documentation for supported image types. * @author Eric SCHAEFFER * @see AbstractFopImage * @see FopImage */ public class JimiImage extends AbstractFopImage { public JimiImage(URL href, FopImage.ImageInfo imgReader) { super(href, imgReader); try { Class c = Class.forName("com.sun.jimi.core.Jimi"); } catch (ClassNotFoundException e) { //throw new FopImageException("Jimi image library not available"); } } protected void loadImage() { int[] tmpMap = null; try { ImageProducer ip = Jimi.getImageProducer(this.m_href.openStream(), Jimi.SYNCHRONOUS | Jimi.IN_MEMORY); FopImageConsumer consumer = new FopImageConsumer(ip); ip.startProduction(consumer); while (!consumer.isImageReady()) { Thread.sleep(500); } this.m_height = consumer.getHeight(); this.m_width = consumer.getWidth(); try { tmpMap = consumer.getImage(); } catch (Exception ex) { /*throw new FopImageException("Image grabbing interrupted : " + ex.getMessage()); */} ColorModel cm = consumer.getColorModel(); this.m_bitsPerPixel = 8; // this.m_bitsPerPixel = cm.getPixelSize(); this.m_colorSpace = ColorSpace.getInstance(ColorSpace.CS_LINEAR_RGB); if (cm.hasAlpha()) { int transparencyType = cm.getTransparency(); // java.awt.Transparency. BITMASK or OPAQUE or TRANSLUCENT if (transparencyType == java.awt.Transparency.OPAQUE) { this.m_isTransparent = false; } else if (transparencyType == java.awt.Transparency.BITMASK) { if (cm instanceof IndexColorModel) { this.m_isTransparent = false; byte[] alphas = new byte[ ((IndexColorModel) cm).getMapSize()]; byte[] reds = new byte[ ((IndexColorModel) cm).getMapSize()]; byte[] greens = new byte[ ((IndexColorModel) cm).getMapSize()]; byte[] blues = new byte[ ((IndexColorModel) cm).getMapSize()]; ((IndexColorModel) cm).getAlphas(alphas); ((IndexColorModel) cm).getReds(reds); ((IndexColorModel) cm).getGreens(greens); ((IndexColorModel) cm).getBlues(blues); for (int i = 0; i < ((IndexColorModel) cm).getMapSize(); i++) { if ((alphas[i] & 0xFF) == 0) { this.m_isTransparent = true; this.m_transparentColor = new PDFColor( (int)(reds[i] & 0xFF), (int)(greens[i] & 0xFF), (int)(blues[i] & 0xFF)); break; } } } else { // TRANSLUCENT /* * this.m_isTransparent = false; * for (int i = 0; i < this.m_width * this.m_height; i++) { * if (cm.getAlpha(tmpMap[i]) == 0) { * this.m_isTransparent = true; * this.m_transparentColor = new PDFColor(cm.getRed(tmpMap[i]), cm.getGreen(tmpMap[i]), cm.getBlue(tmpMap[i])); * break; * } * } */ // use special API... this.m_isTransparent = false; } } else { this.m_isTransparent = false; } } else { this.m_isTransparent = false; } } catch (Exception ex) { /*throw new FopImageException("Error while loading image " + this.m_href.toString() + " : " + ex.getClass() + " - " + ex.getMessage()); */} // Should take care of the ColorSpace and bitsPerPixel this.m_bitmapsSize = this.m_width * this.m_height * 3; this.m_bitmaps = new byte[this.m_bitmapsSize]; for (int i = 0; i < this.m_height; i++) { for (int j = 0; j < this.m_width; j++) { int p = tmpMap[i * this.m_width + j]; int r = (p >> 16) & 0xFF; int g = (p >> 8) & 0xFF; int b = (p) & 0xFF; this.m_bitmaps[3 * (i * this.m_width + j)] = (byte)(r & 0xFF); this.m_bitmaps[3 * (i * this.m_width + j) + 1] = (byte)(g & 0xFF); this.m_bitmaps[3 * (i * this.m_width + j) + 2] = (byte)(b & 0xFF); } } } }
src/org/apache/fop/image/JimiImage.java
/* * $Id$ * Copyright (C) 2001 The Apache Software Foundation. All rights reserved. * For details on use and redistribution please refer to the * LICENSE file included with these sources. */ package org.apache.fop.image; // Java import java.net.URL; import java.awt.image.ImageProducer; import java.awt.image.ColorModel; import java.awt.image.IndexColorModel; import java.awt.color.ColorSpace; // Jimi import com.sun.jimi.core.*; // FOP import org.apache.fop.pdf.PDFColor; import org.apache.fop.image.analyser.ImageReader; /** * FopImage object for several images types, using Jimi. * See Jimi documentation for supported image types. * @author Eric SCHAEFFER * @see AbstractFopImage * @see FopImage */ public class JimiImage extends AbstractFopImage { public JimiImage(URL href, FopImage.ImageInfo imgReader) { super(href, imgReader); try { Class c = Class.forName("com.sun.jimi.core.Jimi"); } catch (ClassNotFoundException e) { //throw new FopImageException("Jimi image library not available"); } } protected void loadImage() { int[] tmpMap = null; try { ImageProducer ip = Jimi.getImageProducer(this.m_href.openStream(), Jimi.SYNCHRONOUS | Jimi.IN_MEMORY); FopImageConsumer consumer = new FopImageConsumer(ip); ip.startProduction(consumer); while (!consumer.isImageReady()) { Thread.sleep(500); } this.m_height = consumer.getHeight(); this.m_width = consumer.getWidth(); try { tmpMap = consumer.getImage(); } catch (Exception ex) { /*throw new FopImageException("Image grabbing interrupted : " + ex.getMessage()); */} ColorModel cm = consumer.getColorModel(); this.m_bitsPerPixel = 8; // this.m_bitsPerPixel = cm.getPixelSize(); this.m_colorSpace = ColorSpace.getInstance(ColorSpace.CS_LINEAR_RGB); if (cm.hasAlpha()) { int transparencyType = cm.getTransparency(); // java.awt.Transparency. BITMASK or OPAQUE or TRANSLUCENT if (transparencyType == java.awt.Transparency.OPAQUE) { this.m_isTransparent = false; } else if (transparencyType == java.awt.Transparency.BITMASK) { if (cm instanceof IndexColorModel) { this.m_isTransparent = false; byte[] alphas = new byte[ ((IndexColorModel) cm).getMapSize()]; byte[] reds = new byte[ ((IndexColorModel) cm).getMapSize()]; byte[] greens = new byte[ ((IndexColorModel) cm).getMapSize()]; byte[] blues = new byte[ ((IndexColorModel) cm).getMapSize()]; ((IndexColorModel) cm).getAlphas(alphas); ((IndexColorModel) cm).getReds(reds); ((IndexColorModel) cm).getGreens(greens); ((IndexColorModel) cm).getBlues(blues); for (int i = 0; i < ((IndexColorModel) cm).getMapSize(); i++) { if ((alphas[i] & 0xFF) == 0) { this.m_isTransparent = true; this.m_transparentColor = new PDFColor( (int)(reds[i] & 0xFF), (int)(greens[i] & 0xFF), (int)(blues[i] & 0xFF)); break; } } } else { // TRANSLUCENT /* * this.m_isTransparent = false; * for (int i = 0; i < this.m_width * this.m_height; i++) { * if (cm.getAlpha(tmpMap[i]) == 0) { * this.m_isTransparent = true; * this.m_transparentColor = new PDFColor(cm.getRed(tmpMap[i]), cm.getGreen(tmpMap[i]), cm.getBlue(tmpMap[i])); * break; * } * } */ // use special API... this.m_isTransparent = false; } } else { this.m_isTransparent = false; } } else { this.m_isTransparent = false; } } catch (Exception ex) { /*throw new FopImageException("Error while loading image " + this.m_href.toString() + " : " + ex.getClass() + " - " + ex.getMessage()); */} // Should take care of the ColorSpace and bitsPerPixel this.m_bitmapsSize = this.m_width * this.m_height * 3; this.m_bitmaps = new byte[this.m_bitmapsSize]; for (int i = 0; i < this.m_height; i++) { for (int j = 0; j < this.m_width; j++) { int p = tmpMap[i * this.m_width + j]; int r = (p > > 16) & 0xFF; int g = (p > > 8) & 0xFF; int b = (p) & 0xFF; this.m_bitmaps[3 * (i * this.m_width + j)] = (byte)(r & 0xFF); this.m_bitmaps[3 * (i * this.m_width + j) + 1] = (byte)(g & 0xFF); this.m_bitmaps[3 * (i * this.m_width + j) + 2] = (byte)(b & 0xFF); } } } }
fixed compile error git-svn-id: 102839466c3b40dd9c7e25c0a1a6d26afc40150a@194983 13f79535-47bb-0310-9956-ffa450edef68
src/org/apache/fop/image/JimiImage.java
fixed compile error
Java
apache-2.0
b37023c9fe41b12eda3d362b2c641d94bfd3809f
0
vladmm/intellij-community,pwoodworth/intellij-community,TangHao1987/intellij-community,gnuhub/intellij-community,idea4bsd/idea4bsd,holmes/intellij-community,mglukhikh/intellij-community,pwoodworth/intellij-community,signed/intellij-community,FHannes/intellij-community,semonte/intellij-community,ftomassetti/intellij-community,asedunov/intellij-community,retomerz/intellij-community,MichaelNedzelsky/intellij-community,Lekanich/intellij-community,mglukhikh/intellij-community,pwoodworth/intellij-community,semonte/intellij-community,orekyuu/intellij-community,joewalnes/idea-community,amith01994/intellij-community,alphafoobar/intellij-community,apixandru/intellij-community,orekyuu/intellij-community,lucafavatella/intellij-community,ahb0327/intellij-community,ftomassetti/intellij-community,petteyg/intellij-community,joewalnes/idea-community,jexp/idea2,hurricup/intellij-community,vladmm/intellij-community,dslomov/intellij-community,holmes/intellij-community,retomerz/intellij-community,retomerz/intellij-community,xfournet/intellij-community,muntasirsyed/intellij-community,asedunov/intellij-community,jagguli/intellij-community,salguarnieri/intellij-community,gnuhub/intellij-community,MichaelNedzelsky/intellij-community,fitermay/intellij-community,blademainer/intellij-community,samthor/intellij-community,amith01994/intellij-community,salguarnieri/intellij-community,Distrotech/intellij-community,holmes/intellij-community,kool79/intellij-community,allotria/intellij-community,TangHao1987/intellij-community,nicolargo/intellij-community,youdonghai/intellij-community,Lekanich/intellij-community,samthor/intellij-community,samthor/intellij-community,fitermay/intellij-community,dslomov/intellij-community,supersven/intellij-community,asedunov/intellij-community,hurricup/intellij-community,ryano144/intellij-community,da1z/intellij-community,da1z/intellij-community,pwoodworth/intellij-community,youdonghai/intellij-community,nicolargo/intellij-community,adedayo/intellij-community,vvv1559/intellij-community,wreckJ/intellij-community,clumsy/intellij-community,vladmm/intellij-community,youdonghai/intellij-community,robovm/robovm-studio,wreckJ/intellij-community,blademainer/intellij-community,slisson/intellij-community,tmpgit/intellij-community,nicolargo/intellij-community,suncycheng/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,lucafavatella/intellij-community,amith01994/intellij-community,tmpgit/intellij-community,FHannes/intellij-community,holmes/intellij-community,Distrotech/intellij-community,tmpgit/intellij-community,TangHao1987/intellij-community,kdwink/intellij-community,ryano144/intellij-community,semonte/intellij-community,blademainer/intellij-community,Lekanich/intellij-community,amith01994/intellij-community,signed/intellij-community,ivan-fedorov/intellij-community,salguarnieri/intellij-community,amith01994/intellij-community,MER-GROUP/intellij-community,adedayo/intellij-community,salguarnieri/intellij-community,caot/intellij-community,robovm/robovm-studio,kool79/intellij-community,FHannes/intellij-community,alphafoobar/intellij-community,kool79/intellij-community,ivan-fedorov/intellij-community,orekyuu/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,akosyakov/intellij-community,diorcety/intellij-community,retomerz/intellij-community,apixandru/intellij-community,salguarnieri/intellij-community,slisson/intellij-community,allotria/intellij-community,suncycheng/intellij-community,alphafoobar/intellij-community,adedayo/intellij-community,youdonghai/intellij-community,amith01994/intellij-community,hurricup/intellij-community,ivan-fedorov/intellij-community,ernestp/consulo,adedayo/intellij-community,suncycheng/intellij-community,pwoodworth/intellij-community,adedayo/intellij-community,diorcety/intellij-community,salguarnieri/intellij-community,allotria/intellij-community,salguarnieri/intellij-community,nicolargo/intellij-community,salguarnieri/intellij-community,izonder/intellij-community,fnouama/intellij-community,akosyakov/intellij-community,vladmm/intellij-community,fnouama/intellij-community,ftomassetti/intellij-community,samthor/intellij-community,slisson/intellij-community,holmes/intellij-community,Distrotech/intellij-community,asedunov/intellij-community,da1z/intellij-community,diorcety/intellij-community,muntasirsyed/intellij-community,hurricup/intellij-community,joewalnes/idea-community,clumsy/intellij-community,retomerz/intellij-community,fengbaicanhe/intellij-community,fitermay/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,ivan-fedorov/intellij-community,asedunov/intellij-community,hurricup/intellij-community,Distrotech/intellij-community,retomerz/intellij-community,fnouama/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,TangHao1987/intellij-community,retomerz/intellij-community,SerCeMan/intellij-community,ahb0327/intellij-community,da1z/intellij-community,ftomassetti/intellij-community,petteyg/intellij-community,Distrotech/intellij-community,da1z/intellij-community,xfournet/intellij-community,alphafoobar/intellij-community,mglukhikh/intellij-community,MichaelNedzelsky/intellij-community,akosyakov/intellij-community,caot/intellij-community,MER-GROUP/intellij-community,tmpgit/intellij-community,ol-loginov/intellij-community,signed/intellij-community,signed/intellij-community,Distrotech/intellij-community,petteyg/intellij-community,ivan-fedorov/intellij-community,petteyg/intellij-community,wreckJ/intellij-community,kdwink/intellij-community,nicolargo/intellij-community,FHannes/intellij-community,tmpgit/intellij-community,muntasirsyed/intellij-community,akosyakov/intellij-community,apixandru/intellij-community,ahb0327/intellij-community,ryano144/intellij-community,mglukhikh/intellij-community,Distrotech/intellij-community,samthor/intellij-community,SerCeMan/intellij-community,allotria/intellij-community,vladmm/intellij-community,muntasirsyed/intellij-community,consulo/consulo,xfournet/intellij-community,blademainer/intellij-community,kool79/intellij-community,allotria/intellij-community,xfournet/intellij-community,fengbaicanhe/intellij-community,slisson/intellij-community,ibinti/intellij-community,ol-loginov/intellij-community,muntasirsyed/intellij-community,TangHao1987/intellij-community,ivan-fedorov/intellij-community,supersven/intellij-community,ThiagoGarciaAlves/intellij-community,fengbaicanhe/intellij-community,mglukhikh/intellij-community,diorcety/intellij-community,supersven/intellij-community,caot/intellij-community,fnouama/intellij-community,supersven/intellij-community,adedayo/intellij-community,vvv1559/intellij-community,SerCeMan/intellij-community,consulo/consulo,signed/intellij-community,petteyg/intellij-community,signed/intellij-community,clumsy/intellij-community,apixandru/intellij-community,salguarnieri/intellij-community,ivan-fedorov/intellij-community,michaelgallacher/intellij-community,MichaelNedzelsky/intellij-community,petteyg/intellij-community,lucafavatella/intellij-community,Lekanich/intellij-community,tmpgit/intellij-community,fitermay/intellij-community,ahb0327/intellij-community,SerCeMan/intellij-community,allotria/intellij-community,dslomov/intellij-community,muntasirsyed/intellij-community,michaelgallacher/intellij-community,diorcety/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,ol-loginov/intellij-community,robovm/robovm-studio,izonder/intellij-community,clumsy/intellij-community,orekyuu/intellij-community,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,jagguli/intellij-community,orekyuu/intellij-community,holmes/intellij-community,apixandru/intellij-community,apixandru/intellij-community,salguarnieri/intellij-community,vladmm/intellij-community,ol-loginov/intellij-community,ryano144/intellij-community,alphafoobar/intellij-community,hurricup/intellij-community,muntasirsyed/intellij-community,wreckJ/intellij-community,orekyuu/intellij-community,da1z/intellij-community,ivan-fedorov/intellij-community,jagguli/intellij-community,jexp/idea2,samthor/intellij-community,muntasirsyed/intellij-community,da1z/intellij-community,vladmm/intellij-community,da1z/intellij-community,jagguli/intellij-community,alphafoobar/intellij-community,da1z/intellij-community,ftomassetti/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,caot/intellij-community,MER-GROUP/intellij-community,TangHao1987/intellij-community,supersven/intellij-community,blademainer/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,MichaelNedzelsky/intellij-community,ibinti/intellij-community,ibinti/intellij-community,semonte/intellij-community,nicolargo/intellij-community,adedayo/intellij-community,retomerz/intellij-community,kool79/intellij-community,Distrotech/intellij-community,ibinti/intellij-community,petteyg/intellij-community,clumsy/intellij-community,amith01994/intellij-community,SerCeMan/intellij-community,ivan-fedorov/intellij-community,ahb0327/intellij-community,signed/intellij-community,jagguli/intellij-community,MER-GROUP/intellij-community,allotria/intellij-community,consulo/consulo,vvv1559/intellij-community,asedunov/intellij-community,Distrotech/intellij-community,orekyuu/intellij-community,blademainer/intellij-community,joewalnes/idea-community,holmes/intellij-community,fengbaicanhe/intellij-community,Distrotech/intellij-community,jexp/idea2,caot/intellij-community,lucafavatella/intellij-community,kdwink/intellij-community,tmpgit/intellij-community,consulo/consulo,idea4bsd/idea4bsd,izonder/intellij-community,FHannes/intellij-community,tmpgit/intellij-community,amith01994/intellij-community,diorcety/intellij-community,Lekanich/intellij-community,ol-loginov/intellij-community,nicolargo/intellij-community,idea4bsd/idea4bsd,vvv1559/intellij-community,xfournet/intellij-community,ol-loginov/intellij-community,dslomov/intellij-community,ahb0327/intellij-community,allotria/intellij-community,youdonghai/intellij-community,dslomov/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,ahb0327/intellij-community,fnouama/intellij-community,vvv1559/intellij-community,samthor/intellij-community,ibinti/intellij-community,ftomassetti/intellij-community,jagguli/intellij-community,caot/intellij-community,fitermay/intellij-community,vladmm/intellij-community,pwoodworth/intellij-community,retomerz/intellij-community,hurricup/intellij-community,kool79/intellij-community,lucafavatella/intellij-community,suncycheng/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,petteyg/intellij-community,michaelgallacher/intellij-community,gnuhub/intellij-community,michaelgallacher/intellij-community,fengbaicanhe/intellij-community,idea4bsd/idea4bsd,kdwink/intellij-community,jexp/idea2,diorcety/intellij-community,joewalnes/idea-community,TangHao1987/intellij-community,jagguli/intellij-community,clumsy/intellij-community,xfournet/intellij-community,fnouama/intellij-community,semonte/intellij-community,tmpgit/intellij-community,holmes/intellij-community,FHannes/intellij-community,kdwink/intellij-community,vvv1559/intellij-community,izonder/intellij-community,orekyuu/intellij-community,kdwink/intellij-community,nicolargo/intellij-community,hurricup/intellij-community,blademainer/intellij-community,vladmm/intellij-community,SerCeMan/intellij-community,ernestp/consulo,TangHao1987/intellij-community,lucafavatella/intellij-community,jagguli/intellij-community,ftomassetti/intellij-community,idea4bsd/idea4bsd,alphafoobar/intellij-community,MichaelNedzelsky/intellij-community,amith01994/intellij-community,FHannes/intellij-community,salguarnieri/intellij-community,retomerz/intellij-community,izonder/intellij-community,robovm/robovm-studio,mglukhikh/intellij-community,mglukhikh/intellij-community,nicolargo/intellij-community,lucafavatella/intellij-community,idea4bsd/idea4bsd,clumsy/intellij-community,dslomov/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,fengbaicanhe/intellij-community,tmpgit/intellij-community,adedayo/intellij-community,fnouama/intellij-community,wreckJ/intellij-community,TangHao1987/intellij-community,ivan-fedorov/intellij-community,xfournet/intellij-community,ahb0327/intellij-community,muntasirsyed/intellij-community,alphafoobar/intellij-community,akosyakov/intellij-community,wreckJ/intellij-community,ernestp/consulo,pwoodworth/intellij-community,youdonghai/intellij-community,semonte/intellij-community,hurricup/intellij-community,pwoodworth/intellij-community,salguarnieri/intellij-community,adedayo/intellij-community,dslomov/intellij-community,nicolargo/intellij-community,allotria/intellij-community,diorcety/intellij-community,ThiagoGarciaAlves/intellij-community,tmpgit/intellij-community,robovm/robovm-studio,allotria/intellij-community,kool79/intellij-community,idea4bsd/idea4bsd,Lekanich/intellij-community,MichaelNedzelsky/intellij-community,ryano144/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,Lekanich/intellij-community,Lekanich/intellij-community,slisson/intellij-community,orekyuu/intellij-community,ibinti/intellij-community,idea4bsd/idea4bsd,MER-GROUP/intellij-community,fengbaicanhe/intellij-community,youdonghai/intellij-community,robovm/robovm-studio,jexp/idea2,ThiagoGarciaAlves/intellij-community,youdonghai/intellij-community,slisson/intellij-community,vvv1559/intellij-community,holmes/intellij-community,ryano144/intellij-community,vvv1559/intellij-community,pwoodworth/intellij-community,ernestp/consulo,signed/intellij-community,SerCeMan/intellij-community,clumsy/intellij-community,tmpgit/intellij-community,MER-GROUP/intellij-community,fengbaicanhe/intellij-community,suncycheng/intellij-community,gnuhub/intellij-community,ol-loginov/intellij-community,joewalnes/idea-community,samthor/intellij-community,ernestp/consulo,blademainer/intellij-community,kdwink/intellij-community,kool79/intellij-community,jagguli/intellij-community,fengbaicanhe/intellij-community,kool79/intellij-community,ol-loginov/intellij-community,xfournet/intellij-community,muntasirsyed/intellij-community,robovm/robovm-studio,diorcety/intellij-community,caot/intellij-community,akosyakov/intellij-community,slisson/intellij-community,supersven/intellij-community,michaelgallacher/intellij-community,Lekanich/intellij-community,joewalnes/idea-community,MER-GROUP/intellij-community,michaelgallacher/intellij-community,vladmm/intellij-community,caot/intellij-community,holmes/intellij-community,MichaelNedzelsky/intellij-community,ahb0327/intellij-community,MichaelNedzelsky/intellij-community,joewalnes/idea-community,ibinti/intellij-community,retomerz/intellij-community,izonder/intellij-community,izonder/intellij-community,signed/intellij-community,gnuhub/intellij-community,fnouama/intellij-community,slisson/intellij-community,kdwink/intellij-community,supersven/intellij-community,samthor/intellij-community,slisson/intellij-community,ahb0327/intellij-community,kool79/intellij-community,samthor/intellij-community,vvv1559/intellij-community,ibinti/intellij-community,ibinti/intellij-community,vladmm/intellij-community,jexp/idea2,dslomov/intellij-community,ryano144/intellij-community,clumsy/intellij-community,hurricup/intellij-community,kdwink/intellij-community,michaelgallacher/intellij-community,da1z/intellij-community,robovm/robovm-studio,Lekanich/intellij-community,semonte/intellij-community,clumsy/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,ibinti/intellij-community,caot/intellij-community,ryano144/intellij-community,alphafoobar/intellij-community,ernestp/consulo,adedayo/intellij-community,mglukhikh/intellij-community,retomerz/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,pwoodworth/intellij-community,lucafavatella/intellij-community,da1z/intellij-community,jexp/idea2,ibinti/intellij-community,dslomov/intellij-community,kdwink/intellij-community,fnouama/intellij-community,clumsy/intellij-community,wreckJ/intellij-community,pwoodworth/intellij-community,MER-GROUP/intellij-community,TangHao1987/intellij-community,FHannes/intellij-community,blademainer/intellij-community,supersven/intellij-community,asedunov/intellij-community,consulo/consulo,adedayo/intellij-community,MER-GROUP/intellij-community,lucafavatella/intellij-community,ryano144/intellij-community,consulo/consulo,ThiagoGarciaAlves/intellij-community,ftomassetti/intellij-community,fitermay/intellij-community,gnuhub/intellij-community,gnuhub/intellij-community,idea4bsd/idea4bsd,wreckJ/intellij-community,ftomassetti/intellij-community,akosyakov/intellij-community,hurricup/intellij-community,wreckJ/intellij-community,ahb0327/intellij-community,diorcety/intellij-community,signed/intellij-community,asedunov/intellij-community,Distrotech/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,da1z/intellij-community,jexp/idea2,youdonghai/intellij-community,orekyuu/intellij-community,ThiagoGarciaAlves/intellij-community,ryano144/intellij-community,fitermay/intellij-community,MER-GROUP/intellij-community,ibinti/intellij-community,slisson/intellij-community,ryano144/intellij-community,semonte/intellij-community,samthor/intellij-community,xfournet/intellij-community,izonder/intellij-community,adedayo/intellij-community,supersven/intellij-community,apixandru/intellij-community,jagguli/intellij-community,vvv1559/intellij-community,petteyg/intellij-community,hurricup/intellij-community,akosyakov/intellij-community,akosyakov/intellij-community,retomerz/intellij-community,kool79/intellij-community,semonte/intellij-community,wreckJ/intellij-community,wreckJ/intellij-community,semonte/intellij-community,asedunov/intellij-community,dslomov/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,supersven/intellij-community,kdwink/intellij-community,caot/intellij-community,akosyakov/intellij-community,lucafavatella/intellij-community,clumsy/intellij-community,ThiagoGarciaAlves/intellij-community,asedunov/intellij-community,semonte/intellij-community,robovm/robovm-studio,MichaelNedzelsky/intellij-community,ryano144/intellij-community,MER-GROUP/intellij-community,samthor/intellij-community,MER-GROUP/intellij-community,alphafoobar/intellij-community,alphafoobar/intellij-community,ThiagoGarciaAlves/intellij-community,blademainer/intellij-community,alphafoobar/intellij-community,Lekanich/intellij-community,gnuhub/intellij-community,akosyakov/intellij-community,gnuhub/intellij-community,izonder/intellij-community,MichaelNedzelsky/intellij-community,ol-loginov/intellij-community,petteyg/intellij-community,ibinti/intellij-community,apixandru/intellij-community,asedunov/intellij-community,SerCeMan/intellij-community,gnuhub/intellij-community,kool79/intellij-community,lucafavatella/intellij-community,ol-loginov/intellij-community,FHannes/intellij-community,supersven/intellij-community,ol-loginov/intellij-community,apixandru/intellij-community,dslomov/intellij-community,SerCeMan/intellij-community,nicolargo/intellij-community,supersven/intellij-community,xfournet/intellij-community,apixandru/intellij-community,Lekanich/intellij-community,gnuhub/intellij-community,jagguli/intellij-community,fengbaicanhe/intellij-community,SerCeMan/intellij-community,slisson/intellij-community,idea4bsd/idea4bsd,caot/intellij-community,mglukhikh/intellij-community,ftomassetti/intellij-community,holmes/intellij-community,suncycheng/intellij-community,slisson/intellij-community,FHannes/intellij-community,dslomov/intellij-community,xfournet/intellij-community,akosyakov/intellij-community,pwoodworth/intellij-community,blademainer/intellij-community,fitermay/intellij-community,muntasirsyed/intellij-community,allotria/intellij-community,Distrotech/intellij-community,lucafavatella/intellij-community,kdwink/intellij-community,michaelgallacher/intellij-community,fengbaicanhe/intellij-community,izonder/intellij-community,vladmm/intellij-community,joewalnes/idea-community,fitermay/intellij-community,fengbaicanhe/intellij-community,petteyg/intellij-community,asedunov/intellij-community,fnouama/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,apixandru/intellij-community,xfournet/intellij-community,signed/intellij-community,gnuhub/intellij-community,blademainer/intellij-community,SerCeMan/intellij-community,caot/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,ivan-fedorov/intellij-community,SerCeMan/intellij-community,MichaelNedzelsky/intellij-community,diorcety/intellij-community,michaelgallacher/intellij-community,jagguli/intellij-community,muntasirsyed/intellij-community,da1z/intellij-community,nicolargo/intellij-community,fnouama/intellij-community,robovm/robovm-studio,fitermay/intellij-community,vvv1559/intellij-community,allotria/intellij-community,TangHao1987/intellij-community,asedunov/intellij-community,ahb0327/intellij-community,ol-loginov/intellij-community,holmes/intellij-community,izonder/intellij-community,wreckJ/intellij-community,ivan-fedorov/intellij-community,ThiagoGarciaAlves/intellij-community,diorcety/intellij-community,apixandru/intellij-community,signed/intellij-community,robovm/robovm-studio,robovm/robovm-studio,fnouama/intellij-community,youdonghai/intellij-community,izonder/intellij-community,amith01994/intellij-community,petteyg/intellij-community,amith01994/intellij-community,amith01994/intellij-community
package com.intellij.psi.formatter.java; import com.intellij.codeFormatting.general.FormatterUtil; import com.intellij.lang.ASTNode; import com.intellij.formatting.Spacing; import com.intellij.openapi.util.TextRange; import com.intellij.psi.*; import com.intellij.psi.codeStyle.CodeStyleSettings; import com.intellij.psi.impl.source.SourceTreeToPsiMap; import com.intellij.psi.impl.source.codeStyle.CodeEditUtil; import com.intellij.psi.impl.source.codeStyle.ImportHelper; import com.intellij.psi.impl.source.jsp.jspJava.JspCodeBlock; import com.intellij.psi.impl.source.tree.ChildRole; import com.intellij.psi.impl.source.tree.CompositeElement; import com.intellij.psi.impl.source.tree.ElementType; import com.intellij.psi.impl.source.tree.JavaDocElementType; import com.intellij.psi.javadoc.PsiDocTag; import com.intellij.psi.tree.IElementType; import com.intellij.psi.tree.TokenSet; public class JavaSpacePropertyProcessor extends PsiElementVisitor { private PsiElement myParent; private int myRole1; private int myRole2; private final CodeStyleSettings mySettings; private Spacing myResult; private ASTNode myChild1; private ASTNode myChild2; private final ImportHelper myImportHelper; public JavaSpacePropertyProcessor(ASTNode child, final CodeStyleSettings settings) { init(child); mySettings = settings; myImportHelper = new ImportHelper(mySettings); if (myChild2 != null && mySettings.KEEP_FIRST_COLUMN_COMMENT && ElementType.COMMENT_BIT_SET.isInSet(myChild2.getElementType())) { myResult = Spacing .createKeepingFirstColumnSpacing(0, Integer.MAX_VALUE, mySettings.KEEP_LINE_BREAKS, getKeepBlankLines()); } else { if (myParent != null) { myParent.accept(this); if (myResult == null) { final ASTNode prev = getPrevElementType(myChild2); if (prev != null && prev.getElementType() == ElementType.END_OF_LINE_COMMENT) { myResult = Spacing .createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else if (!CodeEditUtil.canStickChildrenTogether(myChild1, myChild2)) { myResult = Spacing .createSpacing(1, Integer.MIN_VALUE, 0, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } } } } } private int getKeepBlankLines() { if (myChild2.getElementType() == ElementType.RBRACE) return mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE; if (SourceTreeToPsiMap.psiElementToTree(myParent).getElementType() == ElementType .CLASS) { return mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS; } return mySettings.KEEP_BLANK_LINES_IN_CODE; } private void init(final ASTNode child) { if (child == null) return; ASTNode treePrev = child.getTreePrev(); while (isWhiteSpace(treePrev)) { treePrev = treePrev.getTreePrev(); } if (treePrev == null) { init(child.getTreeParent()); } else { myChild2 = child; myChild1 = treePrev; final CompositeElement parent = (CompositeElement)myChild1.getTreeParent(); myParent = SourceTreeToPsiMap.treeElementToPsi(parent); myRole1 = parent.getChildRole(myChild1); myRole2 = parent.getChildRole(myChild2); } } private boolean isWhiteSpace(final ASTNode treePrev) { if (treePrev == null) return false; return treePrev.getElementType() == ElementType.WHITE_SPACE || treePrev.getTextLength() == 0; } public Spacing getResult() { return myResult; } public void visitArrayAccessExpression(PsiArrayAccessExpression expression) { if (myRole1 == ChildRole.ARRAY && myRole2 == ChildRole.LBRACKET) { final boolean space = false; createSpaceInCode(space); } else if (myRole1 == ChildRole.LBRACKET || myRole2 == ChildRole.RBRACKET) { createSpaceInCode(mySettings.SPACE_WITHIN_BRACKETS); } } private void createSpaceInCode(final boolean space) { createSpaceProperty(space, mySettings.KEEP_BLANK_LINES_IN_CODE); } public void visitNewExpression(PsiNewExpression expression) { if (myRole2 == ChildRole.ARRAY_INITIALIZER) { createSpaceInCode(mySettings.SPACE_BEFORE_ARRAY_INITIALIZER_LBRACE); } else if (myRole1 == ChildRole.NEW_KEYWORD) { createSpaceInCode(true); } else if (myRole2 == ChildRole.ARGUMENT_LIST) { createSpaceInCode(mySettings.SPACE_BEFORE_METHOD_CALL_PARENTHESES); } } public void visitArrayInitializerExpression(PsiArrayInitializerExpression expression) { if (myRole1 == ChildRole.LBRACE) { createSpaceInCode(mySettings.SPACE_WITHIN_ARRAY_INITIALIZER_BRACES); } else if (myRole2 == ChildRole.LBRACE) { createSpaceInCode(mySettings.SPACE_BEFORE_ARRAY_INITIALIZER_LBRACE); } else if (myRole2 == ChildRole.RBRACE) { createSpaceProperty(mySettings.SPACE_WITHIN_ARRAY_INITIALIZER_BRACES, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } else if (myRole1 == ChildRole.COMMA) { createSpaceInCode(mySettings.SPACE_AFTER_COMMA); } else if (myRole2 == ChildRole.COMMA) { createSpaceInCode(mySettings.SPACE_BEFORE_COMMA); } } public void visitClass(PsiClass aClass) { if (myChild1.getElementType() == JavaDocElementType.DOC_COMMENT) { myResult = Spacing .createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); return; } if (myRole2 == ChildRole.LBRACE) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_CLASS_LBRACE, mySettings.CLASS_BRACE_STYLE, new TextRange(myParent.getTextRange().getStartOffset(), myChild2.getTextRange().getStartOffset()), false); } else if (myRole2 == ChildRole.METHOD || myRole2 == ChildRole.CLASS_INITIALIZER) { if (myRole1 == ChildRole.LBRACE) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, 0); } else { final int blankLines = mySettings.BLANK_LINES_AROUND_METHOD + 1; myResult = Spacing .createSpacing(0, 0, blankLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } else if (myRole1 == ChildRole.METHOD || myRole1 == ChildRole.CLASS_INITIALIZER) { if (myRole2 == ChildRole.RBRACE) { myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } else { final int blankLines = mySettings.BLANK_LINES_AROUND_METHOD + 1; myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, blankLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } else if (myRole1 == ChildRole.CLASS) { if (myRole2 == ChildRole.RBRACE) { myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } else { final int blankLines = mySettings.BLANK_LINES_AROUND_CLASS + 1; myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, blankLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } else if (myRole2 == ChildRole.CLASS) { if (myRole1 == ChildRole.LBRACE) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, 0); } else { final int blankLines = mySettings.BLANK_LINES_AROUND_CLASS + 1; myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, blankLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } else if (myRole2 == ChildRole.FIELD) { if (myRole1 == ChildRole.LBRACE) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, 0); } else { final int blankLines = mySettings.BLANK_LINES_AROUND_FIELD + 1; myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, blankLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } else if (myRole1 == ChildRole.FIELD) { if (myRole2 == ChildRole.RBRACE) { myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } else { final int blankLines = mySettings.BLANK_LINES_AROUND_FIELD + 1; myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, blankLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } else if (myRole1 == ChildRole.MODIFIER_LIST) { processModifierList(); } else if (myRole1 == ChildRole.LBRACE && myRole2 == ChildRole.RBRACE) { myResult = Spacing .createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } else if (myRole2 == ChildRole.EXTENDS_LIST || myRole2 == ChildRole.IMPLEMENTS_LIST) { createSpaceInCode(true); } else if (myRole2 == ChildRole.TYPE_PARAMETER_LIST) { createSpaceInCode(mySettings.SPACE_BEFORE_TYPE_PARAMETER_LIST); } else if (myRole2 == ChildRole.ARGUMENT_LIST) { createSpaceInCode(false); } } public void visitImportList(PsiImportList list) { if (ElementType.IMPORT_STATEMENT_BASE_BIT_SET.isInSet(myChild1.getElementType()) && ElementType.IMPORT_STATEMENT_BASE_BIT_SET.isInSet(myChild2.getElementType())) { int emptyLines = myImportHelper.getEmptyLinesBetween( (PsiImportStatementBase)SourceTreeToPsiMap.treeElementToPsi(myChild1), (PsiImportStatementBase)SourceTreeToPsiMap.treeElementToPsi(myChild2) ) + 1; myResult = Spacing.createSpacing(0, 0, emptyLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } public void visitFile(PsiFile file) { if (myRole1 == ChildRole.PACKAGE_STATEMENT) { int lf = mySettings.BLANK_LINES_AFTER_PACKAGE + 1; myResult = Spacing .createSpacing(0, 0, lf, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } else if (myRole2 == ChildRole.PACKAGE_STATEMENT) { int lf = mySettings.BLANK_LINES_BEFORE_PACKAGE + 1; myResult = Spacing .createSpacing(0, 0, lf, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } else if (myRole1 == ChildRole.IMPORT_LIST) { int lf = mySettings.BLANK_LINES_AFTER_IMPORTS + 1; myResult = Spacing .createSpacing(0, 0, lf, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } else if (myRole2 == ChildRole.IMPORT_LIST) { int lf = mySettings.BLANK_LINES_BEFORE_IMPORTS + 1; myResult = Spacing .createSpacing(0, 0, lf, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } else if (myRole2 == ChildRole.CLASS) { int lf = mySettings.BLANK_LINES_AROUND_CLASS + 1; myResult = Spacing .createSpacing(0, 0, lf, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } public void visitWhileStatement(PsiWhileStatement statement) { if (myRole2 == ChildRole.LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_WHILE_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH || myRole2 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_WHILE_PARENTHESES); } else if (myRole2 == ChildRole.LOOP_BODY) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_WHILE_LBRACE, mySettings.BRACE_STYLE, new TextRange(myParent.getTextRange().getStartOffset(), myChild2.getTextRange().getStartOffset()), mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE); } } public void visitDoWhileStatement(PsiDoWhileStatement statement) { if (myRole1 == ChildRole.WHILE_KEYWORD && myRole2 == ChildRole.LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_WHILE_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH || myRole2 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_WHILE_PARENTHESES); } else if (myRole2 == ChildRole.LOOP_BODY) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_DO_LBRACE, mySettings.BRACE_STYLE, null, mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE); } else if (myRole1 == ChildRole.LOOP_BODY) { processOnNewLineCondition(mySettings.WHILE_ON_NEW_LINE); } } private void processOnNewLineCondition(final boolean onNewLine) { if (onNewLine) { if (!mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { myResult = Spacing .createDependentLFSpacing(0, 1, myParent.getTextRange(), mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } } else { myResult = myResult = Spacing .createSpacing(1, 1, 0, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } } public void visitTryStatement(PsiTryStatement statement) { if (myRole2 == ChildRole.FINALLY_KEYWORD) { processOnNewLineCondition(mySettings.FINALLY_ON_NEW_LINE); } else if (myRole2 == ChildRole.FINALLY_BLOCK || myRole2 == ChildRole.TRY_BLOCK) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_FINALLY_LBRACE, mySettings.BRACE_STYLE, null, mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE); } else if (myRole2 == ChildRole.CATCH_SECTION) { processOnNewLineCondition(mySettings.CATCH_ON_NEW_LINE); } } public void visitForeachStatement(PsiForeachStatement statement) { if (myRole1 == ChildRole.FOR_KEYWORD && myRole2 == ChildRole.LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_FOR_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH || myRole2 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_FOR_PARENTHESES); } else if (myRole1 == ChildRole.FOR_ITERATION_PARAMETER && myRole2 == ChildRole.COLON) { createSpaceInCode(true); } else if (myRole1 == ChildRole.COLON && myRole2 == ChildRole.FOR_ITERATED_VALUE) { createSpaceInCode(true); } else if (myRole2 == ChildRole.LOOP_BODY) { if (myChild2.getElementType() == ElementType.BLOCK_STATEMENT) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_FOR_LBRACE, mySettings.BRACE_STYLE, new TextRange(myParent.getTextRange().getStartOffset(), myChild2.getTextRange().getStartOffset()), mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE); } else if (mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE) { myResult = Spacing .createDependentLFSpacing(1, 1, myParent.getTextRange(), false, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { myResult = Spacing.createSpacing(0, 0, 1, false, mySettings.KEEP_BLANK_LINES_IN_CODE); } } } public void visitAssignmentExpression(PsiAssignmentExpression expression) { if (myRole1 == ChildRole.OPERATION_SIGN || myRole2 == ChildRole.OPERATION_SIGN) { createSpaceInCode(mySettings.SPACE_AROUND_ASSIGNMENT_OPERATORS); } } public void visitParenthesizedExpression(PsiParenthesizedExpression expression) { if (myRole1 == ChildRole.LPARENTH) { createParenSpace(mySettings.PARENTHESES_EXPRESSION_LPAREN_WRAP, mySettings.SPACE_WITHIN_PARENTHESES); } else if (myRole2 == ChildRole.RPARENTH) { createParenSpace(mySettings.PARENTHESES_EXPRESSION_RPAREN_WRAP, mySettings.SPACE_WITHIN_PARENTHESES); } } public void visitCodeBlock(PsiCodeBlock block) { processCodeBlock(keepInOneLine(block), block.getTextRange()); } public void visitCodeFragment(PsiCodeFragment codeFragment) { final TokenSet statementBitSet = ElementType.STATEMENT_BIT_SET; if (statementBitSet.isInSet(myChild1.getElementType()) && statementBitSet.isInSet(myChild2.getElementType())) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } } private void processCodeBlock(final boolean keepInOneLine, final TextRange textRange) { if (myParent instanceof JspCodeBlock) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else if (myRole1 == ChildRole.NONE || myRole2 == ChildRole.NONE) { if (myChild1.getElementType() == ElementType.END_OF_LINE_COMMENT) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { myResult = null; } } else if (myRole1 == ChildRole.LBRACE) { if (!keepInOneLine) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { myResult = Spacing .createDependentLFSpacing(0, 1, textRange, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } } else if (myRole2 == ChildRole.RBRACE) { if (!keepInOneLine) { myResult = Spacing .createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } else { myResult = Spacing .createDependentLFSpacing(0, 1, textRange, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } } else if (myChild1.getElementType() == ElementType.SWITCH_LABEL_STATEMENT && myChild2.getElementType() == ElementType.BLOCK_STATEMENT) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_SWITCH_LBRACE, mySettings.BRACE_STYLE, null, false); } else if (myRole1 == ChildRole.STATEMENT_IN_BLOCK && myRole2 == ChildRole.STATEMENT_IN_BLOCK) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } } private boolean keepInOneLine(final PsiCodeBlock block) { if (block.getParent() instanceof PsiMethod) { return mySettings.KEEP_SIMPLE_METHODS_IN_ONE_LINE; } else { return mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE; } } public void visitIfStatement(PsiIfStatement statement) { if (myRole2 == ChildRole.ELSE_KEYWORD) { if (myChild1.getElementType() != ElementType.BLOCK_STATEMENT) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { if (mySettings.ELSE_ON_NEW_LINE) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { myResult = createNonLFSpace(1, false, null); } } } else if (myRole1 == ChildRole.ELSE_KEYWORD) { if (myChild2.getElementType() == ElementType.IF_STATEMENT) { if (mySettings.SPECIAL_ELSE_IF_TREATMENT) { myResult = createNonLFSpace(1, true, null); } else { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } } else { if (myChild2.getElementType() == ElementType.BLOCK_STATEMENT) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_ELSE_LBRACE, mySettings.BRACE_STYLE, new TextRange(myParent.getTextRange().getStartOffset(), myChild2.getTextRange().getStartOffset()), mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE); } else { createSpaceInCode(mySettings.SPACE_BEFORE_ELSE_LBRACE); } } } else if (myChild2.getElementType() == ElementType.BLOCK_STATEMENT) { boolean space = myRole2 == ChildRole.ELSE_BRANCH ? mySettings.SPACE_BEFORE_ELSE_LBRACE : mySettings.SPACE_BEFORE_IF_LBRACE; myResult = getSpaceBeforeLBrace(space, mySettings.BRACE_STYLE, new TextRange(myParent.getTextRange().getStartOffset(), myChild2.getTextRange().getStartOffset()), mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE); } else if (myRole2 == ChildRole.LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_IF_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_IF_PARENTHESES); } else if (myRole2 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_IF_PARENTHESES); } else if (myRole2 == ChildRole.THEN_BRANCH) { createSpaceInCode(true); } } private Spacing createNonLFSpace(int spaces, final boolean keepLineBreaks, final TextRange dependantRange) { final ASTNode prev = getPrevElementType(myChild2); if (prev != null && prev.getElementType() == ElementType.END_OF_LINE_COMMENT) { return Spacing .createSpacing(0, Integer.MAX_VALUE, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else if (dependantRange != null) { return Spacing .createDependentLFSpacing(spaces, spaces, dependantRange, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { return Spacing.createSpacing(spaces, spaces, 0, keepLineBreaks, mySettings.KEEP_BLANK_LINES_IN_CODE); } } private ASTNode getPrevElementType(final ASTNode child) { return FormatterUtil.getLeafNonSpaceBefore(child); } private Spacing getSpaceBeforeLBrace(final boolean spaceBeforeLbrace, int braceStyle, TextRange dependantRange, boolean keepOneLine) { if (dependantRange != null && braceStyle == CodeStyleSettings.NEXT_LINE_IF_WRAPPED) { int space = spaceBeforeLbrace ? 1 : 0; return createNonLFSpace(space, false, dependantRange); } else if (braceStyle == CodeStyleSettings.END_OF_LINE) { int space = spaceBeforeLbrace ? 1 : 0; return createNonLFSpace(space, false, null); } else if (keepOneLine) { int space = spaceBeforeLbrace ? 1 : 0; return Spacing .createDependentLFSpacing(space, space, myParent.getTextRange(), mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { return Spacing.createSpacing(0, 0, 1, false, mySettings.KEEP_BLANK_LINES_IN_CODE); } } public void visitBinaryExpression(PsiBinaryExpression expression) { PsiJavaToken sign = expression.getOperationSign(); IElementType i = sign.getTokenType(); if (myRole1 == ChildRole.OPERATION_SIGN || myRole2 == ChildRole.OPERATION_SIGN) { if (i == JavaTokenType.OROR || i == JavaTokenType.ANDAND) { createSpaceInCode(mySettings.SPACE_AROUND_LOGICAL_OPERATORS); } else if (i == JavaTokenType.OR || i == JavaTokenType.AND || i == JavaTokenType.XOR) { createSpaceInCode(mySettings.SPACE_AROUND_BITWISE_OPERATORS); } else if (i == JavaTokenType.EQEQ || i == JavaTokenType.NE) { createSpaceInCode(mySettings.SPACE_AROUND_EQUALITY_OPERATORS); } else if (i == JavaTokenType.GT || i == JavaTokenType.LT || i == JavaTokenType.GE || i == JavaTokenType.LE) { createSpaceInCode(mySettings.SPACE_AROUND_RELATIONAL_OPERATORS); } else if (i == JavaTokenType.PLUS || i == JavaTokenType.MINUS) { createSpaceInCode(mySettings.SPACE_AROUND_ADDITIVE_OPERATORS); } else if (i == JavaTokenType.ASTERISK || i == JavaTokenType.DIV || i == JavaTokenType.PERC) { createSpaceInCode(mySettings.SPACE_AROUND_MULTIPLICATIVE_OPERATORS); } else if (i == JavaTokenType.LTLT || i == JavaTokenType.GTGT || i == JavaTokenType.GTGTGT) { createSpaceInCode(mySettings.SPACE_AROUND_SHIFT_OPERATORS); } else { createSpaceInCode(false); } } } public void visitField(PsiField field) { if (myChild1.getElementType() == JavaDocElementType.DOC_COMMENT) { myResult = Spacing .createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); return; } if (myRole1 == ChildRole.INITIALIZER_EQ || myRole2 == ChildRole.INITIALIZER_EQ) { createSpaceInCode(mySettings.SPACE_AROUND_ASSIGNMENT_OPERATORS); } else if (myRole1 == ChildRole.TYPE || myRole2 == ChildRole.TYPE) { createSpaceInCode(true); } else if (myChild2.getElementType() == ElementType.SEMICOLON) { createSpaceProperty(false, false, 0); } else if (myRole1 == ChildRole.MODIFIER_LIST) { createSpaceProperty(true, false, 0); } } public void visitLocalVariable(PsiLocalVariable variable) { if (myRole1 == ChildRole.INITIALIZER_EQ || myRole2 == ChildRole.INITIALIZER_EQ) { createSpaceInCode(mySettings.SPACE_AROUND_ASSIGNMENT_OPERATORS); } else if (myRole1 == ChildRole.MODIFIER_LIST) { createSpaceInCode(true); } else if (myRole2 == ChildRole.TYPE_REFERENCE) { createSpaceInCode(true); } else if (myChild2.getElementType() == ElementType.SEMICOLON) { createSpaceProperty(false, false, 0); } } public void visitMethod(PsiMethod method) { if (myChild1.getElementType() == JavaDocElementType.DOC_COMMENT) { myResult = Spacing .createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); return; } if (myRole2 == ChildRole.PARAMETER_LIST) { createSpaceInCode(mySettings.SPACE_BEFORE_METHOD_PARENTHESES); } else if (myRole1 == ChildRole.PARAMETER_LIST && myRole2 == ChildRole.THROWS_LIST) { createSpaceInCode(true); } else if (myRole2 == ChildRole.METHOD_BODY) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_METHOD_LBRACE, mySettings.METHOD_BRACE_STYLE, new TextRange(myParent.getTextRange().getStartOffset(), myChild2.getTextRange().getStartOffset()), mySettings.KEEP_SIMPLE_METHODS_IN_ONE_LINE); } else if (myRole1 == ChildRole.MODIFIER_LIST) { processModifierList(); } else if (ElementType.COMMENT_BIT_SET.isInSet(myChild1.getElementType()) && (myRole2 == ChildRole.MODIFIER_LIST || myRole2 == ChildRole.TYPE_REFERENCE)) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, 0); } else if (myRole2 == ChildRole.DEFAULT_KEYWORD) { createSpaceInCode(true); } else if (myRole2 == ChildRole.ANNOTATION_DEFAULT_VALUE) { createSpaceInCode(true); } else if (myChild2.getElementType() == ElementType.SEMICOLON) { createSpaceInCode(false); } } private void processModifierList() { if (mySettings.MODIFIER_LIST_WRAP) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { createSpaceProperty(true, false, 0); } } public void visitModifierList(PsiModifierList list) { createSpaceInCode(true); } public void visitParameterList(PsiParameterList list) { if (myRole1 == ChildRole.LPARENTH && myRole2 == ChildRole.RPARENTH) { createParenSpace(mySettings.METHOD_PARAMETERS_LPAREN_ON_NEXT_LINE, false); } else if (myRole2 == ChildRole.RPARENTH) { createParenSpace(mySettings.METHOD_PARAMETERS_RPAREN_ON_NEXT_LINE, mySettings.SPACE_WITHIN_METHOD_PARENTHESES); } else if (myRole2 == ChildRole.COMMA) { createSpaceInCode(false); } else if (myRole1 == ChildRole.LPARENTH) { createParenSpace(mySettings.METHOD_PARAMETERS_LPAREN_ON_NEXT_LINE, mySettings.SPACE_WITHIN_METHOD_PARENTHESES); } else if (myRole1 == ChildRole.COMMA) { createSpaceInCode(true); } } private void createParenSpace(final boolean onNewLine, final boolean space) { createParenSpace(onNewLine, space, myParent.getTextRange()); } private void createParenSpace(final boolean onNewLine, final boolean space, final TextRange dependance) { if (onNewLine) { final int spaces = space ? 1 : 0; myResult = Spacing .createDependentLFSpacing(spaces, spaces, dependance, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { createSpaceInCode(space); } } public void visitElement(PsiElement element) { if (myRole1 == ChildRole.MODIFIER_LIST) { processModifierList(); } } public void visitExpressionList(PsiExpressionList list) { if (myRole1 == ChildRole.LPARENTH && myRole2 == ChildRole.RPARENTH) { createParenSpace(mySettings.CALL_PARAMETERS_LPAREN_ON_NEXT_LINE, false); } else if (myRole2 == ChildRole.RPARENTH) { createParenSpace(mySettings.CALL_PARAMETERS_RPAREN_ON_NEXT_LINE, myRole1 == ChildRole.COMMA || mySettings.SPACE_WITHIN_METHOD_CALL_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH) { createParenSpace(mySettings.CALL_PARAMETERS_LPAREN_ON_NEXT_LINE, mySettings.SPACE_WITHIN_METHOD_CALL_PARENTHESES); } else if (myRole1 == ChildRole.COMMA) { createSpaceInCode(true); } else if (myRole2 == ChildRole.COMMA) { createSpaceInCode(false); } } public void visitSynchronizedStatement(PsiSynchronizedStatement statement) { if (myRole1 == ChildRole.SYNCHRONIZED_KEYWORD || myRole2 == ChildRole.LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_SYNCHRONIZED_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH || myRole2 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_SYNCHRONIZED_PARENTHESES); } else if (myRole2 == ChildRole.BLOCK) { createSpaceInCode(mySettings.SPACE_BEFORE_SYNCHRONIZED_LBRACE); } } public void visitSwitchStatement(PsiSwitchStatement statement) { if (myRole1 == ChildRole.SWITCH_KEYWORD && myRole2 == ChildRole.LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_SWITCH_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH || myRole2 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_SWITCH_PARENTHESES); } else if (myRole2 == ChildRole.SWITCH_BODY) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_SWITCH_LBRACE, mySettings.BRACE_STYLE, null, mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE); } } public void visitForStatement(PsiForStatement statement) { if (myRole2 == ChildRole.LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_FOR_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH) { ASTNode rparenth = findFrom(myChild2, ElementType.RPARENTH, true); if (rparenth == null) { createSpaceInCode(mySettings.SPACE_WITHIN_FOR_PARENTHESES); } else { createParenSpace(mySettings.FOR_STATEMENT_LPAREN_ON_NEXT_LINE, mySettings.SPACE_WITHIN_FOR_PARENTHESES, new TextRange(myChild1.getTextRange().getStartOffset(), rparenth.getTextRange().getEndOffset())); } } else if (myRole2 == ChildRole.RPARENTH) { ASTNode lparenth = findFrom(myChild2, ElementType.LPARENTH, false); if (lparenth == null) { createSpaceInCode(mySettings.SPACE_WITHIN_FOR_PARENTHESES); } else { createParenSpace(mySettings.FOR_STATEMENT_RPAREN_ON_NEXT_LINE, mySettings.SPACE_WITHIN_FOR_PARENTHESES, new TextRange(lparenth.getTextRange().getStartOffset(), myChild2.getTextRange().getEndOffset())); } } else if (myRole1 == ChildRole.FOR_INITIALIZATION) { createSpaceInCode(mySettings.SPACE_AFTER_SEMICOLON); } else if (myRole1 == ChildRole.CONDITION) { createSpaceInCode(mySettings.SPACE_BEFORE_SEMICOLON); } else if (myRole1 == ChildRole.FOR_SEMICOLON) { createSpaceInCode(mySettings.SPACE_AFTER_SEMICOLON); } else if (myRole2 == ChildRole.LOOP_BODY) { if (myChild2.getElementType() == ElementType.BLOCK_STATEMENT) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_FOR_LBRACE, mySettings.BRACE_STYLE, new TextRange(myParent.getTextRange().getStartOffset(), myChild2.getTextRange().getStartOffset()), mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE); } else if (mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE) { myResult = Spacing .createDependentLFSpacing(1, 1, myParent.getTextRange(), false, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { myResult = Spacing.createSpacing(0, 0, 1, false, mySettings.KEEP_BLANK_LINES_IN_CODE); } } } private ASTNode findFrom(ASTNode current, final IElementType expected, boolean forward) { while (current != null) { if (current.getElementType() == expected) return current; current = forward ? current.getTreeNext() : current.getTreePrev(); } return null; } public void visitCatchSection(PsiCatchSection section) { if (myRole2 == ChildRole.CATCH_BLOCK) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_CATCH_LBRACE, mySettings.BRACE_STYLE, null, mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE); } else if (myRole2 == ChildRole.CATCH_BLOCK_PARAMETER_LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_CATCH_PARENTHESES); } else if (myRole1 == ChildRole.CATCH_BLOCK_PARAMETER_LPARENTH || myRole2 == ChildRole.CATCH_BLOCK_PARAMETER_RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_CATCH_PARENTHESES); } } public void visitReferenceParameterList(PsiReferenceParameterList list) { if (myRole1 == ChildRole.LT_IN_TYPE_LIST && myRole2 == ChildRole.TYPE_IN_REFERENCE_PARAMETER_LIST) { createSpaceInCode(false); } else if (myRole1 == ChildRole.LT_IN_TYPE_LIST && myRole2 == ChildRole.GT_IN_TYPE_LIST) { createSpaceInCode(true); } else if (myRole1 == ChildRole.TYPE_IN_REFERENCE_PARAMETER_LIST && myRole2 == ChildRole.COMMA) { createSpaceInCode(false); } else if (myRole1 == ChildRole.COMMA && myRole2 == ChildRole.TYPE_IN_REFERENCE_PARAMETER_LIST) { createSpaceInCode(true); } else if (myRole2 == ChildRole.GT_IN_TYPE_LIST) { createSpaceInCode(false); } } public void visitTypeCastExpression(PsiTypeCastExpression expression) { if (myRole1 == ChildRole.LPARENTH || myRole2 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_CAST_PARENTHESES); } else if (myRole1 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_AFTER_TYPE_CAST); } } private void createSpaceProperty(boolean space, int keepBlankLines) { createSpaceProperty(space, mySettings.KEEP_LINE_BREAKS, keepBlankLines); } private void createSpaceProperty(boolean space, boolean keepLineBreaks, final int keepBlankLines) { final ASTNode prev = getPrevElementType(myChild2); if (prev != null && prev.getElementType() == ElementType.END_OF_LINE_COMMENT) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { if (!space && !CodeEditUtil.canStickChildrenTogether(myChild1, myChild2)) { space = true; } if (!keepLineBreaks && myRole2 == ChildRole.NONE) { keepLineBreaks = true; } myResult = Spacing.createSpacing(space ? 1 : 0, space ? 1 : 0, 0, keepLineBreaks, keepBlankLines); } } public void visitReferenceList(PsiReferenceList list) { if (myRole1 == ChildRole.COMMA) { createSpaceInCode(true); } else if (myRole2 == ChildRole.COMMA) { createSpaceInCode(false); } else if (myRole1 == ChildRole.EXTENDS_KEYWORD || myRole2 == ChildRole.EXTENDS_KEYWORD) { createSpaceInCode(true); } else if (myRole1 == ChildRole.AMPERSAND_IN_BOUNDS_LIST | myRole2 == ChildRole.AMPERSAND_IN_BOUNDS_LIST) { createSpaceInCode(true); } else if (myRole1 == ChildRole.IMPLEMENTS_KEYWORD || myRole2 == ChildRole.IMPLEMENTS_KEYWORD) { createSpaceInCode(true); } else if (myRole1 == ChildRole.THROWS_KEYWORD) { createSpaceInCode(true); } } public void visitReferenceExpression(PsiReferenceExpression expression) { visitReferenceElement(expression); } public void visitConditionalExpression(PsiConditionalExpression expression) { if (myRole2 == ChildRole.QUEST) { createSpaceInCode(mySettings.SPACE_BEFORE_QUEST); } else if (myRole1 == ChildRole.QUEST) { createSpaceInCode(mySettings.SPACE_AFTER_QUEST); } else if (myRole2 == ChildRole.COLON) { createSpaceInCode(mySettings.SPACE_BEFORE_COLON); } else if (myRole1 == ChildRole.COLON) { createSpaceInCode(mySettings.SPACE_AFTER_COLON); } } public void visitStatement(PsiStatement statement) { if (myRole2 == ChildRole.CLOSING_SEMICOLON) { createSpaceInCode(false); } } public void visitReturnStatement(PsiReturnStatement statement) { if (myChild2.getElementType() == ElementType.SEMICOLON) { createSpaceInCode(false); } else if (myRole1 == ChildRole.RETURN_KEYWORD) { createSpaceInCode(true); } else { super.visitReturnStatement(statement); } } public void visitMethodCallExpression(PsiMethodCallExpression expression) { if (myRole2 == ChildRole.ARGUMENT_LIST) { createSpaceInCode(mySettings.SPACE_BEFORE_METHOD_CALL_PARENTHESES); } } public void visitTypeParameter(PsiTypeParameter classParameter) { createSpaceInCode(true); } public void visitDeclarationStatement(PsiDeclarationStatement declarationStatement) { if (myRole2 == ChildRole.COMMA) { createSpaceInCode(false); } } public void visitTypeParameterList(PsiTypeParameterList list) { if (myRole2 == ChildRole.GT_IN_TYPE_LIST) { createSpaceInCode(false); } } public void visitReferenceElement(PsiJavaCodeReferenceElement reference) { if (myRole2 == ChildRole.REFERENCE_PARAMETER_LIST) { createSpaceInCode(mySettings.SPACE_BEFORE_TYPE_PARAMETER_LIST); } } public void visitAnnotation(PsiAnnotation annotation) { if (myRole2 == ChildRole.PARAMETER_LIST) { createSpaceInCode(mySettings.SPACE_BEFORE_ANOTATION_PARAMETER_LIST); } else if (myChild1.getElementType() == ElementType.AT && myChild2.getElementType() == ElementType.JAVA_CODE_REFERENCE) { createSpaceInCode(false); } } public void visitClassInitializer(PsiClassInitializer initializer) { if (myChild2.getElementType() == ElementType.CODE_BLOCK) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_METHOD_LBRACE, mySettings.BRACE_STYLE, null, false); } } public void visitAnnotationParameterList(PsiAnnotationParameterList list) { if (myRole1 == ChildRole.LPARENTH && myRole2 == ChildRole.RPARENTH) { createSpaceInCode(false); } else if (myRole1 == ChildRole.LPARENTH || myRole2 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_ANNOTATION_PARENTHESES); } else if (myRole2 == ChildRole.COMMA) { createSpaceInCode(false); } else if (myRole1 == ChildRole.COMMA) { createSpaceInCode(true); } } public void visitNameValuePair(PsiNameValuePair pair) { if (myRole1 == ChildRole.OPERATION_SIGN || myRole2 == ChildRole.OPERATION_SIGN) { createSpaceInCode(mySettings.SPACE_AROUND_ASSIGNMENT_OPERATORS); } } public void visitAnnotationArrayInitializer(PsiArrayInitializerMemberValue initializer) { if (myRole1 == ChildRole.LBRACE && myRole2 == ChildRole.RBRACE) { createSpaceInCode(false); } else if (myRole1 == ChildRole.LBRACE || myRole2 == ChildRole.RBRACE) { createSpaceInCode(mySettings.SPACE_WITHIN_ARRAY_INITIALIZER_BRACES); } else if (myRole1 == ChildRole.COMMA) { createSpaceInCode(true); } else if (myRole2 == ChildRole.COMMA) { createSpaceInCode(false); } } public void visitEnumConstant(PsiEnumConstant enumConstant) { if (myRole2 == ChildRole.ARGUMENT_LIST) { createSpaceInCode(mySettings.SPACE_BEFORE_METHOD_CALL_PARENTHESES); } } public void visitDocTag(PsiDocTag tag) { if (myChild1.getElementType() == ElementType.DOC_TAG_NAME && myChild2.getElementType() == ElementType.DOC_TAG_VALUE_TOKEN) { myResult = Spacing.createSpacing(1, 1, 0, false, 0); } } }
source/com/intellij/psi/formatter/java/JavaSpacePropertyProcessor.java
package com.intellij.psi.formatter.java; import com.intellij.codeFormatting.general.FormatterUtil; import com.intellij.lang.ASTNode; import com.intellij.formatting.Spacing; import com.intellij.openapi.util.TextRange; import com.intellij.psi.*; import com.intellij.psi.codeStyle.CodeStyleSettings; import com.intellij.psi.impl.source.SourceTreeToPsiMap; import com.intellij.psi.impl.source.codeStyle.CodeEditUtil; import com.intellij.psi.impl.source.codeStyle.ImportHelper; import com.intellij.psi.impl.source.jsp.jspJava.JspCodeBlock; import com.intellij.psi.impl.source.tree.ChildRole; import com.intellij.psi.impl.source.tree.CompositeElement; import com.intellij.psi.impl.source.tree.ElementType; import com.intellij.psi.impl.source.tree.JavaDocElementType; import com.intellij.psi.javadoc.PsiDocTag; import com.intellij.psi.tree.IElementType; import com.intellij.psi.tree.TokenSet; public class JavaSpacePropertyProcessor extends PsiElementVisitor { private PsiElement myParent; private int myRole1; private int myRole2; private final CodeStyleSettings mySettings; private Spacing myResult; private ASTNode myChild1; private ASTNode myChild2; private final ImportHelper myImportHelper; public JavaSpacePropertyProcessor(ASTNode child, final CodeStyleSettings settings) { init(child); mySettings = settings; myImportHelper = new ImportHelper(mySettings); if (myChild2 != null && mySettings.KEEP_FIRST_COLUMN_COMMENT && ElementType.COMMENT_BIT_SET.isInSet(myChild2.getElementType())) { myResult = Spacing .createKeepingFirstColumnSpacing(0, Integer.MAX_VALUE, mySettings.KEEP_LINE_BREAKS, getKeepBlankLines()); } else { if (myParent != null) { myParent.accept(this); if (myResult == null) { final ASTNode prev = getPrevElementType(myChild2); if (prev != null && prev.getElementType() == ElementType.END_OF_LINE_COMMENT) { myResult = Spacing .createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else if (!CodeEditUtil.canStickChildrenTogether(myChild1, myChild2)) { myResult = Spacing .createSpacing(1, Integer.MIN_VALUE, 0, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } } } } } private int getKeepBlankLines() { if (myChild2.getElementType() == ElementType.RBRACE) return mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE; if (SourceTreeToPsiMap.psiElementToTree(myParent).getElementType() == ElementType .CLASS) { return mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS; } return mySettings.KEEP_BLANK_LINES_IN_CODE; } private void init(final ASTNode child) { if (child == null) return; ASTNode treePrev = child.getTreePrev(); while (isWhiteSpace(treePrev)) { treePrev = treePrev.getTreePrev(); } if (treePrev == null) { init(child.getTreeParent()); } else { myChild2 = child; myChild1 = treePrev; final CompositeElement parent = (CompositeElement)myChild1.getTreeParent(); myParent = SourceTreeToPsiMap.treeElementToPsi(parent); myRole1 = parent.getChildRole(myChild1); myRole2 = parent.getChildRole(myChild2); } } private boolean isWhiteSpace(final ASTNode treePrev) { if (treePrev == null) return false; return treePrev.getElementType() == ElementType.WHITE_SPACE || treePrev.getTextLength() == 0; } public Spacing getResult() { return myResult; } public void visitArrayAccessExpression(PsiArrayAccessExpression expression) { if (myRole1 == ChildRole.ARRAY && myRole2 == ChildRole.LBRACKET) { final boolean space = false; createSpaceInCode(space); } else if (myRole1 == ChildRole.LBRACKET || myRole2 == ChildRole.RBRACKET) { createSpaceInCode(mySettings.SPACE_WITHIN_BRACKETS); } } private void createSpaceInCode(final boolean space) { createSpaceProperty(space, mySettings.KEEP_BLANK_LINES_IN_CODE); } public void visitNewExpression(PsiNewExpression expression) { if (myRole2 == ChildRole.ARRAY_INITIALIZER) { createSpaceInCode(mySettings.SPACE_BEFORE_ARRAY_INITIALIZER_LBRACE); } else if (myRole1 == ChildRole.NEW_KEYWORD) { createSpaceInCode(true); } else if (myRole2 == ChildRole.ARGUMENT_LIST) { createSpaceInCode(mySettings.SPACE_BEFORE_METHOD_CALL_PARENTHESES); } } public void visitArrayInitializerExpression(PsiArrayInitializerExpression expression) { if (myRole1 == ChildRole.LBRACE) { createSpaceInCode(mySettings.SPACE_WITHIN_ARRAY_INITIALIZER_BRACES); } else if (myRole2 == ChildRole.LBRACE) { createSpaceInCode(mySettings.SPACE_BEFORE_ARRAY_INITIALIZER_LBRACE); } else if (myRole2 == ChildRole.RBRACE) { createSpaceProperty(mySettings.SPACE_WITHIN_ARRAY_INITIALIZER_BRACES, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } else if (myRole1 == ChildRole.COMMA) { createSpaceInCode(mySettings.SPACE_AFTER_COMMA); } else if (myRole2 == ChildRole.COMMA) { createSpaceInCode(mySettings.SPACE_BEFORE_COMMA); } } public void visitClass(PsiClass aClass) { if (myChild1.getElementType() == JavaDocElementType.DOC_COMMENT) { myResult = Spacing .createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); return; } if (myRole2 == ChildRole.LBRACE) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_CLASS_LBRACE, mySettings.CLASS_BRACE_STYLE, new TextRange(myParent.getTextRange().getStartOffset(), myChild2.getTextRange().getStartOffset()), false); } else if (myRole2 == ChildRole.METHOD || myRole2 == ChildRole.CLASS_INITIALIZER) { if (myRole1 == ChildRole.LBRACE) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, 0); } else { final int blankLines = mySettings.BLANK_LINES_AROUND_METHOD + 1; myResult = Spacing .createSpacing(0, 0, blankLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } else if (myRole1 == ChildRole.METHOD || myRole1 == ChildRole.CLASS_INITIALIZER) { if (myRole2 == ChildRole.RBRACE) { myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } else { final int blankLines = mySettings.BLANK_LINES_AROUND_METHOD + 1; myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, blankLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } else if (myRole1 == ChildRole.CLASS) { if (myRole2 == ChildRole.RBRACE) { myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } else { final int blankLines = mySettings.BLANK_LINES_AROUND_CLASS + 1; myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, blankLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } else if (myRole2 == ChildRole.CLASS) { if (myRole1 == ChildRole.LBRACE) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, 0); } else { final int blankLines = mySettings.BLANK_LINES_AROUND_CLASS + 1; myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, blankLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } else if (myRole2 == ChildRole.FIELD) { if (myRole1 == ChildRole.LBRACE) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, 0); } else { final int blankLines = mySettings.BLANK_LINES_AROUND_FIELD + 1; myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, blankLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } else if (myRole1 == ChildRole.FIELD) { if (myRole2 == ChildRole.RBRACE) { myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } else { final int blankLines = mySettings.BLANK_LINES_AROUND_FIELD + 1; myResult = Spacing .createSpacing(0, Integer.MAX_VALUE, blankLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } else if (myRole1 == ChildRole.MODIFIER_LIST) { processModifierList(); } else if (myRole1 == ChildRole.LBRACE && myRole2 == ChildRole.RBRACE) { myResult = Spacing .createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } else if (myRole2 == ChildRole.EXTENDS_LIST || myRole2 == ChildRole.IMPLEMENTS_LIST) { createSpaceInCode(true); } else if (myRole2 == ChildRole.TYPE_PARAMETER_LIST) { createSpaceInCode(mySettings.SPACE_BEFORE_TYPE_PARAMETER_LIST); } else if (myRole2 == ChildRole.ARGUMENT_LIST) { createSpaceInCode(false); } } public void visitImportList(PsiImportList list) { if (ElementType.IMPORT_STATEMENT_BASE_BIT_SET.isInSet(myChild1.getElementType()) && ElementType.IMPORT_STATEMENT_BASE_BIT_SET.isInSet(myChild2.getElementType())) { int emptyLines = myImportHelper.getEmptyLinesBetween( (PsiImportStatementBase)SourceTreeToPsiMap.treeElementToPsi(myChild1), (PsiImportStatementBase)SourceTreeToPsiMap.treeElementToPsi(myChild2) ) + 1; myResult = Spacing.createSpacing(0, 0, emptyLines, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } public void visitFile(PsiFile file) { if (myRole1 == ChildRole.PACKAGE_STATEMENT) { int lf = mySettings.BLANK_LINES_AFTER_PACKAGE + 1; myResult = Spacing .createSpacing(0, 0, lf, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } else if (myRole2 == ChildRole.PACKAGE_STATEMENT) { int lf = mySettings.BLANK_LINES_BEFORE_PACKAGE + 1; myResult = Spacing .createSpacing(0, 0, lf, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } else if (myRole1 == ChildRole.IMPORT_LIST) { int lf = mySettings.BLANK_LINES_AFTER_IMPORTS + 1; myResult = Spacing .createSpacing(0, 0, lf, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } else if (myRole2 == ChildRole.IMPORT_LIST) { int lf = mySettings.BLANK_LINES_BEFORE_IMPORTS + 1; myResult = Spacing .createSpacing(0, 0, lf, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } else if (myRole2 == ChildRole.CLASS) { int lf = mySettings.BLANK_LINES_AROUND_CLASS + 1; myResult = Spacing .createSpacing(0, 0, lf, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); } } public void visitWhileStatement(PsiWhileStatement statement) { if (myRole2 == ChildRole.LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_WHILE_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH || myRole2 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_WHILE_PARENTHESES); } else if (myRole2 == ChildRole.LOOP_BODY) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_WHILE_LBRACE, mySettings.BRACE_STYLE, new TextRange(myParent.getTextRange().getStartOffset(), myChild2.getTextRange().getStartOffset()), mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE); } } public void visitDoWhileStatement(PsiDoWhileStatement statement) { if (myRole1 == ChildRole.WHILE_KEYWORD && myRole2 == ChildRole.LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_WHILE_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH || myRole2 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_WHILE_PARENTHESES); } else if (myRole2 == ChildRole.LOOP_BODY) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_DO_LBRACE, mySettings.BRACE_STYLE, null, mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE); } else if (myRole1 == ChildRole.LOOP_BODY) { processOnNewLineCondition(mySettings.WHILE_ON_NEW_LINE); } } private void processOnNewLineCondition(final boolean onNewLine) { if (onNewLine) { if (!mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { myResult = Spacing .createDependentLFSpacing(0, 1, myParent.getTextRange(), mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } } else { myResult = myResult = Spacing .createSpacing(1, 1, 0, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } } public void visitTryStatement(PsiTryStatement statement) { if (myRole2 == ChildRole.FINALLY_KEYWORD) { processOnNewLineCondition(mySettings.FINALLY_ON_NEW_LINE); } else if (myRole2 == ChildRole.FINALLY_BLOCK || myRole2 == ChildRole.TRY_BLOCK) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_FINALLY_LBRACE, mySettings.BRACE_STYLE, null, mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE); } else if (myRole2 == ChildRole.CATCH_SECTION) { processOnNewLineCondition(mySettings.CATCH_ON_NEW_LINE); } } public void visitForeachStatement(PsiForeachStatement statement) { if (myRole1 == ChildRole.FOR_KEYWORD && myRole2 == ChildRole.LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_FOR_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH || myRole2 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_FOR_PARENTHESES); } else if (myRole1 == ChildRole.FOR_ITERATION_PARAMETER && myRole2 == ChildRole.COLON) { createSpaceInCode(true); } else if (myRole1 == ChildRole.COLON && myRole2 == ChildRole.FOR_ITERATED_VALUE) { createSpaceInCode(true); } else if (myRole2 == ChildRole.LOOP_BODY) { if (myChild2.getElementType() == ElementType.BLOCK_STATEMENT) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_FOR_LBRACE, mySettings.BRACE_STYLE, new TextRange(myParent.getTextRange().getStartOffset(), myChild2.getTextRange().getStartOffset()), mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE); } else if (mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE) { myResult = Spacing .createDependentLFSpacing(1, 1, myParent.getTextRange(), false, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { myResult = Spacing.createSpacing(0, 0, 1, false, mySettings.KEEP_BLANK_LINES_IN_CODE); } } } public void visitAssignmentExpression(PsiAssignmentExpression expression) { if (myRole1 == ChildRole.OPERATION_SIGN || myRole2 == ChildRole.OPERATION_SIGN) { createSpaceInCode(mySettings.SPACE_AROUND_ASSIGNMENT_OPERATORS); } } public void visitParenthesizedExpression(PsiParenthesizedExpression expression) { if (myRole1 == ChildRole.LPARENTH) { createParenSpace(mySettings.PARENTHESES_EXPRESSION_LPAREN_WRAP, mySettings.SPACE_WITHIN_PARENTHESES); } else if (myRole2 == ChildRole.RPARENTH) { createParenSpace(mySettings.PARENTHESES_EXPRESSION_RPAREN_WRAP, mySettings.SPACE_WITHIN_PARENTHESES); } } public void visitCodeBlock(PsiCodeBlock block) { processCodeBlock(keepInOneLine(block), block.getTextRange()); } public void visitCodeFragment(PsiCodeFragment codeFragment) { final TokenSet statementBitSet = ElementType.STATEMENT_BIT_SET; if (statementBitSet.isInSet(myChild1.getElementType()) && statementBitSet.isInSet(myChild2.getElementType())) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } } private void processCodeBlock(final boolean keepInOneLine, final TextRange textRange) { if (myParent instanceof JspCodeBlock) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else if (myRole1 == ChildRole.NONE || myRole2 == ChildRole.NONE) { if (myChild1.getElementType() == ElementType.END_OF_LINE_COMMENT) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { myResult = null; } } else if (myRole1 == ChildRole.LBRACE) { if (!keepInOneLine) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { myResult = Spacing .createDependentLFSpacing(0, 1, textRange, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } } else if (myRole2 == ChildRole.RBRACE) { if (!keepInOneLine) { myResult = Spacing .createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } else { myResult = Spacing .createDependentLFSpacing(0, 1, textRange, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_BEFORE_RBRACE); } } else if (myChild1.getElementType() == ElementType.SWITCH_LABEL_STATEMENT && myChild2.getElementType() == ElementType.BLOCK_STATEMENT) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_SWITCH_LBRACE, mySettings.BRACE_STYLE, null, false); } else if (myRole1 == ChildRole.STATEMENT_IN_BLOCK && myRole2 == ChildRole.STATEMENT_IN_BLOCK) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } } private boolean keepInOneLine(final PsiCodeBlock block) { if (block.getParent() instanceof PsiMethod) { return mySettings.KEEP_SIMPLE_METHODS_IN_ONE_LINE; } else { return mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE; } } public void visitIfStatement(PsiIfStatement statement) { if (myRole2 == ChildRole.ELSE_KEYWORD) { if (myChild1.getElementType() != ElementType.BLOCK_STATEMENT) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { if (mySettings.ELSE_ON_NEW_LINE) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { myResult = createNonLFSpace(1, false, null); } } } else if (myRole1 == ChildRole.ELSE_KEYWORD) { if (myChild2.getElementType() == ElementType.IF_STATEMENT) { if (mySettings.SPECIAL_ELSE_IF_TREATMENT) { myResult = createNonLFSpace(1, true, null); } else { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } } else { if (myChild2.getElementType() == ElementType.BLOCK_STATEMENT) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_ELSE_LBRACE, mySettings.BRACE_STYLE, new TextRange(myParent.getTextRange().getStartOffset(), myChild2.getTextRange().getStartOffset()), mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE); } else { createSpaceInCode(mySettings.SPACE_BEFORE_ELSE_LBRACE); } } } else if (myChild2.getElementType() == ElementType.BLOCK_STATEMENT) { boolean space = myRole2 == ChildRole.ELSE_BRANCH ? mySettings.SPACE_BEFORE_ELSE_LBRACE : mySettings.SPACE_BEFORE_IF_LBRACE; myResult = getSpaceBeforeLBrace(space, mySettings.BRACE_STYLE, new TextRange(myParent.getTextRange().getStartOffset(), myChild2.getTextRange().getStartOffset()), mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE); } else if (myRole2 == ChildRole.LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_IF_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_IF_PARENTHESES); } else if (myRole2 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_IF_PARENTHESES); } else if (myRole2 == ChildRole.THEN_BRANCH) { createSpaceInCode(true); } } private Spacing createNonLFSpace(int spaces, final boolean keepLineBreaks, final TextRange dependantRange) { final ASTNode prev = getPrevElementType(myChild2); if (prev != null && prev.getElementType() == ElementType.END_OF_LINE_COMMENT) { return Spacing .createSpacing(0, Integer.MAX_VALUE, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else if (dependantRange != null) { return Spacing .createDependentLFSpacing(spaces, spaces, dependantRange, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { return Spacing.createSpacing(spaces, spaces, 0, keepLineBreaks, mySettings.KEEP_BLANK_LINES_IN_CODE); } } private ASTNode getPrevElementType(final ASTNode child) { return FormatterUtil.getLeafNonSpaceBefore(child); } private Spacing getSpaceBeforeLBrace(final boolean spaceBeforeLbrace, int braceStyle, TextRange dependantRange, boolean keepOneLine) { if (dependantRange != null && braceStyle == CodeStyleSettings.NEXT_LINE_IF_WRAPPED) { int space = spaceBeforeLbrace ? 1 : 0; return createNonLFSpace(space, false, dependantRange); } else if (braceStyle == CodeStyleSettings.END_OF_LINE) { int space = spaceBeforeLbrace ? 1 : 0; return createNonLFSpace(space, false, null); } else if (keepOneLine) { int space = spaceBeforeLbrace ? 1 : 0; return Spacing .createDependentLFSpacing(space, space, myParent.getTextRange(), mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { return Spacing.createSpacing(0, 0, 1, false, mySettings.KEEP_BLANK_LINES_IN_CODE); } } public void visitBinaryExpression(PsiBinaryExpression expression) { PsiJavaToken sign = expression.getOperationSign(); IElementType i = sign.getTokenType(); if (myRole1 == ChildRole.OPERATION_SIGN || myRole2 == ChildRole.OPERATION_SIGN) { if (i == JavaTokenType.OROR || i == JavaTokenType.ANDAND) { createSpaceInCode(mySettings.SPACE_AROUND_LOGICAL_OPERATORS); } else if (i == JavaTokenType.OR || i == JavaTokenType.AND || i == JavaTokenType.XOR) { createSpaceInCode(mySettings.SPACE_AROUND_BITWISE_OPERATORS); } else if (i == JavaTokenType.EQEQ || i == JavaTokenType.NE) { createSpaceInCode(mySettings.SPACE_AROUND_EQUALITY_OPERATORS); } else if (i == JavaTokenType.GT || i == JavaTokenType.LT || i == JavaTokenType.GE || i == JavaTokenType.LE) { createSpaceInCode(mySettings.SPACE_AROUND_RELATIONAL_OPERATORS); } else if (i == JavaTokenType.PLUS || i == JavaTokenType.MINUS) { createSpaceInCode(mySettings.SPACE_AROUND_ADDITIVE_OPERATORS); } else if (i == JavaTokenType.ASTERISK || i == JavaTokenType.DIV || i == JavaTokenType.PERC) { createSpaceInCode(mySettings.SPACE_AROUND_MULTIPLICATIVE_OPERATORS); } else if (i == JavaTokenType.LTLT || i == JavaTokenType.GTGT || i == JavaTokenType.GTGTGT) { createSpaceInCode(mySettings.SPACE_AROUND_SHIFT_OPERATORS); } else { createSpaceInCode(false); } } } public void visitField(PsiField field) { if (myChild1.getElementType() == JavaDocElementType.DOC_COMMENT) { myResult = Spacing .createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); return; } if (myRole1 == ChildRole.INITIALIZER_EQ || myRole2 == ChildRole.INITIALIZER_EQ) { createSpaceInCode(mySettings.SPACE_AROUND_ASSIGNMENT_OPERATORS); } else if (myRole1 == ChildRole.TYPE || myRole2 == ChildRole.TYPE) { createSpaceInCode(true); } else if (myChild2.getElementType() == ElementType.SEMICOLON) { createSpaceProperty(false, false, 0); } else if (myRole1 == ChildRole.MODIFIER_LIST) { createSpaceProperty(true, false, 0); } } public void visitLocalVariable(PsiLocalVariable variable) { if (myRole1 == ChildRole.INITIALIZER_EQ || myRole2 == ChildRole.INITIALIZER_EQ) { createSpaceInCode(mySettings.SPACE_AROUND_ASSIGNMENT_OPERATORS); } else if (myRole1 == ChildRole.MODIFIER_LIST) { createSpaceInCode(true); } else if (myRole2 == ChildRole.TYPE_REFERENCE) { createSpaceInCode(true); } else if (myChild2.getElementType() == ElementType.SEMICOLON) { createSpaceProperty(false, false, 0); } } public void visitMethod(PsiMethod method) { if (myChild1.getElementType() == JavaDocElementType.DOC_COMMENT) { myResult = Spacing .createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_DECLARATIONS); return; } if (myRole2 == ChildRole.PARAMETER_LIST) { createSpaceInCode(mySettings.SPACE_BEFORE_METHOD_PARENTHESES); } else if (myRole1 == ChildRole.PARAMETER_LIST && myRole2 == ChildRole.THROWS_LIST) { createSpaceInCode(true); } else if (myRole2 == ChildRole.METHOD_BODY) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_METHOD_LBRACE, mySettings.METHOD_BRACE_STYLE, new TextRange(myParent.getTextRange().getStartOffset(), myChild2.getTextRange().getStartOffset()), mySettings.KEEP_SIMPLE_METHODS_IN_ONE_LINE); } else if (myRole1 == ChildRole.MODIFIER_LIST) { processModifierList(); } else if (ElementType.COMMENT_BIT_SET.isInSet(myChild1.getElementType()) && (myRole2 == ChildRole.MODIFIER_LIST || myRole2 == ChildRole.TYPE_REFERENCE)) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, 0); } else if (myRole2 == ChildRole.DEFAULT_KEYWORD) { createSpaceInCode(true); } else if (myRole2 == ChildRole.ANNOTATION_DEFAULT_VALUE) { createSpaceInCode(true); } else if (myChild2.getElementType() == ElementType.SEMICOLON) { createSpaceInCode(false); } } private void processModifierList() { if (mySettings.MODIFIER_LIST_WRAP) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { createSpaceProperty(true, false, 0); } } public void visitModifierList(PsiModifierList list) { createSpaceInCode(true); } public void visitParameterList(PsiParameterList list) { if (myRole1 == ChildRole.LPARENTH && myRole2 == ChildRole.RPARENTH) { createParenSpace(mySettings.METHOD_PARAMETERS_LPAREN_ON_NEXT_LINE, false); } else if (myRole2 == ChildRole.RPARENTH) { createParenSpace(mySettings.METHOD_PARAMETERS_RPAREN_ON_NEXT_LINE, mySettings.SPACE_WITHIN_METHOD_PARENTHESES); } else if (myRole2 == ChildRole.COMMA) { createSpaceInCode(false); } else if (myRole1 == ChildRole.LPARENTH) { createParenSpace(mySettings.METHOD_PARAMETERS_LPAREN_ON_NEXT_LINE, mySettings.SPACE_WITHIN_METHOD_PARENTHESES); } else if (myRole1 == ChildRole.COMMA) { createSpaceInCode(true); } } private void createParenSpace(final boolean onNewLine, final boolean space) { createParenSpace(onNewLine, space, myParent.getTextRange()); } private void createParenSpace(final boolean onNewLine, final boolean space, final TextRange dependance) { if (onNewLine) { final int spaces = space ? 1 : 0; myResult = Spacing .createDependentLFSpacing(spaces, spaces, dependance, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { createSpaceInCode(space); } } public void visitElement(PsiElement element) { if (myRole1 == ChildRole.MODIFIER_LIST) { processModifierList(); } } public void visitExpressionList(PsiExpressionList list) { if (myRole1 == ChildRole.LPARENTH && myRole2 == ChildRole.RPARENTH) { createParenSpace(mySettings.CALL_PARAMETERS_LPAREN_ON_NEXT_LINE, false); } else if (myRole2 == ChildRole.RPARENTH) { createParenSpace(mySettings.CALL_PARAMETERS_RPAREN_ON_NEXT_LINE, myRole1 == ChildRole.COMMA || mySettings.SPACE_WITHIN_METHOD_CALL_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH) { createParenSpace(mySettings.CALL_PARAMETERS_LPAREN_ON_NEXT_LINE, mySettings.SPACE_WITHIN_METHOD_CALL_PARENTHESES); } else if (myRole1 == ChildRole.COMMA) { createSpaceInCode(true); } else if (myRole2 == ChildRole.COMMA) { createSpaceInCode(false); } } public void visitSynchronizedStatement(PsiSynchronizedStatement statement) { if (myRole1 == ChildRole.SYNCHRONIZED_KEYWORD || myRole2 == ChildRole.LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_SYNCHRONIZED_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH || myRole2 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_SYNCHRONIZED_PARENTHESES); } else if (myRole2 == ChildRole.BLOCK) { createSpaceInCode(mySettings.SPACE_BEFORE_SYNCHRONIZED_LBRACE); } } public void visitSwitchStatement(PsiSwitchStatement statement) { if (myRole1 == ChildRole.SWITCH_KEYWORD && myRole2 == ChildRole.LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_SWITCH_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH || myRole2 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_SWITCH_PARENTHESES); } else if (myRole2 == ChildRole.SWITCH_BODY) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_SWITCH_LBRACE, mySettings.BRACE_STYLE, null, mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE); } } public void visitForStatement(PsiForStatement statement) { if (myRole2 == ChildRole.LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_FOR_PARENTHESES); } else if (myRole1 == ChildRole.LPARENTH) { ASTNode rparenth = findFrom(myChild2, ElementType.RPARENTH, true); if (rparenth == null) { createSpaceInCode(mySettings.SPACE_WITHIN_FOR_PARENTHESES); } else { createParenSpace(mySettings.FOR_STATEMENT_LPAREN_ON_NEXT_LINE, mySettings.SPACE_WITHIN_FOR_PARENTHESES, new TextRange(myChild1.getTextRange().getStartOffset(), rparenth.getTextRange().getEndOffset())); } } else if (myRole2 == ChildRole.RPARENTH) { ASTNode lparenth = findFrom(myChild2, ElementType.LPARENTH, false); if (lparenth == null) { createSpaceInCode(mySettings.SPACE_WITHIN_FOR_PARENTHESES); } else { createParenSpace(mySettings.FOR_STATEMENT_RPAREN_ON_NEXT_LINE, mySettings.SPACE_WITHIN_FOR_PARENTHESES, new TextRange(lparenth.getTextRange().getStartOffset(), myChild2.getTextRange().getEndOffset())); } } else if (myRole1 == ChildRole.FOR_INITIALIZATION) { createSpaceInCode(mySettings.SPACE_AFTER_SEMICOLON); } else if (myRole1 == ChildRole.CONDITION) { createSpaceInCode(mySettings.SPACE_BEFORE_SEMICOLON); } else if (myRole1 == ChildRole.FOR_SEMICOLON) { createSpaceInCode(mySettings.SPACE_AFTER_SEMICOLON); } else if (myRole2 == ChildRole.LOOP_BODY) { if (myChild2.getElementType() == ElementType.BLOCK_STATEMENT) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_FOR_LBRACE, mySettings.BRACE_STYLE, new TextRange(myParent.getTextRange().getStartOffset(), myChild2.getTextRange().getStartOffset()), mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE); } else if (mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE) { myResult = Spacing .createDependentLFSpacing(1, 1, myParent.getTextRange(), false, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { myResult = Spacing.createSpacing(0, 0, 1, false, mySettings.KEEP_BLANK_LINES_IN_CODE); } } } private ASTNode findFrom(ASTNode current, final IElementType expected, boolean forward) { while (current != null) { if (current.getElementType() == expected) return current; current = forward ? current.getTreeNext() : current.getTreePrev(); } return null; } public void visitCatchSection(PsiCatchSection section) { if (myRole2 == ChildRole.CATCH_BLOCK) { myResult = getSpaceBeforeLBrace(mySettings.SPACE_BEFORE_CATCH_LBRACE, mySettings.BRACE_STYLE, null, mySettings.KEEP_SIMPLE_BLOCKS_IN_ONE_LINE); } else if (myRole2 == ChildRole.CATCH_BLOCK_PARAMETER_LPARENTH) { createSpaceInCode(mySettings.SPACE_BEFORE_CATCH_PARENTHESES); } else if (myRole1 == ChildRole.CATCH_BLOCK_PARAMETER_LPARENTH || myRole2 == ChildRole.CATCH_BLOCK_PARAMETER_RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_CATCH_PARENTHESES); } } public void visitReferenceParameterList(PsiReferenceParameterList list) { if (myRole1 == ChildRole.LT_IN_TYPE_LIST && myRole2 == ChildRole.TYPE_IN_REFERENCE_PARAMETER_LIST) { createSpaceInCode(false); } else if (myRole1 == ChildRole.LT_IN_TYPE_LIST && myRole2 == ChildRole.GT_IN_TYPE_LIST) { createSpaceInCode(true); } else if (myRole1 == ChildRole.TYPE_IN_REFERENCE_PARAMETER_LIST && myRole2 == ChildRole.COMMA) { createSpaceInCode(false); } else if (myRole1 == ChildRole.COMMA && myRole2 == ChildRole.TYPE_IN_REFERENCE_PARAMETER_LIST) { createSpaceInCode(true); } else if (myRole2 == ChildRole.GT_IN_TYPE_LIST) { createSpaceInCode(false); } } public void visitTypeCastExpression(PsiTypeCastExpression expression) { if (myRole1 == ChildRole.LPARENTH || myRole2 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_CAST_PARENTHESES); } else if (myRole1 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_AFTER_TYPE_CAST); } } private void createSpaceProperty(boolean space, int keepBlankLines) { createSpaceProperty(space, mySettings.KEEP_LINE_BREAKS, keepBlankLines); } private void createSpaceProperty(boolean space, boolean keepLineBreaks, final int keepBlankLines) { final ASTNode prev = getPrevElementType(myChild2); if (prev != null && prev.getElementType() == ElementType.END_OF_LINE_COMMENT) { myResult = Spacing.createSpacing(0, 0, 1, mySettings.KEEP_LINE_BREAKS, mySettings.KEEP_BLANK_LINES_IN_CODE); } else { if (!space && !CodeEditUtil.canStickChildrenTogether(myChild1, myChild2)) { space = true; } if (!keepLineBreaks && myRole2 == ChildRole.NONE) { keepLineBreaks = true; } myResult = Spacing.createSpacing(space ? 1 : 0, space ? 1 : 0, 0, keepLineBreaks, keepBlankLines); } } public void visitReferenceList(PsiReferenceList list) { if (myRole1 == ChildRole.COMMA) { createSpaceInCode(true); } else if (myRole2 == ChildRole.COMMA) { createSpaceInCode(false); } else if (myRole1 == ChildRole.EXTENDS_KEYWORD || myRole2 == ChildRole.EXTENDS_KEYWORD) { createSpaceInCode(true); } else if (myRole1 == ChildRole.AMPERSAND_IN_BOUNDS_LIST | myRole2 == ChildRole.AMPERSAND_IN_BOUNDS_LIST) { createSpaceInCode(true); } else if (myRole1 == ChildRole.IMPLEMENTS_KEYWORD || myRole2 == ChildRole.IMPLEMENTS_KEYWORD) { createSpaceInCode(true); } else if (myRole1 == ChildRole.THROWS_KEYWORD) { createSpaceInCode(true); } } public void visitReferenceExpression(PsiReferenceExpression expression) { visitReferenceElement(expression); } public void visitConditionalExpression(PsiConditionalExpression expression) { if (myRole2 == ChildRole.QUEST) { createSpaceInCode(mySettings.SPACE_BEFORE_QUEST); } else if (myRole1 == ChildRole.QUEST) { createSpaceInCode(mySettings.SPACE_AFTER_QUEST); } else if (myRole2 == ChildRole.COLON) { createSpaceInCode(mySettings.SPACE_BEFORE_COLON); } else if (myRole1 == ChildRole.COLON) { createSpaceInCode(mySettings.SPACE_AFTER_COLON); } } public void visitStatement(PsiStatement statement) { if (myRole2 == ChildRole.CLOSING_SEMICOLON) { createSpaceInCode(false); } } public void visitReturnStatement(PsiReturnStatement statement) { if (myChild2.getElementType() == ElementType.SEMICOLON) { createSpaceInCode(false); } else if (myRole1 == ChildRole.RETURN_KEYWORD) { createSpaceInCode(true); } else { super.visitReturnStatement(statement); } } public void visitMethodCallExpression(PsiMethodCallExpression expression) { if (myRole2 == ChildRole.ARGUMENT_LIST) { createSpaceInCode(mySettings.SPACE_BEFORE_METHOD_CALL_PARENTHESES); } } public void visitTypeParameter(PsiTypeParameter classParameter) { createSpaceInCode(true); } public void visitDeclarationStatement(PsiDeclarationStatement declarationStatement) { if (myRole2 == ChildRole.COMMA) { createSpaceInCode(false); } } public void visitTypeParameterList(PsiTypeParameterList list) { if (myRole2 == ChildRole.GT_IN_TYPE_LIST) { createSpaceInCode(false); } } public void visitReferenceElement(PsiJavaCodeReferenceElement reference) { if (myRole2 == ChildRole.REFERENCE_PARAMETER_LIST) { createSpaceInCode(mySettings.SPACE_BEFORE_TYPE_PARAMETER_LIST); } } public void visitAnnotation(PsiAnnotation annotation) { if (myRole2 == ChildRole.PARAMETER_LIST) { createSpaceInCode(mySettings.SPACE_BEFORE_ANOTATION_PARAMETER_LIST); } else if (myChild1.getElementType() == ElementType.AT && myChild2.getElementType() == ElementType.JAVA_CODE_REFERENCE) { createSpaceInCode(false); } } public void visitAnnotationParameterList(PsiAnnotationParameterList list) { if (myRole1 == ChildRole.LPARENTH && myRole2 == ChildRole.RPARENTH) { createSpaceInCode(false); } else if (myRole1 == ChildRole.LPARENTH || myRole2 == ChildRole.RPARENTH) { createSpaceInCode(mySettings.SPACE_WITHIN_ANNOTATION_PARENTHESES); } else if (myRole2 == ChildRole.COMMA) { createSpaceInCode(false); } else if (myRole1 == ChildRole.COMMA) { createSpaceInCode(true); } } public void visitNameValuePair(PsiNameValuePair pair) { if (myRole1 == ChildRole.OPERATION_SIGN || myRole2 == ChildRole.OPERATION_SIGN) { createSpaceInCode(mySettings.SPACE_AROUND_ASSIGNMENT_OPERATORS); } } public void visitAnnotationArrayInitializer(PsiArrayInitializerMemberValue initializer) { if (myRole1 == ChildRole.LBRACE && myRole2 == ChildRole.RBRACE) { createSpaceInCode(false); } else if (myRole1 == ChildRole.LBRACE || myRole2 == ChildRole.RBRACE) { createSpaceInCode(mySettings.SPACE_WITHIN_ARRAY_INITIALIZER_BRACES); } else if (myRole1 == ChildRole.COMMA) { createSpaceInCode(true); } else if (myRole2 == ChildRole.COMMA) { createSpaceInCode(false); } } public void visitEnumConstant(PsiEnumConstant enumConstant) { if (myRole2 == ChildRole.ARGUMENT_LIST) { createSpaceInCode(mySettings.SPACE_BEFORE_METHOD_CALL_PARENTHESES); } } public void visitDocTag(PsiDocTag tag) { if (myChild1.getElementType() == ElementType.DOC_TAG_NAME && myChild2.getElementType() == ElementType.DOC_TAG_VALUE_TOKEN) { myResult = Spacing.createSpacing(1, 1, 0, false, 0); } } }
http://www.jetbrains.net/jira/browse/IDEADEV-1712 fixed
source/com/intellij/psi/formatter/java/JavaSpacePropertyProcessor.java
http://www.jetbrains.net/jira/browse/IDEADEV-1712 fixed
Java
apache-2.0
6851c513cd8e851d41bba8c8ae3b1bb26483c48e
0
ghxiao/ontop-spatial,ontop/ontop,clarkparsia/ontop,ConstantB/ontop-spatial,ontop/ontop,ghxiao/ontop-spatial,srapisarda/ontop,ConstantB/ontop-spatial,ontop/ontop,ConstantB/ontop-spatial,ghxiao/ontop-spatial,eschwert/ontop,eschwert/ontop,ontop/ontop,srapisarda/ontop,eschwert/ontop,srapisarda/ontop,srapisarda/ontop,ontop/ontop,ConstantB/ontop-spatial,ghxiao/ontop-spatial,clarkparsia/ontop,eschwert/ontop,clarkparsia/ontop
package it.unibz.krdb.obda.owlrefplatform.core; import it.unibz.krdb.obda.codec.DatalogProgramToTextCodec; import it.unibz.krdb.obda.model.Atom; import it.unibz.krdb.obda.model.CQIE; import it.unibz.krdb.obda.model.DatalogProgram; import it.unibz.krdb.obda.model.OBDAConnection; import it.unibz.krdb.obda.model.OBDADataFactory; import it.unibz.krdb.obda.model.OBDAException; import it.unibz.krdb.obda.model.OBDAModel; import it.unibz.krdb.obda.model.OBDAQuery; import it.unibz.krdb.obda.model.OBDAResultSet; import it.unibz.krdb.obda.model.OBDAStatement; import it.unibz.krdb.obda.model.Term; import it.unibz.krdb.obda.model.impl.OBDADataFactoryImpl; import it.unibz.krdb.obda.model.impl.OBDAVocabulary; import it.unibz.krdb.obda.ontology.Assertion; import it.unibz.krdb.obda.owlrefplatform.core.abox.RDBMSDataRepositoryManager; import it.unibz.krdb.obda.owlrefplatform.core.basicoperations.QueryVocabularyValidator; import it.unibz.krdb.obda.owlrefplatform.core.reformulation.QueryRewriter; import it.unibz.krdb.obda.owlrefplatform.core.resultset.BooleanOWLOBDARefResultSet; import it.unibz.krdb.obda.owlrefplatform.core.resultset.EmptyQueryResultSet; import it.unibz.krdb.obda.owlrefplatform.core.resultset.OWLOBDARefResultSet; import it.unibz.krdb.obda.owlrefplatform.core.srcquerygeneration.SourceQueryGenerator; import it.unibz.krdb.obda.owlrefplatform.core.unfolding.UnfoldingMechanism; import it.unibz.krdb.obda.parser.DatalogProgramParser; import it.unibz.krdb.obda.parser.SPARQLDatalogTranslator; import java.io.IOException; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Vector; import java.util.regex.Pattern; import org.antlr.runtime.RecognitionException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.hp.hpl.jena.query.QueryException; /** * The obda statement provides the implementations necessary to query the * reformulation platform reasoner from outside, i.e. protege * * */ public class QuestStatement implements OBDAStatement { private QueryRewriter rewriter = null; private UnfoldingMechanism unfoldingmechanism = null; private SourceQueryGenerator querygenerator = null; private QueryVocabularyValidator validator = null; private OBDAModel unfoldingOBDAModel = null; private boolean canceled = false; Logger log = LoggerFactory.getLogger(QuestStatement.class); private Statement sqlstatement; private RDBMSDataRepositoryManager repository; private DatalogProgram unfoldingProgram; private QuestConnection conn; protected Quest questInstance; public QuestStatement(Quest questinstance, QuestConnection conn, Statement st) { this.questInstance = questinstance; this.repository = questinstance.dataRepository; this.conn = conn; this.rewriter = questinstance.rewriter; this.unfoldingmechanism = questinstance.unfolder; this.querygenerator = questinstance.datasourceQueryGenerator; // this.engine = eng; this.sqlstatement = st; this.validator = questinstance.vocabularyValidator; // this.query = query; this.unfoldingOBDAModel = questinstance.unfoldingOBDAModel; } /** * Returns the result set for the given query */ @Override public OBDAResultSet execute(String strquery) throws OBDAException { if (strquery.split("[eE][tT][aA][bB][lL][eE]").length > 1) { return executeEpistemicQuery(strquery); } if (strquery.contains("/*direct*/")) { return executeDirectQuery(strquery); } else { return executeConjunctiveQuery(strquery); } } /*** * This method will 'chop' the original query into the subqueries, computing * the SQL for each of the nested query and composing everything into a * single SQL. * * @param strquery * @return * @throws Exception */ private OBDAResultSet executeEpistemicQuery(String strquery) throws OBDAException { try { OBDAResultSet result; String epistemicUnfolding = getSQLEpistemic(strquery); ResultSet set = sqlstatement.executeQuery(epistemicUnfolding); int columnCount = set.getMetaData().getColumnCount(); List<Term> typing = getDefaultTypingSignature(columnCount); result = new OWLOBDARefResultSet(set, typing, this); return result; } catch (Exception e) { throw new OBDAException(e); } } private OBDAResultSet executeDirectQuery(String query) throws OBDAException { try { OBDAResultSet result; ResultSet set = sqlstatement.executeQuery(query); int columnCount = set.getMetaData().getColumnCount(); List<Term> typing = getDefaultTypingSignature(columnCount); result = new OWLOBDARefResultSet(set, typing, this); return result; } catch (Exception e) { throw new OBDAException(e); } } private List<Term> getDefaultTypingSignature(int columnCount) { OBDADataFactory fac = OBDADataFactoryImpl.getInstance(); List<Term> signatureTyping = new ArrayList<Term>(); for (int i = 0; i < columnCount; i++) { signatureTyping.add(fac.getFunctionalTerm(OBDAVocabulary.RDFS_LITERAL, fac.getVariable("x"))); } return signatureTyping; } // private DatalogProgram getRewriting(DatalogProgram) { // // } // /*** * Translates a SPARQL query into Datalog dealing with equivalences and * verifying that the vocabulary of the query matches the one in the * ontology. If there are equivalences to handle, this is where its done * (i.e., renaming atoms that use predicates that have been replaced by a * canonical one. * * @param query * @return */ private DatalogProgram translateAndPreProcess(String strquery) throws OBDAException { // Contruct the datalog program object from the query string log.debug("Input user query:\n" + strquery); SPARQLDatalogTranslator sparqlTranslator = new SPARQLDatalogTranslator(); DatalogProgram program = null; try { program = sparqlTranslator.parse(strquery); } catch (QueryException e) { log.warn(e.getMessage()); } if (program == null) { // if the SPARQL translator doesn't work, // use the Datalog parser. DatalogProgramParser datalogParser = new DatalogProgramParser(); try { program = datalogParser.parse(strquery); } catch (RecognitionException e) { log.warn(e.getMessage()); program = null; } catch (IllegalArgumentException e2) { log.warn(e2.getMessage()); } } // Check the datalog object if (validator != null) { log.debug("Validating the user query..."); boolean isValid = validator.validate(program); if (!isValid) { Vector<String> invalidList = validator.getInvalidPredicates(); String msg = ""; for (String predicate : invalidList) { msg += "- " + predicate + "\n"; } throw new OBDAException("Unknown Classes/Properties in the query: \n" + msg); } } log.debug("Replacing equivalences..."); program = validator.replaceEquivalences(program); return program; } private DatalogProgram getUnfolding(DatalogProgram query) throws OBDAException { log.debug("Start the unfolding process..."); OBDAQuery unfolding = unfoldingmechanism.unfold((DatalogProgram) query); return (DatalogProgram)unfolding; } private String getSQL(DatalogProgram query, List<String> signature) throws OBDAException { if (((DatalogProgram) query).getRules().size() == 0) return ""; log.debug("Producing the SQL string..."); String sql = querygenerator.generateSourceQuery((DatalogProgram) query, signature); return sql; } private OBDAResultSet executeConjunctiveQuery(String strquery) throws OBDAException { List<String> signature = getSignature(strquery); DatalogProgram program = translateAndPreProcess(strquery); log.debug("Start the rewriting process..."); DatalogProgram rewriting = getRewriting(program); DatalogProgram unfolding = getUnfolding(rewriting); String sql = getSQL(unfolding, signature); OBDAResultSet result; log.debug("Executing the query and get the result..."); if (sql.equals("")) { /*** * Empty unfolding, constructing an empty result set */ if (program.getRules().size() < 1) { throw new OBDAException("Error, invalid query"); } result = new EmptyQueryResultSet(signature, this); } else { ResultSet set; try { set = sqlstatement.executeQuery(sql); } catch (SQLException e) { throw new OBDAException(e); } if (isDPBoolean(program)) { result = new BooleanOWLOBDARefResultSet(set, this); } else { List<Term> typingSignature = unfolding.getRules().get(0).getHead().getTerms(); try { result = new OWLOBDARefResultSet(set, typingSignature, this); } catch (SQLException e) { throw new OBDAException(e.getMessage()); } } } log.debug("Finish.\n"); return result; } /** * Returns the final rewriting of the given query */ public String getRewriting(String strquery) throws Exception { // TODO FIX to limit to SPARQL input and output DatalogProgram program = translateAndPreProcess(strquery); OBDAQuery rewriting = rewriter.rewrite(program); DatalogProgramToTextCodec codec = new DatalogProgramToTextCodec(unfoldingOBDAModel); return codec.encode((DatalogProgram) rewriting); } /** * Returns the final rewriting of the given query */ public DatalogProgram getRewriting(DatalogProgram program) throws OBDAException { OBDAQuery rewriting = rewriter.rewrite(program); return (DatalogProgram) rewriting; } private String getSQLEpistemic(String strquery) throws OBDAException { // FIRST WE try to analyze the query to find the CQs and the SQL part LinkedList<String> sql = new LinkedList<String>(); LinkedList<String> cqs = new LinkedList<String>(); StringBuffer query = new StringBuffer(strquery); Pattern pattern = Pattern.compile("[eE][tT][aA][bB][lL][eE]\\s*\\((\\r?\\n|\\n|.)+?\\)", Pattern.MULTILINE); while (true) { String[] splitquery = pattern.split(query.toString(), 2); if (splitquery.length > 1) { sql.add(splitquery[0]); query.delete(0, splitquery[0].length()); int position = query.toString().indexOf(splitquery[1]); String regex = query.toString().substring(0, position); cqs.add(regex.substring(regex.indexOf("(") + 1, regex.length() - 1)); query = new StringBuffer(splitquery[1]); } else { sql.add(splitquery[0]); break; } } // Now we generate the SQL for each CQ SPARQLDatalogTranslator t = new SPARQLDatalogTranslator(); LinkedList<String> sqlforcqs = new LinkedList<String>(); log.debug("Found {} embedded queries.", cqs.size()); for (int i = 0; i < cqs.size(); i++) { log.debug("Processing embedded query #{}", i); String cq = cqs.get(i); try { String finasql = getUnfolding(cq); log.debug("SQL: {}", finasql); sqlforcqs.add(finasql); } catch (Exception e) { log.error("Error processing nested query #{}", i); log.error(e.getMessage(), e); throw new OBDAException("Error processing nested query #" + i, e); } } // Now we concatenate the simple SQL with the rewritten SQL to generate // the query over DB. log.debug("Forming the final SQL."); StringBuffer finalquery = new StringBuffer(); for (int i = 0; i < sql.size(); i++) { finalquery.append(sql.get(i)); if (sqlforcqs.size() > i) { finalquery.append("("); finalquery.append(sqlforcqs.get(i)); finalquery.append(")"); } } log.debug("Final SQL query: {}", finalquery.toString()); if (finalquery.toString().equals("")) throw new OBDAException("Invalid SQL. The SQL query cannot be empty"); return finalquery.toString(); } /** * Returns the final rewriting of the given query */ public String getUnfolding(String strquery) throws Exception { String sql = null; if (strquery.split("[eE][tT][aA][bB][lL][eE]").length > 1) { sql = getSQLEpistemic(strquery); } else if (strquery.contains("/*direct*/")) { sql = strquery; } else { DatalogProgram p = translateAndPreProcess(strquery); DatalogProgram rewriting = getRewriting(p); DatalogProgram unfolding = getUnfolding(rewriting); sql = getSQL(unfolding, getSignature(strquery)); } return sql; } /** * Returns the number of tuples returned by the query */ public int getTupleCount(String query) throws Exception { String unf = getUnfolding(query); String newsql = "SELECT count(*) FROM (" + unf + ") t1"; if (!canceled) { ResultSet set = sqlstatement.executeQuery(newsql); if (set.next()) { return set.getInt(1); } else { throw new Exception("Tuple count faild due to empty result set."); } } else { throw new Exception("Action canceled."); } } /** * Checks whether the given query is boolean or not * * @param dp * the given datalog program * @return true if the query is boolean, false otherwise */ private boolean isDPBoolean(DatalogProgram dp) { List<CQIE> rules = dp.getRules(); Iterator<CQIE> it = rules.iterator(); boolean bool = true; while (it.hasNext() && bool) { CQIE query = it.next(); Atom a = query.getHead(); if (a.getTerms().size() != 0) { bool = false; } } return bool; } @Override public void close() throws OBDAException { try { sqlstatement.close(); } catch (SQLException e) { throw new OBDAException(e); } } // private DatalogProgram getDatalogQuery(String query) throws OBDAException // { // SPARQLDatalogTranslator sparqlTranslator = new SPARQLDatalogTranslator(); // // DatalogProgram queryProgram = null; // try { // queryProgram = sparqlTranslator.parse(query); // } catch (QueryException e) { // log.warn(e.getMessage()); // } // // if (queryProgram == null) { // if the SPARQL translator doesn't work, // // use the Datalog parser. // DatalogProgramParser datalogParser = new DatalogProgramParser(); // try { // queryProgram = datalogParser.parse(query); // } catch (RecognitionException e) { // log.warn(e.getMessage()); // queryProgram = null; // } catch (IllegalArgumentException e2) { // log.warn(e2.getMessage()); // } // } // // if (queryProgram == null) // if it is still null // throw new OBDAException("Unsupported syntax"); // // return queryProgram; // } private List<String> getSignature(String query) throws OBDAException { SPARQLDatalogTranslator sparqlTranslator = new SPARQLDatalogTranslator(); List<String> signature = sparqlTranslator.getSignature(query); return signature; } @Override public void cancel() throws OBDAException { // TODO Auto-generated method stub } @Override public int executeUpdate(String query) throws OBDAException { // TODO Auto-generated method stub return 0; } @Override public int getFetchSize() throws OBDAException { try { return sqlstatement.getFetchSize(); } catch (SQLException e) { throw new OBDAException(e); } } @Override public int getMaxRows() throws OBDAException { try { return sqlstatement.getMaxRows(); } catch (SQLException e) { throw new OBDAException(e); } } @Override public void getMoreResults() throws OBDAException { try { sqlstatement.getMoreResults(); } catch (SQLException e) { throw new OBDAException(e); } } @Override public void setFetchSize(int rows) throws OBDAException { try { sqlstatement.setFetchSize(rows); } catch (SQLException e) { throw new OBDAException(e); } } @Override public void setMaxRows(int max) throws OBDAException { try { sqlstatement.setMaxRows(max); } catch (SQLException e) { throw new OBDAException(e); } } @Override public void setQueryTimeout(int seconds) throws OBDAException { try { sqlstatement.setQueryTimeout(seconds); } catch (SQLException e) { throw new OBDAException(e); } } public void setUnfoldingProgram(DatalogProgram unfoldingProgram) { this.unfoldingProgram = unfoldingProgram; } @Override public OBDAConnection getConnection() throws OBDAException { return conn; } @Override public OBDAResultSet getResultSet() throws OBDAException { // TODO Auto-generated method stub return null; } @Override public int getQueryTimeout() throws OBDAException { try { return sqlstatement.getQueryTimeout(); } catch (SQLException e) { throw new OBDAException(e); } } @Override public boolean isClosed() throws OBDAException { try { return sqlstatement.isClosed(); } catch (SQLException e) { throw new OBDAException(e); } } /*** * Inserts a stream of ABox assertions into the repository. * * @param data * @param recreateIndexes * Indicates if indexes (if any) should be droped before * inserting the tuples and recreated afterwards. Note, if no * index existed before the insert no drop will be done and no * new index will be created. * @throws SQLException */ public int insertData(Iterator<Assertion> data, boolean useFile, int commit, int batch) throws SQLException { int result = -1; if (!useFile) result = repository.insertData(conn.conn, data, commit, batch); else { try { // File temporalFile = new File("quest-copy.tmp"); // FileOutputStream os = new FileOutputStream(temporalFile); result = (int) repository.loadWithFile(conn.conn, data); // os.close(); } catch (IOException e) { log.error(e.getMessage()); } } return result; } /*** * As before, but using recreateIndexes = false. * * @param data * @throws SQLException */ public int insertData(Iterator<Assertion> data, int commit, int batch) throws SQLException { return insertData(data, false, commit, batch); } public void createIndexes() throws Exception { repository.createIndexes(conn.conn); } public void dropIndexes() throws Exception { repository.dropIndexes(conn.conn); } public boolean isIndexed() { if (repository == null) return false; return repository.isIndexed(conn.conn); } public void dropRepository() throws SQLException { if (repository == null) return; repository.dropDBSchema(conn.conn); } /*** * In an ABox store (classic) this methods triggers the generation of the * schema and the insertion of the metadata. * * @throws SQLException */ public void createDB() throws SQLException { repository.createDBSchema(conn.conn, false); repository.insertMetadata(conn.conn); } public void analyze() throws Exception { repository.collectStatistics(conn.conn); } }
obdalib/reformulation-core/src/main/java/it/unibz/krdb/obda/owlrefplatform/core/QuestStatement.java
package it.unibz.krdb.obda.owlrefplatform.core; import it.unibz.krdb.obda.codec.DatalogProgramToTextCodec; import it.unibz.krdb.obda.model.Atom; import it.unibz.krdb.obda.model.CQIE; import it.unibz.krdb.obda.model.DatalogProgram; import it.unibz.krdb.obda.model.OBDAConnection; import it.unibz.krdb.obda.model.OBDADataFactory; import it.unibz.krdb.obda.model.OBDAException; import it.unibz.krdb.obda.model.OBDAModel; import it.unibz.krdb.obda.model.OBDAQuery; import it.unibz.krdb.obda.model.OBDAResultSet; import it.unibz.krdb.obda.model.OBDAStatement; import it.unibz.krdb.obda.model.Term; import it.unibz.krdb.obda.model.impl.OBDADataFactoryImpl; import it.unibz.krdb.obda.model.impl.OBDAVocabulary; import it.unibz.krdb.obda.ontology.Assertion; import it.unibz.krdb.obda.owlrefplatform.core.abox.RDBMSDataRepositoryManager; import it.unibz.krdb.obda.owlrefplatform.core.basicoperations.QueryVocabularyValidator; import it.unibz.krdb.obda.owlrefplatform.core.reformulation.QueryRewriter; import it.unibz.krdb.obda.owlrefplatform.core.resultset.BooleanOWLOBDARefResultSet; import it.unibz.krdb.obda.owlrefplatform.core.resultset.EmptyQueryResultSet; import it.unibz.krdb.obda.owlrefplatform.core.resultset.OWLOBDARefResultSet; import it.unibz.krdb.obda.owlrefplatform.core.srcquerygeneration.SourceQueryGenerator; import it.unibz.krdb.obda.owlrefplatform.core.unfolding.UnfoldingMechanism; import it.unibz.krdb.obda.parser.DatalogProgramParser; import it.unibz.krdb.obda.parser.SPARQLDatalogTranslator; import java.io.IOException; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.ArrayList; import java.util.Iterator; import java.util.LinkedList; import java.util.List; import java.util.Vector; import java.util.regex.Pattern; import org.antlr.runtime.RecognitionException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.hp.hpl.jena.query.QueryException; /** * The obda statement provides the implementations necessary to query the * reformulation platform reasoner from outside, i.e. protege * * */ public class QuestStatement implements OBDAStatement { private QueryRewriter rewriter = null; private UnfoldingMechanism unfoldingmechanism = null; private SourceQueryGenerator querygenerator = null; private QueryVocabularyValidator validator = null; private OBDAModel unfoldingOBDAModel = null; private boolean canceled = false; Logger log = LoggerFactory.getLogger(QuestStatement.class); private Statement sqlstatement; private RDBMSDataRepositoryManager repository; private DatalogProgram unfoldingProgram; private QuestConnection conn; protected Quest questInstance; public QuestStatement(Quest questinstance, QuestConnection conn, Statement st) { this.questInstance = questinstance; this.repository = questinstance.dataRepository; this.conn = conn; this.rewriter = questinstance.rewriter; this.unfoldingmechanism = questinstance.unfolder; this.querygenerator = questinstance.datasourceQueryGenerator; // this.engine = eng; this.sqlstatement = st; this.validator = questinstance.vocabularyValidator; // this.query = query; this.unfoldingOBDAModel = questinstance.unfoldingOBDAModel; } /** * Returns the result set for the given query */ @Override public OBDAResultSet execute(String strquery) throws OBDAException { if (strquery.split("[eE][tT][aA][bB][lL][eE]").length > 1) { return executeEpistemicQuery(strquery); } if (strquery.contains("/*direct*/")) { return executeDirectQuery(strquery); } else { return executeConjunctiveQuery(strquery); } } /*** * This method will 'chop' the original query into the subqueries, computing * the SQL for each of the nested query and composing everything into a * single SQL. * * @param strquery * @return * @throws Exception */ private OBDAResultSet executeEpistemicQuery(String strquery) throws OBDAException { try { OBDAResultSet result; String epistemicUnfolding = getSQLEpistemic(strquery); ResultSet set = sqlstatement.executeQuery(epistemicUnfolding); int columnCount = set.getMetaData().getColumnCount(); List<Term> typing = getDefaultTypingSignature(columnCount); result = new OWLOBDARefResultSet(set, typing, this); return result; } catch (Exception e) { throw new OBDAException(e); } } private OBDAResultSet executeDirectQuery(String query) throws OBDAException { try { OBDAResultSet result; ResultSet set = sqlstatement.executeQuery(query); int columnCount = set.getMetaData().getColumnCount(); List<Term> typing = getDefaultTypingSignature(columnCount); result = new OWLOBDARefResultSet(set, typing, this); return result; } catch (Exception e) { throw new OBDAException(e); } } private List<Term> getDefaultTypingSignature(int columnCount) { OBDADataFactory fac = OBDADataFactoryImpl.getInstance(); List<Term> signatureTyping = new ArrayList<Term>(); for (int i = 0; i < columnCount; i++) { signatureTyping.add(fac.getFunctionalTerm(OBDAVocabulary.RDFS_LITERAL, fac.getVariable("x"))); } return signatureTyping; } // private DatalogProgram getRewriting(DatalogProgram) { // // } // /*** * Translates a SPARQL query into Datalog dealing with equivalences and * verifying that the vocabulary of the query matches the one in the * ontology. If there are equivalences to handle, this is where its done * (i.e., renaming atoms that use predicates that have been replaced by a * canonical one. * * @param query * @return */ private DatalogProgram translateAndPreProcess(String strquery) throws OBDAException { // Contruct the datalog program object from the query string log.debug("Input user query:\n" + strquery); SPARQLDatalogTranslator sparqlTranslator = new SPARQLDatalogTranslator(); DatalogProgram program = null; try { program = sparqlTranslator.parse(strquery); } catch (QueryException e) { log.warn(e.getMessage()); } if (program == null) { // if the SPARQL translator doesn't work, // use the Datalog parser. DatalogProgramParser datalogParser = new DatalogProgramParser(); try { program = datalogParser.parse(strquery); } catch (RecognitionException e) { log.warn(e.getMessage()); program = null; } catch (IllegalArgumentException e2) { log.warn(e2.getMessage()); } } // Check the datalog object if (validator != null) { log.debug("Validating the user query..."); boolean isValid = validator.validate(program); if (!isValid) { Vector<String> invalidList = validator.getInvalidPredicates(); String msg = ""; for (String predicate : invalidList) { msg += "- " + predicate + "\n"; } throw new OBDAException("Unknown Classes/Properties in the query: \n" + msg); } } log.debug("Replacing equivalences..."); program = validator.replaceEquivalences(program); return program; } private DatalogProgram getUnfolding(DatalogProgram query) throws OBDAException { log.debug("Start the unfolding process..."); OBDAQuery unfolding = unfoldingmechanism.unfold((DatalogProgram) query); return (DatalogProgram)unfolding; } private String getSQL(DatalogProgram query, List<String> signature) throws OBDAException { if (((DatalogProgram) query).getRules().size() == 0) return ""; log.debug("Producing the SQL string..."); String sql = querygenerator.generateSourceQuery((DatalogProgram) query, signature); return sql; } private OBDAResultSet executeConjunctiveQuery(String strquery) throws OBDAException { List<String> signature = getSignature(strquery); DatalogProgram program = translateAndPreProcess(strquery); log.debug("Start the rewriting process..."); DatalogProgram rewriting = getRewriting(program); DatalogProgram unfolding = getUnfolding(rewriting); String sql = getSQL(unfolding, signature); OBDAResultSet result; log.debug("Executing the query and get the result..."); if (sql.equals("")) { /*** * Empty unfolding, constructing an empty result set */ if (program.getRules().size() < 1) { throw new OBDAException("Error, invalid query"); } result = new EmptyQueryResultSet(signature, this); } else { ResultSet set; try { set = sqlstatement.executeQuery(sql); } catch (SQLException e) { throw new OBDAException(e); } if (isDPBoolean(program)) { result = new BooleanOWLOBDARefResultSet(set, this); } else { List<Term> typingSignature = unfolding.getRules().get(0).getHead().getTerms(); try { result = new OWLOBDARefResultSet(set, typingSignature, this); } catch (SQLException e) { throw new OBDAException(e.getMessage()); } } } log.debug("Finish.\n"); return result; } /** * Returns the final rewriting of the given query */ public String getRewriting(String strquery) throws Exception { // TODO FIX to limit to SPARQL input and output DatalogProgram program = translateAndPreProcess(strquery); OBDAQuery rewriting = rewriter.rewrite(program); DatalogProgramToTextCodec codec = new DatalogProgramToTextCodec(unfoldingOBDAModel); return codec.encode((DatalogProgram) rewriting); } /** * Returns the final rewriting of the given query */ public DatalogProgram getRewriting(DatalogProgram program) throws OBDAException { OBDAQuery rewriting = rewriter.rewrite(program); return (DatalogProgram) rewriting; } private String getSQLEpistemic(String strquery) throws OBDAException { // FIRST WE try to analyze the query to find the CQs and the SQL part LinkedList<String> sql = new LinkedList<String>(); LinkedList<String> cqs = new LinkedList<String>(); StringBuffer query = new StringBuffer(strquery); Pattern pattern = Pattern.compile("[eE][tT][aA][bB][lL][eE]\\s*\\((\\r?\\n|\\n|.)+?\\)", Pattern.MULTILINE); while (true) { String[] splitquery = pattern.split(query.toString(), 2); if (splitquery.length > 1) { sql.add(splitquery[0]); query.delete(0, splitquery[0].length()); int position = query.toString().indexOf(splitquery[1]); String regex = query.toString().substring(0, position); cqs.add(regex.substring(regex.indexOf("(") + 1, regex.length() - 1)); query = new StringBuffer(splitquery[1]); } else { sql.add(splitquery[0]); break; } } // Now we generate the SQL for each CQ SPARQLDatalogTranslator t = new SPARQLDatalogTranslator(); LinkedList<String> sqlforcqs = new LinkedList<String>(); log.debug("Found {} embedded queries.", cqs.size()); for (int i = 0; i < cqs.size(); i++) { log.debug("Processing embedded query #{}", i); String cq = cqs.get(i); try { DatalogProgram p = t.parse(cq); List<String> signature = getSignature(cq); String finasql = getSQL(p, signature); log.debug("SQL: {}", finasql); sqlforcqs.add(finasql); } catch (Exception e) { log.error("Error processing nested query #{}", i); log.error(e.getMessage(), e); throw new OBDAException("Error processing nested query #" + i, e); } } // Now we concatenate the simple SQL with the rewritten SQL to generate // the query over DB. log.debug("Forming the final SQL."); StringBuffer finalquery = new StringBuffer(); for (int i = 0; i < sql.size(); i++) { finalquery.append(sql.get(i)); if (sqlforcqs.size() > i) { finalquery.append("("); finalquery.append(sqlforcqs.get(i)); finalquery.append(")"); } } log.debug("Final SQL query: {}", finalquery.toString()); if (finalquery.toString().equals("")) throw new OBDAException("Invalid SQL. The SQL query cannot be empty"); return finalquery.toString(); } /** * Returns the final rewriting of the given query */ public String getUnfolding(String strquery) throws Exception { String sql = null; if (strquery.split("[eE][tT][aA][bB][lL][eE]").length > 1) { sql = getSQLEpistemic(strquery); } if (strquery.contains("/*direct*/")) { sql = strquery; } else { DatalogProgram p = translateAndPreProcess(strquery); DatalogProgram rewriting = getRewriting(p); DatalogProgram unfolding = getUnfolding(rewriting); sql = getSQL(unfolding, getSignature(strquery)); } return sql; } /** * Returns the number of tuples returned by the query */ public int getTupleCount(String query) throws Exception { String unf = getUnfolding(query); String newsql = "SELECT count(*) FROM (" + unf + ") t1"; if (!canceled) { ResultSet set = sqlstatement.executeQuery(newsql); if (set.next()) { return set.getInt(1); } else { throw new Exception("Tuple count faild due to empty result set."); } } else { throw new Exception("Action canceled."); } } /** * Checks whether the given query is boolean or not * * @param dp * the given datalog program * @return true if the query is boolean, false otherwise */ private boolean isDPBoolean(DatalogProgram dp) { List<CQIE> rules = dp.getRules(); Iterator<CQIE> it = rules.iterator(); boolean bool = true; while (it.hasNext() && bool) { CQIE query = it.next(); Atom a = query.getHead(); if (a.getTerms().size() != 0) { bool = false; } } return bool; } @Override public void close() throws OBDAException { try { sqlstatement.close(); } catch (SQLException e) { throw new OBDAException(e); } } // private DatalogProgram getDatalogQuery(String query) throws OBDAException // { // SPARQLDatalogTranslator sparqlTranslator = new SPARQLDatalogTranslator(); // // DatalogProgram queryProgram = null; // try { // queryProgram = sparqlTranslator.parse(query); // } catch (QueryException e) { // log.warn(e.getMessage()); // } // // if (queryProgram == null) { // if the SPARQL translator doesn't work, // // use the Datalog parser. // DatalogProgramParser datalogParser = new DatalogProgramParser(); // try { // queryProgram = datalogParser.parse(query); // } catch (RecognitionException e) { // log.warn(e.getMessage()); // queryProgram = null; // } catch (IllegalArgumentException e2) { // log.warn(e2.getMessage()); // } // } // // if (queryProgram == null) // if it is still null // throw new OBDAException("Unsupported syntax"); // // return queryProgram; // } private List<String> getSignature(String query) throws OBDAException { SPARQLDatalogTranslator sparqlTranslator = new SPARQLDatalogTranslator(); List<String> signature = sparqlTranslator.getSignature(query); return signature; } @Override public void cancel() throws OBDAException { // TODO Auto-generated method stub } @Override public int executeUpdate(String query) throws OBDAException { // TODO Auto-generated method stub return 0; } @Override public int getFetchSize() throws OBDAException { try { return sqlstatement.getFetchSize(); } catch (SQLException e) { throw new OBDAException(e); } } @Override public int getMaxRows() throws OBDAException { try { return sqlstatement.getMaxRows(); } catch (SQLException e) { throw new OBDAException(e); } } @Override public void getMoreResults() throws OBDAException { try { sqlstatement.getMoreResults(); } catch (SQLException e) { throw new OBDAException(e); } } @Override public void setFetchSize(int rows) throws OBDAException { try { sqlstatement.setFetchSize(rows); } catch (SQLException e) { throw new OBDAException(e); } } @Override public void setMaxRows(int max) throws OBDAException { try { sqlstatement.setMaxRows(max); } catch (SQLException e) { throw new OBDAException(e); } } @Override public void setQueryTimeout(int seconds) throws OBDAException { try { sqlstatement.setQueryTimeout(seconds); } catch (SQLException e) { throw new OBDAException(e); } } public void setUnfoldingProgram(DatalogProgram unfoldingProgram) { this.unfoldingProgram = unfoldingProgram; } @Override public OBDAConnection getConnection() throws OBDAException { return conn; } @Override public OBDAResultSet getResultSet() throws OBDAException { // TODO Auto-generated method stub return null; } @Override public int getQueryTimeout() throws OBDAException { try { return sqlstatement.getQueryTimeout(); } catch (SQLException e) { throw new OBDAException(e); } } @Override public boolean isClosed() throws OBDAException { try { return sqlstatement.isClosed(); } catch (SQLException e) { throw new OBDAException(e); } } /*** * Inserts a stream of ABox assertions into the repository. * * @param data * @param recreateIndexes * Indicates if indexes (if any) should be droped before * inserting the tuples and recreated afterwards. Note, if no * index existed before the insert no drop will be done and no * new index will be created. * @throws SQLException */ public int insertData(Iterator<Assertion> data, boolean useFile, int commit, int batch) throws SQLException { int result = -1; if (!useFile) result = repository.insertData(conn.conn, data, commit, batch); else { try { // File temporalFile = new File("quest-copy.tmp"); // FileOutputStream os = new FileOutputStream(temporalFile); result = (int) repository.loadWithFile(conn.conn, data); // os.close(); } catch (IOException e) { log.error(e.getMessage()); } } return result; } /*** * As before, but using recreateIndexes = false. * * @param data * @throws SQLException */ public int insertData(Iterator<Assertion> data, int commit, int batch) throws SQLException { return insertData(data, false, commit, batch); } public void createIndexes() throws Exception { repository.createIndexes(conn.conn); } public void dropIndexes() throws Exception { repository.dropIndexes(conn.conn); } public boolean isIndexed() { if (repository == null) return false; return repository.isIndexed(conn.conn); } public void dropRepository() throws SQLException { if (repository == null) return; repository.dropDBSchema(conn.conn); } /*** * In an ABox store (classic) this methods triggers the generation of the * schema and the insertion of the metadata. * * @throws SQLException */ public void createDB() throws SQLException { repository.createDBSchema(conn.conn, false); repository.insertMetadata(conn.conn); } public void analyze() throws Exception { repository.collectStatistics(conn.conn); } }
* Fixed issue with get SQL, and execute for epistemic queries
obdalib/reformulation-core/src/main/java/it/unibz/krdb/obda/owlrefplatform/core/QuestStatement.java
* Fixed issue with get SQL, and execute for epistemic queries
Java
apache-2.0
6a6a4cce82e72c27ec9899359ab67f9e1ccaf424
0
WNPRC-EHR-Services/wnprc-modules,WNPRC-EHR-Services/wnprc-modules,WNPRC-EHR-Services/wnprc-modules,WNPRC-EHR-Services/wnprc-modules,WNPRC-EHR-Services/wnprc-modules,WNPRC-EHR-Services/wnprc-modules,WNPRC-EHR-Services/wnprc-modules
package org.labkey.dbutils.api.schema; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.labkey.api.collections.NamedObjectList; import org.labkey.api.data.ButtonBarConfig; import org.labkey.api.data.ColumnInfo; import org.labkey.api.data.Container; import org.labkey.api.data.ContainerFilter; import org.labkey.api.data.DatabaseTableType; import org.labkey.api.data.DbSchema; import org.labkey.api.data.MethodInfo; import org.labkey.api.data.SQLFragment; import org.labkey.api.data.TableInfo; import org.labkey.api.data.dialect.SqlDialect; import org.labkey.api.exp.property.Domain; import org.labkey.api.exp.property.DomainKind; import org.labkey.api.query.AggregateRowConfig; import org.labkey.api.query.BatchValidationException; import org.labkey.api.query.FieldKey; import org.labkey.api.query.QueryException; import org.labkey.api.query.QueryService; import org.labkey.api.query.QueryUpdateService; import org.labkey.api.query.SchemaTreeVisitor; import org.labkey.api.query.UserSchema; import org.labkey.api.query.ValidationException; import org.labkey.api.security.User; import org.labkey.api.security.UserPrincipal; import org.labkey.api.security.permissions.Permission; import org.labkey.api.util.ContainerContext; import org.labkey.api.util.Pair; import org.labkey.api.util.Path; import org.labkey.api.util.StringExpression; import org.labkey.api.view.ActionURL; import org.labkey.api.view.ViewContext; import org.labkey.data.xml.TableType; import org.labkey.data.xml.queryCustomView.FilterType; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; /** * This is just a decorator for TableInfo. It allows you to inherit from this class anonymously * to override interface methods on an instance of TableInfo. * * Created by jon on 10/26/16. */ public class DecoratedTableInfo implements TableInfo { protected TableInfo _tableInfo; public DecoratedTableInfo(@NotNull TableInfo tableInfo) { _tableInfo = tableInfo; } @Override public String getName() { return _tableInfo.getName(); } @Override public <R, P> R accept(SchemaTreeVisitor<R, P> visitor, SchemaTreeVisitor.Path path, P param) { return _tableInfo.accept(visitor, path, param); } @Override public String getTitle() { return _tableInfo.getTitle(); } @Nullable @Override public String getTitleField() { return _tableInfo.getTitleField(); } @Nullable @Override public String getSelectName() { return _tableInfo.getSelectName(); } @Nullable @Override public String getMetaDataName() { return _tableInfo.getMetaDataName(); } @NotNull @Override public SQLFragment getFromSQL(String alias) { return _tableInfo.getFromSQL(alias); } @Override public SQLFragment getFromSQL(String alias, Set<FieldKey> cols) { return _tableInfo.getFromSQL(alias, cols); } @Override public DbSchema getSchema() { return _tableInfo.getSchema(); } @Nullable @Override public UserSchema getUserSchema() { return _tableInfo.getUserSchema(); } @Override public SqlDialect getSqlDialect() { return _tableInfo.getSqlDialect(); } @Override public List<String> getPkColumnNames() { return _tableInfo.getPkColumnNames(); } @NotNull @Override public List<ColumnInfo> getPkColumns() { return _tableInfo.getPkColumns(); } @NotNull @Override public Map<String, Pair<IndexType, List<ColumnInfo>>> getUniqueIndices() { return _tableInfo.getUniqueIndices(); } @NotNull @Override public Map<String, Pair<IndexType, List<ColumnInfo>>> getAllIndices() { return _tableInfo.getAllIndices(); } @NotNull @Override public List<ColumnInfo> getAlternateKeyColumns() { return _tableInfo.getAlternateKeyColumns(); } @Override public ColumnInfo getVersionColumn() { return _tableInfo.getVersionColumn(); } @Override public String getVersionColumnName() { return _tableInfo.getVersionColumnName(); } @Override public String getTitleColumn() { return _tableInfo.getVersionColumnName(); } @Override public boolean hasDefaultTitleColumn() { return _tableInfo.hasDefaultTitleColumn(); } @Override public DatabaseTableType getTableType() { return _tableInfo.getTableType(); } @Override public @NotNull NamedObjectList getSelectList(String columnName, List<FilterType> filters, Integer maxRows, String titleColumn) { return _tableInfo.getSelectList(columnName, filters, maxRows, titleColumn); } @Override public ColumnInfo getColumn(@NotNull String colName) { return _tableInfo.getColumn(colName); } @Override public ColumnInfo getColumn(@NotNull FieldKey colName) { return _tableInfo.getColumn(colName); } @Override public List<ColumnInfo> getColumns() { return _tableInfo.getColumns(); } @Override public List<ColumnInfo> getUserEditableColumns() { return _tableInfo.getUserEditableColumns(); } @Override public List<ColumnInfo> getColumns(String colNames) { return _tableInfo.getColumns(colNames); } @Override public List<ColumnInfo> getColumns(String... colNameArray) { // Varargs can be passed as an array or list of arguments, so this'll work. return _tableInfo.getColumns(colNameArray); } @Override public Set<String> getColumnNameSet() { return _tableInfo.getColumnNameSet(); } @Override public Map<FieldKey, ColumnInfo> getExtendedColumns(boolean includeHidden) { return _tableInfo.getExtendedColumns(includeHidden); } @Override public List<FieldKey> getDefaultVisibleColumns() { return _tableInfo.getDefaultVisibleColumns(); } @Override public void setDefaultVisibleColumns(@Nullable Iterable<FieldKey> keys) { _tableInfo.setDefaultVisibleColumns(keys); } @Override public ButtonBarConfig getButtonBarConfig() { return _tableInfo.getButtonBarConfig(); } @Override public AggregateRowConfig getAggregateRowConfig() { return _tableInfo.getAggregateRowConfig(); } @Override public ActionURL getGridURL(Container container) { return _tableInfo.getGridURL(container); } @Override public ActionURL getInsertURL(Container container) { return _tableInfo.getInsertURL(container); } @Override public ActionURL getImportDataURL(Container container) { return _tableInfo.getImportDataURL(container); } @Override public ActionURL getDeleteURL(Container container) { return _tableInfo.getDeleteURL(container); } @Override public StringExpression getUpdateURL(@Nullable Set<FieldKey> columns, Container container) { return _tableInfo.getUpdateURL(columns, container); } @Override public StringExpression getDetailsURL(@Nullable Set<FieldKey> columns, Container container) { return _tableInfo.getDetailsURL(columns, container); } @Override public boolean hasDetailsURL() { return _tableInfo.hasDetailsURL(); } @Override public MethodInfo getMethod(String name) { return _tableInfo.getMethod(name); } @Override public String getImportMessage() { return _tableInfo.getImportMessage(); } @Override public List<Pair<String, String>> getImportTemplates(ViewContext ctx) { return _tableInfo.getImportTemplates(ctx); } @Override public List<Pair<String, StringExpression>> getRawImportTemplates() { return _tableInfo.getRawImportTemplates(); } @Override public boolean isPublic() { return _tableInfo.isPublic(); } @Override public String getPublicName() { return _tableInfo.getPublicName(); } @Override public String getPublicSchemaName() { return _tableInfo.getPublicSchemaName(); } @Override public boolean hasContainerColumn() { return _tableInfo.hasContainerColumn(); } @Override public boolean needsContainerClauseAdded() { return _tableInfo.needsContainerClauseAdded(); } @Override public ContainerFilter getContainerFilter() { return _tableInfo.getContainerFilter(); } @Override public void overlayMetadata(String tableName, UserSchema schema, Collection<QueryException> errors) { _tableInfo.overlayMetadata(tableName, schema, errors); } @Override public void overlayMetadata(Collection<TableType> metadata, UserSchema schema, Collection<QueryException> errors) { _tableInfo.overlayMetadata(metadata, schema, errors); } @Override public boolean isMetadataOverrideable() { return _tableInfo.isMetadataOverrideable(); } @Override public ColumnInfo getLookupColumn(ColumnInfo parent, String name) { return _tableInfo.getLookupColumn(parent, name); } @Override public int getCacheSize() { return _tableInfo.getCacheSize(); } @Override public String getDescription() { return _tableInfo.getDescription(); } @Nullable @Override public Domain getDomain() { return _tableInfo.getDomain(); } @Nullable @Override public DomainKind getDomainKind() { return _tableInfo.getDomainKind(); } @Nullable @Override public QueryUpdateService getUpdateService() { return _tableInfo.getUpdateService(); } @NotNull @Override public Collection<QueryService.ParameterDecl> getNamedParameters() { return _tableInfo.getNamedParameters(); } @Override public void fireBatchTrigger(Container c, User u,TriggerType type, boolean before, BatchValidationException errors, Map<String, Object> extraContext) throws BatchValidationException { _tableInfo.fireBatchTrigger(c, u, type, before, errors, extraContext); } @Override public void fireRowTrigger(Container c, User u, TriggerType type, boolean before, int rowNumber, @Nullable Map<String, Object> newRow, @Nullable Map<String, Object> oldRow, Map<String, Object> extraContext) throws ValidationException { _tableInfo.fireRowTrigger(c, u, type, before, rowNumber, newRow, oldRow, extraContext); } @Override public boolean hasTriggers(Container c) { return _tableInfo.hasTriggers(c); } @Override public void resetTriggers(Container c) { _tableInfo.resetTriggers(c); } @Override public Path getNotificationKey() { return _tableInfo.getNotificationKey(); } @Override public void setLocked(boolean b) { _tableInfo.setLocked(b); } @Override public boolean isLocked() { return _tableInfo.isLocked(); } @Override public boolean supportsContainerFilter() { return _tableInfo.supportsContainerFilter(); } @Override public boolean hasUnionTable() { return _tableInfo.hasUnionTable(); } @Nullable @Override public ContainerContext getContainerContext() { return _tableInfo.getContainerContext(); } @Override public FieldKey getContainerFieldKey(){return _tableInfo.getContainerFieldKey();} @Override public Set<ColumnInfo> getAllInvolvedColumns(Collection<ColumnInfo> selectColumns) { return _tableInfo.getAllInvolvedColumns(selectColumns); } @Override public boolean hasPermission(@NotNull UserPrincipal user, @NotNull Class<? extends Permission> perm) { return _tableInfo.hasPermission(user, perm); } @Override public boolean supportsAuditTracking() { return _tableInfo.supportsAuditTracking(); } }
DBUtils/api-src/org/labkey/dbutils/api/schema/DecoratedTableInfo.java
package org.labkey.dbutils.api.schema; import org.jetbrains.annotations.NotNull; import org.jetbrains.annotations.Nullable; import org.labkey.api.collections.NamedObjectList; import org.labkey.api.data.ButtonBarConfig; import org.labkey.api.data.ColumnInfo; import org.labkey.api.data.Container; import org.labkey.api.data.ContainerFilter; import org.labkey.api.data.DatabaseTableType; import org.labkey.api.data.DbSchema; import org.labkey.api.data.MethodInfo; import org.labkey.api.data.SQLFragment; import org.labkey.api.data.TableInfo; import org.labkey.api.data.dialect.SqlDialect; import org.labkey.api.exp.property.Domain; import org.labkey.api.exp.property.DomainKind; import org.labkey.api.gwt.client.AuditBehaviorType; import org.labkey.api.query.AggregateRowConfig; import org.labkey.api.query.BatchValidationException; import org.labkey.api.query.FieldKey; import org.labkey.api.query.QueryException; import org.labkey.api.query.QueryService; import org.labkey.api.query.QueryUpdateService; import org.labkey.api.query.SchemaTreeVisitor; import org.labkey.api.query.UserSchema; import org.labkey.api.query.ValidationException; import org.labkey.api.security.User; import org.labkey.api.security.UserPrincipal; import org.labkey.api.security.permissions.Permission; import org.labkey.api.util.ContainerContext; import org.labkey.api.util.Pair; import org.labkey.api.util.Path; import org.labkey.api.util.StringExpression; import org.labkey.api.view.ActionURL; import org.labkey.api.view.ViewContext; import org.labkey.data.xml.TableType; import org.labkey.data.xml.queryCustomView.FilterType; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.Set; /** * This is just a decorator for TableInfo. It allows you to inherit from this class anonymously * to override interface methods on an instance of TableInfo. * * Created by jon on 10/26/16. */ public class DecoratedTableInfo implements TableInfo { protected TableInfo _tableInfo; public DecoratedTableInfo(@NotNull TableInfo tableInfo) { _tableInfo = tableInfo; } @Override public String getName() { return _tableInfo.getName(); } @Override public <R, P> R accept(SchemaTreeVisitor<R, P> visitor, SchemaTreeVisitor.Path path, P param) { return _tableInfo.accept(visitor, path, param); } @Override public String getTitle() { return _tableInfo.getTitle(); } @Nullable @Override public String getTitleField() { return _tableInfo.getTitleField(); } @Nullable @Override public String getSelectName() { return _tableInfo.getSelectName(); } @Nullable @Override public String getMetaDataName() { return _tableInfo.getMetaDataName(); } @NotNull @Override public SQLFragment getFromSQL(String alias) { return _tableInfo.getFromSQL(alias); } @Override public SQLFragment getFromSQL(String alias, Set<FieldKey> cols) { return _tableInfo.getFromSQL(alias, cols); } @Override public DbSchema getSchema() { return _tableInfo.getSchema(); } @Nullable @Override public UserSchema getUserSchema() { return _tableInfo.getUserSchema(); } @Override public SqlDialect getSqlDialect() { return _tableInfo.getSqlDialect(); } @Override public List<String> getPkColumnNames() { return _tableInfo.getPkColumnNames(); } @NotNull @Override public List<ColumnInfo> getPkColumns() { return _tableInfo.getPkColumns(); } @NotNull @Override public Map<String, Pair<IndexType, List<ColumnInfo>>> getUniqueIndices() { return _tableInfo.getUniqueIndices(); } @NotNull @Override public Map<String, Pair<IndexType, List<ColumnInfo>>> getAllIndices() { return _tableInfo.getAllIndices(); } @NotNull @Override public List<ColumnInfo> getAlternateKeyColumns() { return _tableInfo.getAlternateKeyColumns(); } @Override public ColumnInfo getVersionColumn() { return _tableInfo.getVersionColumn(); } @Override public String getVersionColumnName() { return _tableInfo.getVersionColumnName(); } @Override public String getTitleColumn() { return _tableInfo.getVersionColumnName(); } @Override public boolean hasDefaultTitleColumn() { return _tableInfo.hasDefaultTitleColumn(); } @Override public DatabaseTableType getTableType() { return _tableInfo.getTableType(); } @Override @NotNull NamedObjectList getSelectList(String columnName, List<FilterType> filters, Integer maxRows, String titleColumn); { return _tableInfo.getSelectList(columnName, filters, maxRows, titleColumn); } @Override public ColumnInfo getColumn(@NotNull String colName) { return _tableInfo.getColumn(colName); } @Override public ColumnInfo getColumn(@NotNull FieldKey colName) { return _tableInfo.getColumn(colName); } @Override public List<ColumnInfo> getColumns() { return _tableInfo.getColumns(); } @Override public List<ColumnInfo> getUserEditableColumns() { return _tableInfo.getUserEditableColumns(); } @Override public List<ColumnInfo> getColumns(String colNames) { return _tableInfo.getColumns(colNames); } @Override public List<ColumnInfo> getColumns(String... colNameArray) { // Varargs can be passed as an array or list of arguments, so this'll work. return _tableInfo.getColumns(colNameArray); } @Override public Set<String> getColumnNameSet() { return _tableInfo.getColumnNameSet(); } @Override public Map<FieldKey, ColumnInfo> getExtendedColumns(boolean includeHidden) { return _tableInfo.getExtendedColumns(includeHidden); } @Override public List<FieldKey> getDefaultVisibleColumns() { return _tableInfo.getDefaultVisibleColumns(); } @Override public void setDefaultVisibleColumns(@Nullable Iterable<FieldKey> keys) { _tableInfo.setDefaultVisibleColumns(keys); } @Override public ButtonBarConfig getButtonBarConfig() { return _tableInfo.getButtonBarConfig(); } @Override public AggregateRowConfig getAggregateRowConfig() { return _tableInfo.getAggregateRowConfig(); } @Override public ActionURL getGridURL(Container container) { return _tableInfo.getGridURL(container); } @Override public ActionURL getInsertURL(Container container) { return _tableInfo.getInsertURL(container); } @Override public ActionURL getImportDataURL(Container container) { return _tableInfo.getImportDataURL(container); } @Override public ActionURL getDeleteURL(Container container) { return _tableInfo.getDeleteURL(container); } @Override public StringExpression getUpdateURL(@Nullable Set<FieldKey> columns, Container container) { return _tableInfo.getUpdateURL(columns, container); } @Override public StringExpression getDetailsURL(@Nullable Set<FieldKey> columns, Container container) { return _tableInfo.getDetailsURL(columns, container); } @Override public boolean hasDetailsURL() { return _tableInfo.hasDetailsURL(); } @Override public MethodInfo getMethod(String name) { return _tableInfo.getMethod(name); } @Override public String getImportMessage() { return _tableInfo.getImportMessage(); } @Override public List<Pair<String, String>> getImportTemplates(ViewContext ctx) { return _tableInfo.getImportTemplates(ctx); } @Override public List<Pair<String, StringExpression>> getRawImportTemplates() { return _tableInfo.getRawImportTemplates(); } @Override public boolean isPublic() { return _tableInfo.isPublic(); } @Override public String getPublicName() { return _tableInfo.getPublicName(); } @Override public String getPublicSchemaName() { return _tableInfo.getPublicSchemaName(); } @Override public boolean hasContainerColumn() { return _tableInfo.hasContainerColumn(); } @Override public boolean needsContainerClauseAdded() { return _tableInfo.needsContainerClauseAdded(); } @Override public ContainerFilter getContainerFilter() { return _tableInfo.getContainerFilter(); } @Override public void overlayMetadata(String tableName, UserSchema schema, Collection<QueryException> errors) { _tableInfo.overlayMetadata(tableName, schema, errors); } @Override public void overlayMetadata(Collection<TableType> metadata, UserSchema schema, Collection<QueryException> errors) { _tableInfo.overlayMetadata(metadata, schema, errors); } @Override public boolean isMetadataOverrideable() { return _tableInfo.isMetadataOverrideable(); } @Override public ColumnInfo getLookupColumn(ColumnInfo parent, String name) { return _tableInfo.getLookupColumn(parent, name); } @Override public int getCacheSize() { return _tableInfo.getCacheSize(); } @Override public String getDescription() { return _tableInfo.getDescription(); } @Nullable @Override public Domain getDomain() { return _tableInfo.getDomain(); } @Nullable @Override public DomainKind getDomainKind() { return _tableInfo.getDomainKind(); } @Nullable @Override public QueryUpdateService getUpdateService() { return _tableInfo.getUpdateService(); } @NotNull @Override public Collection<QueryService.ParameterDecl> getNamedParameters() { return _tableInfo.getNamedParameters(); } @Override public void fireBatchTrigger(Container c, User u,TriggerType type, boolean before, BatchValidationException errors, Map<String, Object> extraContext) throws BatchValidationException { _tableInfo.fireBatchTrigger(c, u, type, before, errors, extraContext); } @Override public void fireRowTrigger(Container c, User u, TriggerType type, boolean before, int rowNumber, @Nullable Map<String, Object> newRow, @Nullable Map<String, Object> oldRow, Map<String, Object> extraContext) throws ValidationException { _tableInfo.fireRowTrigger(c, u, type, before, rowNumber, newRow, oldRow, extraContext); } @Override public boolean hasTriggers(Container c) { return _tableInfo.hasTriggers(c); } @Override public void resetTriggers(Container c) { _tableInfo.resetTriggers(c); } @Override public Path getNotificationKey() { return _tableInfo.getNotificationKey(); } @Override public void setLocked(boolean b) { _tableInfo.setLocked(b); } @Override public boolean isLocked() { return _tableInfo.isLocked(); } @Override public boolean supportsContainerFilter() { return _tableInfo.supportsContainerFilter(); } @Override public boolean hasUnionTable() { return _tableInfo.hasUnionTable(); } @Nullable @Override public ContainerContext getContainerContext() { return _tableInfo.getContainerContext(); } @Override public FieldKey getContainerFieldKey(){return _tableInfo.getContainerFieldKey();} @Override public Set<ColumnInfo> getAllInvolvedColumns(Collection<ColumnInfo> selectColumns) { return _tableInfo.getAllInvolvedColumns(selectColumns); } @Override public boolean hasPermission(@NotNull UserPrincipal user, @NotNull Class<? extends Permission> perm) { return _tableInfo.hasPermission(user, perm); } @Override public boolean supportsAuditTracking() { return _tableInfo.supportsAuditTracking(); } }
Second attempt to fix TableInfo.getSelectList() override, this time from a machine that's building the module
DBUtils/api-src/org/labkey/dbutils/api/schema/DecoratedTableInfo.java
Second attempt to fix TableInfo.getSelectList() override, this time from a machine that's building the module
Java
apache-2.0
f0b763192c0d2b81de97d7f6c6d3c254f60f029c
0
titusfortner/selenium,joshmgrant/selenium,titusfortner/selenium,asolntsev/selenium,Dude-X/selenium,SeleniumHQ/selenium,asolntsev/selenium,SeleniumHQ/selenium,asolntsev/selenium,Ardesco/selenium,titusfortner/selenium,titusfortner/selenium,Ardesco/selenium,Ardesco/selenium,joshmgrant/selenium,joshmgrant/selenium,HtmlUnit/selenium,HtmlUnit/selenium,joshmgrant/selenium,HtmlUnit/selenium,asolntsev/selenium,Ardesco/selenium,titusfortner/selenium,joshmgrant/selenium,Ardesco/selenium,valfirst/selenium,asolntsev/selenium,titusfortner/selenium,valfirst/selenium,valfirst/selenium,joshmgrant/selenium,SeleniumHQ/selenium,SeleniumHQ/selenium,titusfortner/selenium,Dude-X/selenium,asolntsev/selenium,HtmlUnit/selenium,SeleniumHQ/selenium,titusfortner/selenium,Dude-X/selenium,valfirst/selenium,valfirst/selenium,Dude-X/selenium,titusfortner/selenium,valfirst/selenium,Dude-X/selenium,Ardesco/selenium,valfirst/selenium,asolntsev/selenium,Dude-X/selenium,titusfortner/selenium,Ardesco/selenium,HtmlUnit/selenium,asolntsev/selenium,SeleniumHQ/selenium,Ardesco/selenium,SeleniumHQ/selenium,asolntsev/selenium,valfirst/selenium,joshmgrant/selenium,SeleniumHQ/selenium,valfirst/selenium,HtmlUnit/selenium,SeleniumHQ/selenium,valfirst/selenium,Dude-X/selenium,SeleniumHQ/selenium,valfirst/selenium,HtmlUnit/selenium,HtmlUnit/selenium,Dude-X/selenium,SeleniumHQ/selenium,Dude-X/selenium,Ardesco/selenium,HtmlUnit/selenium,titusfortner/selenium,joshmgrant/selenium,joshmgrant/selenium,HtmlUnit/selenium,joshmgrant/selenium,joshmgrant/selenium
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.grid.router; import com.google.common.collect.ImmutableMap; import io.opentelemetry.context.Scope; import io.opentelemetry.trace.Span; import io.opentelemetry.trace.Tracer; import org.openqa.selenium.grid.data.DistributorStatus; import org.openqa.selenium.grid.distributor.Distributor; import org.openqa.selenium.json.Json; import org.openqa.selenium.remote.http.HttpClient; import org.openqa.selenium.remote.http.HttpHandler; import org.openqa.selenium.remote.http.HttpRequest; import org.openqa.selenium.remote.http.HttpResponse; import org.openqa.selenium.remote.tracing.HttpTracing; import org.openqa.selenium.remote.tracing.TracedCallable; import java.io.IOException; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeoutException; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.SECONDS; import static java.util.stream.Collectors.toList; import static org.openqa.selenium.json.Json.MAP_TYPE; import static org.openqa.selenium.remote.http.Contents.string; import static org.openqa.selenium.remote.http.Contents.utf8String; import static org.openqa.selenium.remote.http.HttpMethod.GET; import static org.openqa.selenium.remote.tracing.HttpTags.HTTP_RESPONSE; import static org.openqa.selenium.remote.tracing.HttpTracing.newSpanAsChildOf; class GridStatusHandler implements HttpHandler { private static final ScheduledExecutorService SCHEDULED_SERVICE = Executors.newScheduledThreadPool( 1, r -> { Thread thread = new Thread(r, "Scheduled grid status executor"); thread.setDaemon(true); return thread; }); private static final ExecutorService EXECUTOR_SERVICE = Executors.newCachedThreadPool( r -> { Thread thread = new Thread(r, "Grid status executor"); thread.setDaemon(true); return thread; }); private final Json json; private final Tracer tracer; private final HttpClient.Factory clientFactory; private final Distributor distributor; public GridStatusHandler(Json json, Tracer tracer, HttpClient.Factory clientFactory, Distributor distributor) { this.json = Objects.requireNonNull(json, "JSON encoder must be set."); this.tracer = Objects.requireNonNull(tracer, "Tracer must be set."); this.clientFactory = Objects.requireNonNull(clientFactory, "HTTP client factory must be set."); this.distributor = Objects.requireNonNull(distributor, "Distributor must be set."); } @Override public HttpResponse execute(HttpRequest req) { long start = System.currentTimeMillis(); Span span = newSpanAsChildOf(tracer, req, "router.status").startSpan(); try (Scope scope = tracer.withSpan(span)) { DistributorStatus status; try { status = EXECUTOR_SERVICE.submit(new TracedCallable<>(tracer, span, distributor::getStatus)).get(2, SECONDS); } catch (ExecutionException | TimeoutException e) { return new HttpResponse().setContent(utf8String(json.toJson( ImmutableMap.of("value", ImmutableMap.of( "ready", false, "message", "Unable to read distributor status."))))); } catch (InterruptedException e) { Thread.currentThread().interrupt(); return new HttpResponse().setContent(utf8String(json.toJson( ImmutableMap.of("value", ImmutableMap.of( "ready", false, "message", "Reading distributor status was interrupted."))))); } boolean ready = status.hasCapacity(); long remaining = System.currentTimeMillis() + 2000 - start; List<Future<Map<String, Object>>> nodeResults = status.getNodes().stream() .map(summary -> { ImmutableMap<String, Object> defaultResponse = ImmutableMap.of( "id", summary.getNodeId(), "uri", summary.getUri(), "maxSessions", summary.getMaxSessionCount(), "stereotypes", summary.getStereotypes(), "warning", "Unable to read data from node."); CompletableFuture<Map<String, Object>> toReturn = new CompletableFuture<>(); Future<?> future = EXECUTOR_SERVICE.submit( () -> { try { HttpClient client = clientFactory.createClient(summary.getUri().toURL()); HttpRequest nodeStatusReq = new HttpRequest(GET, "/se/grid/node/status"); HttpTracing.inject(tracer, span, nodeStatusReq); HttpResponse res = client.execute(nodeStatusReq); toReturn.complete(res.getStatus() == 200 ? json.toType(string(res), MAP_TYPE) : defaultResponse); } catch (IOException e) { toReturn.complete(defaultResponse); } }); SCHEDULED_SERVICE.schedule( () -> { if (!toReturn.isDone()) { toReturn.complete(defaultResponse); future.cancel(true); } }, remaining, MILLISECONDS); return toReturn; }) .collect(toList()); ImmutableMap.Builder<String, Object> value = ImmutableMap.builder(); value.put("ready", ready); value.put("message", ready ? "Selenium Grid ready." : "Selenium Grid not ready."); value.put("nodes", nodeResults.stream() .map(summary -> { try { return summary.get(); } catch (ExecutionException e) { throw wrap(e); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw wrap(e); } }) .collect(toList())); HttpResponse res = new HttpResponse().setContent(utf8String(json.toJson( ImmutableMap.of("value", value.build())))); HTTP_RESPONSE.accept(span, res); return res; } finally { span.end(); } } private RuntimeException wrap(Exception e) { if (e instanceof InterruptedException) { Thread.currentThread().interrupt(); return new RuntimeException(e); } Throwable cause = e.getCause(); if (cause == null) { return e instanceof RuntimeException ? (RuntimeException) e : new RuntimeException(e); } return cause instanceof RuntimeException ? (RuntimeException) cause : new RuntimeException(cause); } }
java/server/src/org/openqa/selenium/grid/router/GridStatusHandler.java
// Licensed to the Software Freedom Conservancy (SFC) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The SFC licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. package org.openqa.selenium.grid.router; import com.google.common.collect.ImmutableMap; import io.opentelemetry.context.Scope; import io.opentelemetry.trace.Span; import io.opentelemetry.trace.Tracer; import org.openqa.selenium.grid.data.DistributorStatus; import org.openqa.selenium.grid.distributor.Distributor; import org.openqa.selenium.json.Json; import org.openqa.selenium.remote.http.HttpClient; import org.openqa.selenium.remote.http.HttpHandler; import org.openqa.selenium.remote.http.HttpRequest; import org.openqa.selenium.remote.http.HttpResponse; import org.openqa.selenium.remote.tracing.HttpTracing; import org.openqa.selenium.remote.tracing.TracedCallable; import java.io.IOException; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.Future; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeoutException; import static java.util.concurrent.TimeUnit.MILLISECONDS; import static java.util.concurrent.TimeUnit.SECONDS; import static java.util.stream.Collectors.toList; import static org.openqa.selenium.json.Json.MAP_TYPE; import static org.openqa.selenium.remote.http.Contents.string; import static org.openqa.selenium.remote.http.Contents.utf8String; import static org.openqa.selenium.remote.http.HttpMethod.GET; import static org.openqa.selenium.remote.tracing.HttpTags.HTTP_RESPONSE; import static org.openqa.selenium.remote.tracing.HttpTracing.newSpanAsChildOf; class GridStatusHandler implements HttpHandler { private static final ScheduledExecutorService SCHEDULED_SERVICE = Executors.newScheduledThreadPool( 1, r -> { Thread thread = new Thread(r, "Scheduled grid status executor"); thread.setDaemon(true); return thread; }); private static final ExecutorService EXECUTOR_SERVICE = Executors.newCachedThreadPool( r -> { Thread thread = new Thread(r, "Grid status executor"); thread.setDaemon(true); return thread; }); private final Json json; private final Tracer tracer; private final HttpClient.Factory clientFactory; private final Distributor distributor; public GridStatusHandler(Json json, Tracer tracer, HttpClient.Factory clientFactory, Distributor distributor) { this.json = Objects.requireNonNull(json, "JSON encoder must be set."); this.tracer = Objects.requireNonNull(tracer, "Tracer must be set."); this.clientFactory = Objects.requireNonNull(clientFactory, "HTTP client factory must be set."); this.distributor = Objects.requireNonNull(distributor, "Distributor must be set."); } @Override public HttpResponse execute(HttpRequest req) { long start = System.currentTimeMillis(); Span span = newSpanAsChildOf(tracer, req, "router.status").startSpan(); try (Scope scope = tracer.withSpan(span)) { DistributorStatus status; try { status = EXECUTOR_SERVICE.submit(new TracedCallable<>(tracer, span, distributor::getStatus)).get(2, SECONDS); } catch (ExecutionException | TimeoutException e) { return new HttpResponse().setContent(utf8String(json.toJson( ImmutableMap.of("value", ImmutableMap.of( "ready", false, "message", "Unable to read distributor status."))))); } catch (InterruptedException e) { Thread.currentThread().interrupt(); return new HttpResponse().setContent(utf8String(json.toJson( ImmutableMap.of("value", ImmutableMap.of( "ready", false, "message", "Reading distributor status was interrupted."))))); } boolean ready = status.hasCapacity(); String message = ready ? "Selenium Grid ready." : "Selenium Grid not ready."; long remaining = System.currentTimeMillis() + 2000 - start; List<Future<Map<String, Object>>> nodeResults = status.getNodes().stream() .map(summary -> { ImmutableMap<String, Object> defaultResponse = ImmutableMap.of( "id", summary.getNodeId(), "uri", summary.getUri(), "maxSessions", summary.getMaxSessionCount(), "stereotypes", summary.getStereotypes(), "warning", "Unable to read data from node."); CompletableFuture<Map<String, Object>> toReturn = new CompletableFuture<>(); Future<?> future = EXECUTOR_SERVICE.submit( () -> { try { HttpClient client = clientFactory.createClient(summary.getUri().toURL()); HttpRequest nodeStatusReq = new HttpRequest(GET, "/se/grid/node/status"); HttpTracing.inject(tracer, span, nodeStatusReq); HttpResponse res = client.execute(nodeStatusReq); toReturn.complete(res.getStatus() == 200 ? json.toType(string(res), MAP_TYPE) : defaultResponse); } catch (IOException e) { e.printStackTrace(); toReturn.complete(defaultResponse); } }); SCHEDULED_SERVICE.schedule( () -> { if (!toReturn.isDone()) { toReturn.complete(defaultResponse); future.cancel(true); } }, remaining, MILLISECONDS); return toReturn; }) .collect(toList()); ImmutableMap.Builder<String, Object> value = ImmutableMap.builder(); value.put("ready", ready); value.put("message", message); value.put("nodes", nodeResults.stream() .map(summary -> { try { return summary.get(); } catch (ExecutionException e) { throw wrap(e); } catch (InterruptedException e) { Thread.currentThread().interrupt(); throw wrap(e); } }) .collect(toList())); HttpResponse res = new HttpResponse().setContent(utf8String(json.toJson( ImmutableMap.of("value", value.build())))); HTTP_RESPONSE.accept(span, res); return res; } finally { span.end(); } } private RuntimeException wrap(Exception e) { if (e instanceof InterruptedException) { Thread.currentThread().interrupt(); return new RuntimeException(e); } Throwable cause = e.getCause(); if (cause == null) { return e instanceof RuntimeException ? (RuntimeException) e : new RuntimeException(e); } return cause instanceof RuntimeException ? (RuntimeException) cause : new RuntimeException(cause); } }
[java] Don't write anything to stdout without need
java/server/src/org/openqa/selenium/grid/router/GridStatusHandler.java
[java] Don't write anything to stdout without need
Java
apache-2.0
b643e67e3788f63043e4e37bab8481445502bd94
0
RichardHightower/qbit,RichardHightower/qbit,RichardHightower/qbit,advantageous/qbit,advantageous/qbit,advantageous/qbit,advantageous/qbit
/** * **************************************************************************** * Copyright (c) 2015. Rick Hightower, Geoff Chandler * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * __ __ _ _____ _ _ * \ \ / / | | / ____| | | | | * \ \ /\ / /__| |__| (___ ___ ___| | _____| |_ * \ \/ \/ / _ \ '_ \\___ \ / _ \ / __| |/ / _ \ __| * \ /\ / __/ |_) |___) | (_) | (__| < __/ |_ * \/ \/ \___|_.__/_____/ \___/ \___|_|\_\___|\__| * _ _____ ____ _ _ * | |/ ____|/ __ \| \ | | * | | (___ | | | | \| | * _ | |\___ \| | | | . ` | * | |__| |____) | |__| | |\ | * \____/|_____/ \____/|_|_\_|_ * | __ \| ____|/ ____|__ __| * | |__) | |__ | (___ | | * | _ /| __| \___ \ | | * | | \ \| |____ ____) | | | * |_| \_\______|_____/ |_|___ _ * | \/ (_) / ____| (_) * | \ / |_ ___ _ __ ___| (___ ___ _ ____ ___ ___ ___ * | |\/| | |/ __| '__/ _ \\___ \ / _ \ '__\ \ / / |/ __/ _ \ * | | | | | (__| | | (_) |___) | __/ | \ V /| | (_| __/ * |_| |_|_|\___|_| \___/_____/ \___|_| \_/ |_|\___\___| * <p> * QBit - The Microservice lib for Java : JSON, WebSocket, REST. Be The Web! * http://rick-hightower.blogspot.com/2014/12/rise-of-machines-writing-high-speed.html * http://rick-hightower.blogspot.com/2014/12/quick-guide-to-programming-services-in.html * http://rick-hightower.blogspot.com/2015/01/quick-startClient-qbit-programming.html * http://rick-hightower.blogspot.com/2015/01/high-speed-soa.html * http://rick-hightower.blogspot.com/2015/02/qbit-event-bus.html * **************************************************************************** */ package io.advantageous.qbit.service.impl; import io.advantageous.boon.core.reflection.BeanUtils; import io.advantageous.boon.core.reflection.ClassMeta; import io.advantageous.qbit.Factory; import io.advantageous.qbit.GlobalConstants; import io.advantageous.qbit.client.BeforeMethodSent; import io.advantageous.qbit.client.ClientProxy; import io.advantageous.qbit.concurrent.PeriodicScheduler; import io.advantageous.qbit.events.EventManager; import io.advantageous.qbit.message.*; import io.advantageous.qbit.message.impl.MethodCallLocal; import io.advantageous.qbit.queue.*; import io.advantageous.qbit.service.*; import io.advantageous.qbit.system.QBitSystemManager; import io.advantageous.qbit.time.Duration; import io.advantageous.qbit.transforms.NoOpResponseTransformer; import io.advantageous.qbit.transforms.Transformer; import io.advantageous.qbit.util.Timer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.util.Collection; import java.util.Optional; import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import static io.advantageous.qbit.QBit.factory; import static io.advantageous.qbit.service.ServiceContext.serviceContext; /** * @author rhightower on 2/18/15. */ public class BaseServiceQueueImpl implements ServiceQueue { protected static final ThreadLocal<ServiceQueue> serviceThreadLocal = new ThreadLocal<>(); protected final QBitSystemManager systemManager; protected final Logger logger = LoggerFactory.getLogger(ServiceQueueImpl.class); protected final boolean debug = GlobalConstants.DEBUG && logger.isDebugEnabled(); protected final Object service; protected final Queue<Response<Object>> responseQueue; protected final Queue<MethodCall<Object>> requestQueue; protected final Queue<Event<Object>> eventQueue; protected final QueueBuilder requestQueueBuilder; protected final QueueBuilder responseQueueBuilder; protected final boolean handleCallbacks; protected final ServiceMethodHandler serviceMethodHandler; protected final SendQueue<Response<Object>> responseSendQueue; private final Factory factory; private final BeforeMethodSent beforeMethodSent; private final Optional<EventManager> eventManager; private final boolean joinEventManager; private final AtomicBoolean started = new AtomicBoolean(false); private final BeforeMethodCall beforeMethodCall; private final BeforeMethodCall beforeMethodCallAfterTransform; private final AfterMethodCall afterMethodCall; private final AfterMethodCall afterMethodCallAfterTransform; private final CallbackManager callbackManager; private final QueueCallBackHandler queueCallBackHandler; protected volatile long lastResponseFlushTime = Timer.timer().now(); private Transformer<Request, Object> requestObjectTransformer = ServiceConstants.NO_OP_ARG_TRANSFORM; private Transformer<Response<Object>, Response> responseObjectTransformer = new NoOpResponseTransformer(); private AtomicBoolean failing = new AtomicBoolean(); public BaseServiceQueueImpl(final String rootAddress, final String serviceAddress, final Object service, final QueueBuilder requestQueueBuilder, final QueueBuilder responseQueueBuilder, final ServiceMethodHandler serviceMethodHandler, final Queue<Response<Object>> responseQueue, final boolean async, final boolean handleCallbacks, final QBitSystemManager systemManager, final BeforeMethodCall beforeMethodCall, final BeforeMethodCall beforeMethodCallAfterTransform, final AfterMethodCall afterMethodCall, final AfterMethodCall afterMethodCallAfterTransform, final QueueCallBackHandler queueCallBackHandler, final CallbackManager callbackManager, final BeforeMethodSent beforeMethodSent, final EventManager eventManager, final boolean joinEventManager) { this.eventManager = Optional.ofNullable(eventManager); this.joinEventManager = joinEventManager; this.beforeMethodSent = beforeMethodSent; this.beforeMethodCall = beforeMethodCall; this.beforeMethodCallAfterTransform = beforeMethodCallAfterTransform; this.afterMethodCall = afterMethodCall; this.afterMethodCallAfterTransform = afterMethodCallAfterTransform; this.callbackManager = callbackManager; if (queueCallBackHandler == null) { this.queueCallBackHandler = new QueueCallBackHandler() { @Override public void queueLimit() { } @Override public void queueEmpty() { } }; } else { this.queueCallBackHandler = queueCallBackHandler; } if (requestQueueBuilder == null) { this.requestQueueBuilder = new QueueBuilder(); } else { this.requestQueueBuilder = BeanUtils.copy(requestQueueBuilder); } if (responseQueueBuilder == null) { this.responseQueueBuilder = new QueueBuilder(); } else { this.responseQueueBuilder = BeanUtils.copy(responseQueueBuilder); } if (responseQueue == null) { logger.info("RESPONSE QUEUE WAS NULL CREATING ONE for service"); this.responseQueue = this.responseQueueBuilder.setName("Response Queue " + serviceMethodHandler.address()).build(); } else { this.responseQueue = responseQueue; } this.responseSendQueue = this.responseQueue.sendQueueWithAutoFlush(100, TimeUnit.MILLISECONDS); this.service = service; this.serviceMethodHandler = serviceMethodHandler; this.serviceMethodHandler.init(service, rootAddress, serviceAddress, responseSendQueue); this.eventQueue = this.requestQueueBuilder.setName("Event Queue" + serviceMethodHandler.address()).build(); this.handleCallbacks = handleCallbacks; this.requestQueue = initRequestQueue(serviceMethodHandler, async); this.systemManager = systemManager; this.factory = factory(); this.eventManager.ifPresent(em -> { em.joinService(BaseServiceQueueImpl.this); }); } public static ServiceQueue currentService() { return serviceThreadLocal.get(); } @Override public void start() { start(serviceMethodHandler, joinEventManager); } public ServiceQueue startServiceQueue() { start(serviceMethodHandler, joinEventManager); return this; } public ServiceQueue start(boolean joinEventManager) { start(serviceMethodHandler, joinEventManager); return this; } @Override public Queue<MethodCall<Object>> requestQueue() { return this.requestQueue; } @Override public Queue<Response<Object>> responseQueue() { return this.responseQueue; } protected Queue<MethodCall<Object>> initRequestQueue(final ServiceMethodHandler serviceMethodHandler, boolean async) { Queue<MethodCall<Object>> requestQueue; if (async) { requestQueue = this.requestQueueBuilder.setName("Send Queue " + serviceMethodHandler.address()).build(); } else { requestQueue = new Queue<MethodCall<Object>>() { @Override public ReceiveQueue<MethodCall<Object>> receiveQueue() { return null; } @Override public SendQueue<MethodCall<Object>> sendQueue() { return new SendQueue<MethodCall<Object>>() { @Override public boolean send(MethodCall<Object> item) { return doHandleMethodCall(item, serviceMethodHandler); } @Override public void sendAndFlush(MethodCall<Object> item) { doHandleMethodCall(item, serviceMethodHandler); } @SafeVarargs @Override public final void sendMany(MethodCall<Object>... items) { for (MethodCall<Object> item : items) { doHandleMethodCall(item, serviceMethodHandler); } } @Override public void sendBatch(Collection<MethodCall<Object>> items) { for (MethodCall<Object> item : items) { doHandleMethodCall(item, serviceMethodHandler); } } @Override public void sendBatch(Iterable<MethodCall<Object>> items) { for (MethodCall<Object> item : items) { doHandleMethodCall(item, serviceMethodHandler); } } @Override public boolean shouldBatch() { return false; } @Override public void flushSends() { } @Override public int size() { return 0; } }; } @Override public void startListener(ReceiveQueueListener<MethodCall<Object>> listener) { } @Override public void stop() { } @Override public int size() { return 0; } }; } return requestQueue; } public ServiceQueue startCallBackHandler() { if (!handleCallbacks) { /** Need to make this configurable. */ callbackManager.startReturnHandlerProcessor(this.responseQueue); return this; } else { throw new IllegalStateException("Unable to handle callbacks in a new thread when handleCallbacks is set"); } } public BaseServiceQueueImpl requestObjectTransformer(Transformer<Request, Object> requestObjectTransformer) { this.requestObjectTransformer = requestObjectTransformer; return this; } public BaseServiceQueueImpl responseObjectTransformer(Transformer<Response<Object>, Response> responseObjectTransformer) { this.responseObjectTransformer = responseObjectTransformer; return this; } /** * This method is where all of the action is. * * @param methodCall methodCall * @param serviceMethodHandler handler */ private boolean doHandleMethodCall(MethodCall<Object> methodCall, final ServiceMethodHandler serviceMethodHandler) { if (debug) { logger.debug("ServiceImpl::doHandleMethodCall() METHOD CALL" + methodCall); } if (callbackManager != null) { if (methodCall.hasCallback() && serviceMethodHandler.couldHaveCallback(methodCall.name())) { callbackManager.registerCallbacks(methodCall); } } //inputQueueListener.receive(methodCall); final boolean continueFlag[] = new boolean[1]; methodCall = beforeMethodProcessing(methodCall, continueFlag); if (continueFlag[0]) { if (debug) logger.debug("ServiceImpl::doHandleMethodCall() before handling stopped processing"); return false; } Response<Object> response = serviceMethodHandler.receiveMethodCall(methodCall); if (response != ServiceConstants.VOID) { if (!afterMethodCall.after(methodCall, response)) { return false; } //noinspection unchecked response = responseObjectTransformer.transform(response); if (!afterMethodCallAfterTransform.after(methodCall, response)) { return false; } if (debug) { if (response.body() instanceof Throwable) { logger.error("Unable to handle call ", ((Throwable) response.body())); } } if (!responseSendQueue.send(response)) { logger.error("Unable to send response {} for method {} for object {}", response, methodCall.name(), methodCall.objectName()); } } return false; } private void start(final ServiceMethodHandler serviceMethodHandler, final boolean joinEventManager) { if (started.get()) { logger.warn("Service {} already started. It will not start twice.", name()); return; } logger.info("Starting service {}", name()); started.set(true); final ReceiveQueue<Response<Object>> responseReceiveQueue = this.handleCallbacks ? responseQueue.receiveQueue() : null; final ReceiveQueue<Event<Object>> eventReceiveQueue = eventQueue.receiveQueue(); serviceThreadLocal.set(this); if (!(service instanceof EventManager)) { if (joinEventManager) { serviceContext().eventManager().joinService(this); } } flushEventManagerCalls(); serviceThreadLocal.set(null); requestQueue.startListener(new ReceiveQueueListener<MethodCall<Object>>() { @Override public void init() { serviceThreadLocal.set(BaseServiceQueueImpl.this); queueCallBackHandler.queueInit(); serviceMethodHandler.init(); } @Override public void receive(MethodCall<Object> methodCall) { queueCallBackHandler.beforeReceiveCalled(); doHandleMethodCall(methodCall, serviceMethodHandler); queueCallBackHandler.afterReceiveCalled(); } @Override public void empty() { serviceThreadLocal.set(BaseServiceQueueImpl.this); handle(); serviceMethodHandler.empty(); queueCallBackHandler.queueEmpty(); } @Override public void startBatch() { serviceThreadLocal.set(BaseServiceQueueImpl.this); serviceMethodHandler.startBatch(); queueCallBackHandler.queueStartBatch(); } @Override public void limit() { serviceThreadLocal.set(BaseServiceQueueImpl.this); handle(); serviceMethodHandler.limit(); queueCallBackHandler.queueLimit(); } @Override public void shutdown() { serviceThreadLocal.set(BaseServiceQueueImpl.this); handle(); serviceMethodHandler.shutdown(); queueCallBackHandler.queueShutdown(); serviceThreadLocal.set(null); } @Override public void idle() { serviceThreadLocal.set(BaseServiceQueueImpl.this); handle(); serviceMethodHandler.idle(); queueCallBackHandler.queueIdle(); if (callbackManager != null) { callbackManager.process(0); } serviceThreadLocal.set(null); } /** Such a small method with so much responsibility. */ public void handle() { manageResponseQueue(); handleCallBacks(responseReceiveQueue); handleEvents(eventReceiveQueue, serviceMethodHandler); } }); } private void handleEvents(ReceiveQueue<Event<Object>> eventReceiveQueue, ServiceMethodHandler serviceMethodHandler) { /* Handles the event processing. */ Event<Object> event = eventReceiveQueue.poll(); while (event != null) { serviceMethodHandler.handleEvent(event); event = eventReceiveQueue.poll(); } flushEventManagerCalls(); } private void handleCallBacks(ReceiveQueue<Response<Object>> responseReceiveQueue) { /* Handles the CallBacks if you have configured the service to handle its own callbacks. Callbacks can be handled in a separate thread or the same thread the manages the service. */ if (handleCallbacks) { Response<Object> response = responseReceiveQueue.poll(); while (response != null) { callbackManager.handleResponse(response); response = responseReceiveQueue.poll(); } } } private void flushEventManagerCalls() { final EventManager eventManager = factory.eventManagerProxy(); if (eventManager != null) { ServiceProxyUtils.flushServiceProxy(eventManager); factory.clearEventManagerProxy(); } } private void manageResponseQueue() { long now = Timer.timer().now(); if (now - lastResponseFlushTime > 50) { lastResponseFlushTime = now; responseSendQueue.flushSends(); } } private MethodCall<Object> beforeMethodProcessing(MethodCall<Object> methodCall, boolean[] continueFlag) { if (!beforeMethodCall.before(methodCall)) { continueFlag[0] = false; } if (requestObjectTransformer != null && requestObjectTransformer != ServiceConstants.NO_OP_ARG_TRANSFORM) { final Object arg = requestObjectTransformer.transform(methodCall); methodCall = MethodCallBuilder.transformed(methodCall, arg); } if (beforeMethodCallAfterTransform != null && beforeMethodCallAfterTransform != ServiceConstants.NO_OP_BEFORE_METHOD_CALL) { if (!beforeMethodCallAfterTransform.before(methodCall)) { continueFlag[0] = false; } } return methodCall; } @Override public SendQueue<MethodCall<Object>> requests() { return requestQueue.sendQueue(); } @Override public SendQueue<MethodCall<Object>> requestsWithAutoFlush(int flushInterval, TimeUnit timeUnit) { return requestQueue.sendQueueWithAutoFlush(flushInterval, timeUnit); } @Override public ReceiveQueue<Response<Object>> responses() { return responseQueue.receiveQueue(); } @Override public String name() { return serviceMethodHandler.name(); } @Override public String address() { return serviceMethodHandler.address(); } @Override public void stop() { started.set(false); try { if (requestQueue != null) requestQueue.stop(); } catch (Exception ex) { if (debug) logger.debug("Unable to stop request queue", ex); } try { if (responseQueue != null) responseQueue.stop(); } catch (Exception ex) { if (debug) logger.debug("Unable to stop response queues", ex); } if (systemManager != null) { this.systemManager.serviceShutDown(); this.systemManager.unregisterService(this); } if (!(service instanceof EventManager)) { if (joinEventManager) { serviceContext().eventManager().leaveEventBus(this); } } eventManager.ifPresent(em -> em.leaveEventBus(BaseServiceQueueImpl.this)); } @Override public Collection<String> addresses(String address) { return this.serviceMethodHandler.addresses(); } @Override public void flush() { lastResponseFlushTime = 0; manageResponseQueue(); } @Override public boolean failing() { return failing.get(); } @Override public boolean running() { return started.get(); } @Override public void setFailing() { failing.set(true); } @Override public void recover() { failing.set(false); } public Object service() { return service; } public <T> T createProxyWithAutoFlush(Class<T> serviceInterface, int interval, TimeUnit timeUnit) { final SendQueue<MethodCall<Object>> methodCallSendQueue = requestQueue.sendQueueWithAutoFlush(interval, timeUnit); methodCallSendQueue.start(); return proxy(serviceInterface, methodCallSendQueue); } @Override public <T> T createProxyWithAutoFlush(Class<T> serviceInterface, Duration duration) { return createProxyWithAutoFlush(serviceInterface, (int) duration.getDuration(), duration.getTimeUnit()); } public <T> T createProxyWithAutoFlush(final Class<T> serviceInterface, final PeriodicScheduler periodicScheduler, final int interval, final TimeUnit timeUnit) { final SendQueue<MethodCall<Object>> methodCallSendQueue = requestQueue.sendQueueWithAutoFlush(periodicScheduler, interval, timeUnit); methodCallSendQueue.start(); return proxy(serviceInterface, methodCallSendQueue); } public <T> T createProxy(Class<T> serviceInterface) { final SendQueue<MethodCall<Object>> methodCallSendQueue = requestQueue.sendQueue(); return proxy(serviceInterface, methodCallSendQueue); } private <T> void validateInterface(Class<T> serviceInterface) { if (!serviceInterface.isInterface()) { throw new IllegalStateException("Service Interface must be an interface " + serviceInterface.getName()); } final ClassMeta<T> classMeta = ClassMeta.classMeta(serviceInterface); final Method[] declaredMethods = classMeta.cls().getDeclaredMethods(); for (Method m : declaredMethods) { if (!(m.getReturnType() == void.class)) { throw new IllegalStateException("Async interface can only return void " + serviceInterface.getName()); } } } private <T> T proxy(final Class<T> serviceInterface, final SendQueue<MethodCall<Object>> methodCallSendQueue) { validateInterface(serviceInterface); final String uuid = serviceInterface.getName() + "::" + UUID.randomUUID().toString(); if (!started.get()) { logger.info("ServiceQueue::create(...), A proxy is being asked for a service that is not started ", name()); } InvocationHandler invocationHandler = new InvocationHandler() { private long messageId = 0; private long timestamp = Timer.timer().now(); private int times = 10; @Override public Object invoke(final Object proxy, final Method method, final Object[] args) throws Throwable { if (method.getName().equals("toString")) { return "PROXY OBJECT " + address(); } if (method.getName().equals("clientProxyFlush")) { methodCallSendQueue.flushSends(); return null; } if (method.getName().equals("stop")) { methodCallSendQueue.stop(); return null; } messageId++; times--; if (times == 0) { timestamp = Timer.timer().now(); times = 10; } else { timestamp++; } if (beforeMethodSent == null) { /** TODO LEFT OFF HERE. */ final MethodCallLocal call = new MethodCallLocal(method.getName(), uuid, timestamp, messageId, args, null, null); methodCallSendQueue.send(call); } else { final String name = method.getName(); MethodCallBuilder methodCallBuilder = MethodCallBuilder.methodCallBuilder() .setLocal(true).setAddress(name) .setName(name).setReturnAddress(uuid) .setTimestamp(timestamp).setId(messageId) .setBodyArgs(args); beforeMethodSent.beforeMethodSent(methodCallBuilder); final MethodCall<Object> call = methodCallBuilder.build(); methodCallSendQueue.send(call); } return null; } }; final Object o = Proxy.newProxyInstance(serviceInterface.getClassLoader(), new Class[]{serviceInterface, ClientProxy.class}, invocationHandler ); //noinspection unchecked return (T) o; } @Override public SendQueue<Event<Object>> events() { return this.eventQueue.sendQueueWithAutoFlush(50, TimeUnit.MILLISECONDS); } @Override public String toString() { return "ServiceQueue{" + "service=" + service.getClass().getSimpleName() + '}'; } }
qbit/core/src/main/java/io/advantageous/qbit/service/impl/BaseServiceQueueImpl.java
/** * **************************************************************************** * Copyright (c) 2015. Rick Hightower, Geoff Chandler * <p> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * __ __ _ _____ _ _ * \ \ / / | | / ____| | | | | * \ \ /\ / /__| |__| (___ ___ ___| | _____| |_ * \ \/ \/ / _ \ '_ \\___ \ / _ \ / __| |/ / _ \ __| * \ /\ / __/ |_) |___) | (_) | (__| < __/ |_ * \/ \/ \___|_.__/_____/ \___/ \___|_|\_\___|\__| * _ _____ ____ _ _ * | |/ ____|/ __ \| \ | | * | | (___ | | | | \| | * _ | |\___ \| | | | . ` | * | |__| |____) | |__| | |\ | * \____/|_____/ \____/|_|_\_|_ * | __ \| ____|/ ____|__ __| * | |__) | |__ | (___ | | * | _ /| __| \___ \ | | * | | \ \| |____ ____) | | | * |_| \_\______|_____/ |_|___ _ * | \/ (_) / ____| (_) * | \ / |_ ___ _ __ ___| (___ ___ _ ____ ___ ___ ___ * | |\/| | |/ __| '__/ _ \\___ \ / _ \ '__\ \ / / |/ __/ _ \ * | | | | | (__| | | (_) |___) | __/ | \ V /| | (_| __/ * |_| |_|_|\___|_| \___/_____/ \___|_| \_/ |_|\___\___| * <p> * QBit - The Microservice lib for Java : JSON, WebSocket, REST. Be The Web! * http://rick-hightower.blogspot.com/2014/12/rise-of-machines-writing-high-speed.html * http://rick-hightower.blogspot.com/2014/12/quick-guide-to-programming-services-in.html * http://rick-hightower.blogspot.com/2015/01/quick-startClient-qbit-programming.html * http://rick-hightower.blogspot.com/2015/01/high-speed-soa.html * http://rick-hightower.blogspot.com/2015/02/qbit-event-bus.html * **************************************************************************** */ package io.advantageous.qbit.service.impl; import io.advantageous.boon.core.reflection.BeanUtils; import io.advantageous.boon.core.reflection.ClassMeta; import io.advantageous.qbit.Factory; import io.advantageous.qbit.GlobalConstants; import io.advantageous.qbit.client.BeforeMethodSent; import io.advantageous.qbit.client.ClientProxy; import io.advantageous.qbit.concurrent.PeriodicScheduler; import io.advantageous.qbit.events.EventManager; import io.advantageous.qbit.message.*; import io.advantageous.qbit.message.impl.MethodCallLocal; import io.advantageous.qbit.queue.*; import io.advantageous.qbit.service.*; import io.advantageous.qbit.system.QBitSystemManager; import io.advantageous.qbit.time.Duration; import io.advantageous.qbit.transforms.NoOpResponseTransformer; import io.advantageous.qbit.transforms.Transformer; import io.advantageous.qbit.util.Timer; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.lang.reflect.InvocationHandler; import java.lang.reflect.Method; import java.lang.reflect.Proxy; import java.util.Collection; import java.util.Optional; import java.util.UUID; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import static io.advantageous.qbit.QBit.factory; import static io.advantageous.qbit.service.ServiceContext.serviceContext; /** * @author rhightower on 2/18/15. */ public class BaseServiceQueueImpl implements ServiceQueue { protected static final ThreadLocal<ServiceQueue> serviceThreadLocal = new ThreadLocal<>(); protected final QBitSystemManager systemManager; protected final Logger logger = LoggerFactory.getLogger(ServiceQueueImpl.class); protected final boolean debug = GlobalConstants.DEBUG && logger.isDebugEnabled(); protected final Object service; protected final Queue<Response<Object>> responseQueue; protected final Queue<MethodCall<Object>> requestQueue; protected final Queue<Event<Object>> eventQueue; protected final QueueBuilder requestQueueBuilder; protected final QueueBuilder responseQueueBuilder; protected final boolean handleCallbacks; protected final ServiceMethodHandler serviceMethodHandler; protected final SendQueue<Response<Object>> responseSendQueue; private final Factory factory; private final BeforeMethodSent beforeMethodSent; private final Optional<EventManager> eventManager; private final boolean joinEventManager; private final AtomicBoolean started = new AtomicBoolean(false); private final BeforeMethodCall beforeMethodCall; private final BeforeMethodCall beforeMethodCallAfterTransform; private final AfterMethodCall afterMethodCall; private final AfterMethodCall afterMethodCallAfterTransform; private final CallbackManager callbackManager; private final QueueCallBackHandler queueCallBackHandler; protected volatile long lastResponseFlushTime = Timer.timer().now(); private Transformer<Request, Object> requestObjectTransformer = ServiceConstants.NO_OP_ARG_TRANSFORM; private Transformer<Response<Object>, Response> responseObjectTransformer = new NoOpResponseTransformer(); private AtomicBoolean failing = new AtomicBoolean(); public BaseServiceQueueImpl(final String rootAddress, final String serviceAddress, final Object service, final QueueBuilder requestQueueBuilder, final QueueBuilder responseQueueBuilder, final ServiceMethodHandler serviceMethodHandler, final Queue<Response<Object>> responseQueue, final boolean async, final boolean handleCallbacks, final QBitSystemManager systemManager, final BeforeMethodCall beforeMethodCall, final BeforeMethodCall beforeMethodCallAfterTransform, final AfterMethodCall afterMethodCall, final AfterMethodCall afterMethodCallAfterTransform, final QueueCallBackHandler queueCallBackHandler, final CallbackManager callbackManager, final BeforeMethodSent beforeMethodSent, final EventManager eventManager, final boolean joinEventManager) { this.eventManager = Optional.ofNullable(eventManager); this.joinEventManager = joinEventManager; this.beforeMethodSent = beforeMethodSent; this.beforeMethodCall = beforeMethodCall; this.beforeMethodCallAfterTransform = beforeMethodCallAfterTransform; this.afterMethodCall = afterMethodCall; this.afterMethodCallAfterTransform = afterMethodCallAfterTransform; this.callbackManager = callbackManager; if (queueCallBackHandler == null) { this.queueCallBackHandler = new QueueCallBackHandler() { @Override public void queueLimit() { } @Override public void queueEmpty() { } }; } else { this.queueCallBackHandler = queueCallBackHandler; } if (requestQueueBuilder == null) { this.requestQueueBuilder = new QueueBuilder(); } else { this.requestQueueBuilder = BeanUtils.copy(requestQueueBuilder); } if (responseQueueBuilder == null) { this.responseQueueBuilder = new QueueBuilder(); } else { this.responseQueueBuilder = BeanUtils.copy(responseQueueBuilder); } if (responseQueue == null) { logger.info("RESPONSE QUEUE WAS NULL CREATING ONE for service"); this.responseQueue = this.responseQueueBuilder.setName("Response Queue " + serviceMethodHandler.address()).build(); } else { this.responseQueue = responseQueue; } this.responseSendQueue = this.responseQueue.sendQueueWithAutoFlush(100, TimeUnit.MILLISECONDS); this.service = service; this.serviceMethodHandler = serviceMethodHandler; this.serviceMethodHandler.init(service, rootAddress, serviceAddress, responseSendQueue); this.eventQueue = this.requestQueueBuilder.setName("Event Queue" + serviceMethodHandler.address()).build(); this.handleCallbacks = handleCallbacks; this.requestQueue = initRequestQueue(serviceMethodHandler, async); this.systemManager = systemManager; this.factory = factory(); this.eventManager.ifPresent(em -> { em.joinService(BaseServiceQueueImpl.this); }); } public static ServiceQueue currentService() { return serviceThreadLocal.get(); } @Override public void start() { start(serviceMethodHandler, joinEventManager); } public ServiceQueue startServiceQueue() { start(serviceMethodHandler, joinEventManager); return this; } public ServiceQueue start(boolean joinEventManager) { start(serviceMethodHandler, joinEventManager); return this; } @Override public Queue<MethodCall<Object>> requestQueue() { return this.requestQueue; } @Override public Queue<Response<Object>> responseQueue() { return this.responseQueue; } protected Queue<MethodCall<Object>> initRequestQueue(final ServiceMethodHandler serviceMethodHandler, boolean async) { Queue<MethodCall<Object>> requestQueue; if (async) { requestQueue = this.requestQueueBuilder.setName("Send Queue " + serviceMethodHandler.address()).build(); } else { requestQueue = new Queue<MethodCall<Object>>() { @Override public ReceiveQueue<MethodCall<Object>> receiveQueue() { return null; } @Override public SendQueue<MethodCall<Object>> sendQueue() { return new SendQueue<MethodCall<Object>>() { @Override public boolean send(MethodCall<Object> item) { return doHandleMethodCall(item, serviceMethodHandler); } @Override public void sendAndFlush(MethodCall<Object> item) { doHandleMethodCall(item, serviceMethodHandler); } @SafeVarargs @Override public final void sendMany(MethodCall<Object>... items) { for (MethodCall<Object> item : items) { doHandleMethodCall(item, serviceMethodHandler); } } @Override public void sendBatch(Collection<MethodCall<Object>> items) { for (MethodCall<Object> item : items) { doHandleMethodCall(item, serviceMethodHandler); } } @Override public void sendBatch(Iterable<MethodCall<Object>> items) { for (MethodCall<Object> item : items) { doHandleMethodCall(item, serviceMethodHandler); } } @Override public boolean shouldBatch() { return false; } @Override public void flushSends() { } @Override public int size() { return 0; } }; } @Override public void startListener(ReceiveQueueListener<MethodCall<Object>> listener) { } @Override public void stop() { } @Override public int size() { return 0; } }; } return requestQueue; } public ServiceQueue startCallBackHandler() { if (!handleCallbacks) { /** Need to make this configurable. */ callbackManager.startReturnHandlerProcessor(this.responseQueue); return this; } else { throw new IllegalStateException("Unable to handle callbacks in a new thread when handleCallbacks is set"); } } public BaseServiceQueueImpl requestObjectTransformer(Transformer<Request, Object> requestObjectTransformer) { this.requestObjectTransformer = requestObjectTransformer; return this; } public BaseServiceQueueImpl responseObjectTransformer(Transformer<Response<Object>, Response> responseObjectTransformer) { this.responseObjectTransformer = responseObjectTransformer; return this; } /** * This method is where all of the action is. * * @param methodCall methodCall * @param serviceMethodHandler handler */ private boolean doHandleMethodCall(MethodCall<Object> methodCall, final ServiceMethodHandler serviceMethodHandler) { if (debug) { logger.debug("ServiceImpl::doHandleMethodCall() METHOD CALL" + methodCall); } if (callbackManager != null) { if (methodCall.hasCallback() && serviceMethodHandler.couldHaveCallback(methodCall.name())) { callbackManager.registerCallbacks(methodCall); } } //inputQueueListener.receive(methodCall); final boolean continueFlag[] = new boolean[1]; methodCall = beforeMethodProcessing(methodCall, continueFlag); if (continueFlag[0]) { if (debug) logger.debug("ServiceImpl::doHandleMethodCall() before handling stopped processing"); return false; } Response<Object> response = serviceMethodHandler.receiveMethodCall(methodCall); if (response != ServiceConstants.VOID) { if (!afterMethodCall.after(methodCall, response)) { return false; } //noinspection unchecked response = responseObjectTransformer.transform(response); if (!afterMethodCallAfterTransform.after(methodCall, response)) { return false; } if (debug) { if (response.body() instanceof Throwable) { logger.error("Unable to handle call ", ((Throwable) response.body())); } } if (!responseSendQueue.send(response)) { logger.error("Unable to send response {} for method {} for object {}", response, methodCall.name(), methodCall.objectName()); } } return false; } private void start(final ServiceMethodHandler serviceMethodHandler, final boolean joinEventManager) { if (started.get()) { logger.warn("Service {} already started. It will not start twice.", name()); return; } logger.info("Starting service {}", name()); started.set(true); final ReceiveQueue<Response<Object>> responseReceiveQueue = this.handleCallbacks ? responseQueue.receiveQueue() : null; final ReceiveQueue<Event<Object>> eventReceiveQueue = eventQueue.receiveQueue(); serviceThreadLocal.set(this); if (!(service instanceof EventManager)) { if (joinEventManager) { serviceContext().eventManager().joinService(this); } } flushEventManagerCalls(); serviceThreadLocal.set(null); requestQueue.startListener(new ReceiveQueueListener<MethodCall<Object>>() { @Override public void init() { serviceThreadLocal.set(BaseServiceQueueImpl.this); queueCallBackHandler.queueInit(); serviceMethodHandler.init(); } @Override public void receive(MethodCall<Object> methodCall) { queueCallBackHandler.beforeReceiveCalled(); doHandleMethodCall(methodCall, serviceMethodHandler); queueCallBackHandler.afterReceiveCalled(); } @Override public void empty() { serviceThreadLocal.set(BaseServiceQueueImpl.this); handle(); serviceMethodHandler.empty(); queueCallBackHandler.queueEmpty(); } @Override public void startBatch() { serviceThreadLocal.set(BaseServiceQueueImpl.this); serviceMethodHandler.startBatch(); queueCallBackHandler.queueStartBatch(); } @Override public void limit() { serviceThreadLocal.set(BaseServiceQueueImpl.this); handle(); serviceMethodHandler.limit(); queueCallBackHandler.queueLimit(); } @Override public void shutdown() { serviceThreadLocal.set(BaseServiceQueueImpl.this); handle(); serviceMethodHandler.shutdown(); queueCallBackHandler.queueShutdown(); serviceThreadLocal.set(null); } @Override public void idle() { serviceThreadLocal.set(BaseServiceQueueImpl.this); handle(); serviceMethodHandler.idle(); queueCallBackHandler.queueIdle(); if (callbackManager != null) { callbackManager.process(0); } serviceThreadLocal.set(null); } /** Such a small method with so much responsibility. */ public void handle() { manageResponseQueue(); handleCallBacks(responseReceiveQueue); handleEvents(eventReceiveQueue, serviceMethodHandler); } }); } private void handleEvents(ReceiveQueue<Event<Object>> eventReceiveQueue, ServiceMethodHandler serviceMethodHandler) { /* Handles the event processing. */ Event<Object> event = eventReceiveQueue.poll(); while (event != null) { serviceMethodHandler.handleEvent(event); event = eventReceiveQueue.poll(); } flushEventManagerCalls(); } private void handleCallBacks(ReceiveQueue<Response<Object>> responseReceiveQueue) { /* Handles the CallBacks if you have configured the service to handle its own callbacks. Callbacks can be handled in a separate thread or the same thread the manages the service. */ if (handleCallbacks) { Response<Object> response = responseReceiveQueue.poll(); while (response != null) { callbackManager.handleResponse(response); response = responseReceiveQueue.poll(); } } } private void flushEventManagerCalls() { final EventManager eventManager = factory.eventManagerProxy(); if (eventManager != null) { ServiceProxyUtils.flushServiceProxy(eventManager); factory.clearEventManagerProxy(); } } private void manageResponseQueue() { long now = Timer.timer().now(); if (now - lastResponseFlushTime > 50) { lastResponseFlushTime = now; responseSendQueue.flushSends(); } } private MethodCall<Object> beforeMethodProcessing(MethodCall<Object> methodCall, boolean[] continueFlag) { if (!beforeMethodCall.before(methodCall)) { continueFlag[0] = false; } if (requestObjectTransformer != null && requestObjectTransformer != ServiceConstants.NO_OP_ARG_TRANSFORM) { final Object arg = requestObjectTransformer.transform(methodCall); methodCall = MethodCallBuilder.transformed(methodCall, arg); } if (beforeMethodCallAfterTransform != null && beforeMethodCallAfterTransform != ServiceConstants.NO_OP_BEFORE_METHOD_CALL) { if (!beforeMethodCallAfterTransform.before(methodCall)) { continueFlag[0] = false; } } return methodCall; } @Override public SendQueue<MethodCall<Object>> requests() { return requestQueue.sendQueue(); } @Override public SendQueue<MethodCall<Object>> requestsWithAutoFlush(int flushInterval, TimeUnit timeUnit) { return requestQueue.sendQueueWithAutoFlush(flushInterval, timeUnit); } @Override public ReceiveQueue<Response<Object>> responses() { return responseQueue.receiveQueue(); } @Override public String name() { return serviceMethodHandler.name(); } @Override public String address() { return serviceMethodHandler.address(); } @Override public void stop() { started.set(false); try { if (requestQueue != null) requestQueue.stop(); } catch (Exception ex) { if (debug) logger.debug("Unable to stop request queue", ex); } try { if (responseQueue != null) responseQueue.stop(); } catch (Exception ex) { if (debug) logger.debug("Unable to stop response queues", ex); } if (systemManager != null) { this.systemManager.serviceShutDown(); this.systemManager.unregisterService(this); } if (!(service instanceof EventManager)) { if (joinEventManager) { serviceContext().eventManager().leaveEventBus(this); } } eventManager.ifPresent(em -> em.leaveEventBus(BaseServiceQueueImpl.this)); } @Override public Collection<String> addresses(String address) { return this.serviceMethodHandler.addresses(); } @Override public void flush() { lastResponseFlushTime = 0; manageResponseQueue(); } @Override public boolean failing() { return failing.get(); } @Override public boolean running() { return started.get(); } @Override public void setFailing() { failing.set(true); } @Override public void recover() { failing.set(false); } public Object service() { return service; } public <T> T createProxyWithAutoFlush(Class<T> serviceInterface, int interval, TimeUnit timeUnit) { final SendQueue<MethodCall<Object>> methodCallSendQueue = requestQueue.sendQueueWithAutoFlush(interval, timeUnit); methodCallSendQueue.start(); return proxy(serviceInterface, methodCallSendQueue); } @Override public <T> T createProxyWithAutoFlush(Class<T> serviceInterface, Duration duration) { return createProxyWithAutoFlush(serviceInterface, (int) duration.getDuration(), duration.getTimeUnit()); } public <T> T createProxyWithAutoFlush(final Class<T> serviceInterface, final PeriodicScheduler periodicScheduler, final int interval, final TimeUnit timeUnit) { final SendQueue<MethodCall<Object>> methodCallSendQueue = requestQueue.sendQueueWithAutoFlush(periodicScheduler, interval, timeUnit); methodCallSendQueue.start(); return proxy(serviceInterface, methodCallSendQueue); } public <T> T createProxy(Class<T> serviceInterface) { final SendQueue<MethodCall<Object>> methodCallSendQueue = requestQueue.sendQueue(); return proxy(serviceInterface, methodCallSendQueue); } private <T> void validateInterface(Class<T> serviceInterface) { if (!serviceInterface.isInterface()) { throw new IllegalStateException("Service Interface must be an interface " + serviceInterface.getName()); } final ClassMeta<T> classMeta = ClassMeta.classMeta(serviceInterface); final Method[] declaredMethods = classMeta.cls().getDeclaredMethods(); for (Method m : declaredMethods) { if (!(m.getReturnType() == void.class)) { throw new IllegalStateException("Async interface can only return void " + serviceInterface.getName()); } } } private <T> T proxy(final Class<T> serviceInterface, final SendQueue<MethodCall<Object>> methodCallSendQueue) { validateInterface(serviceInterface); final String uuid = serviceInterface.getName() + "::" + UUID.randomUUID().toString(); if (!started.get()) { logger.info("ServiceQueue::create(...), A proxy is being asked for a service that is not started ", name()); } InvocationHandler invocationHandler = new InvocationHandler() { private long messageId = 0; private long timestamp = Timer.timer().now(); private int times = 10; @Override public Object invoke(final Object proxy, final Method method, final Object[] args) throws Throwable { if (method.getName().equals("toString")) { return "PROXY OBJECT " + address(); } if (method.getName().equals("clientProxyFlush")) { methodCallSendQueue.flushSends(); return null; } if (method.getName().equals("stop")) { methodCallSendQueue.stop(); return null; } messageId++; times--; if (times == 0) { timestamp = Timer.timer().now(); times = 10; } else { timestamp++; } if (beforeMethodSent == null) { final MethodCallLocal call = new MethodCallLocal(method.getName(), uuid, timestamp, messageId, args, null, null); methodCallSendQueue.send(call); } else { final String name = method.getName(); MethodCallBuilder methodCallBuilder = MethodCallBuilder.methodCallBuilder() .setLocal(true).setAddress(name) .setName(name).setReturnAddress(uuid) .setTimestamp(timestamp).setId(messageId) .setBodyArgs(args); beforeMethodSent.beforeMethodSent(methodCallBuilder); final MethodCall<Object> call = methodCallBuilder.build(); methodCallSendQueue.send(call); } return null; } }; final Object o = Proxy.newProxyInstance(serviceInterface.getClassLoader(), new Class[]{serviceInterface, ClientProxy.class}, invocationHandler ); //noinspection unchecked return (T) o; } @Override public SendQueue<Event<Object>> events() { return this.eventQueue.sendQueueWithAutoFlush(50, TimeUnit.MILLISECONDS); } @Override public String toString() { return "ServiceQueue{" + "service=" + service.getClass().getSimpleName() + '}'; } }
left off here.
qbit/core/src/main/java/io/advantageous/qbit/service/impl/BaseServiceQueueImpl.java
left off here.
Java
apache-2.0
b3bc1bd0c247e0d143854fc4e795fdbb2ee2233b
0
DataSketches/sketches-core,pjain1/sketches-core,edoliberty/sketches-core,ivanliu/sketches-core
/* * Copyright 2015, Yahoo! Inc. * Licensed under the terms of the Apache License 2.0. See LICENSE file at the project root for terms. */ package com.yahoo.sketches.theta; import com.yahoo.sketches.Util; import com.yahoo.sketches.memory.Memory; import com.yahoo.sketches.memory.NativeMemory; import org.testng.annotations.Test; import static com.yahoo.sketches.theta.ForwardCompatibilityTest.convertSerV3toSerV1; import static com.yahoo.sketches.theta.Sketches.getMaxCompactSketchBytes; import static com.yahoo.sketches.theta.Sketches.getMaxIntersectionBytes; import static com.yahoo.sketches.theta.Sketches.getMaxUnionBytes; import static com.yahoo.sketches.theta.Sketches.getMaxUpdateSketchBytes; import static com.yahoo.sketches.theta.Sketches.getSerializationVersion; import static com.yahoo.sketches.theta.Sketches.heapifySetOperation; import static com.yahoo.sketches.theta.Sketches.heapifySketch; import static com.yahoo.sketches.theta.Sketches.setOperationBuilder; import static com.yahoo.sketches.theta.Sketches.updateSketchBuilder; import static com.yahoo.sketches.theta.Sketches.wrapSetOperation; import static com.yahoo.sketches.theta.Sketches.wrapSketch; import static org.testng.Assert.*; public class SketchesTest { private static Memory getCompactSketch(int k, int from, int to) { UpdateSketch.Builder bldr = updateSketchBuilder(); UpdateSketch sk1 = bldr.build(k); for (int i=from; i<to; i++) sk1.update(i); CompactSketch csk = sk1.compact(true, null); byte[] sk1bytes = csk.toByteArray(); NativeMemory mem = new NativeMemory(sk1bytes); return mem; } @Test public void checkSketchMethods() { int k = 1024; Memory mem = getCompactSketch(k, 0, k); CompactSketch csk2 = (CompactSketch)heapifySketch(mem); assertEquals((int)csk2.getEstimate(), k); csk2 = (CompactSketch)heapifySketch(mem, Util.DEFAULT_UPDATE_SEED); assertEquals((int)csk2.getEstimate(), k); csk2 = (CompactSketch)wrapSketch(mem); assertEquals((int)csk2.getEstimate(), k); csk2 = (CompactSketch)wrapSketch(mem, Util.DEFAULT_UPDATE_SEED); assertEquals((int)csk2.getEstimate(), k); } @Test public void checkSetOpMethods() { int k = 1024; Memory mem1 = getCompactSketch(k, 0, k); Memory mem2 = getCompactSketch(k, k/2, 3*k/2); SetOperation.Builder bldr = setOperationBuilder(); Union union = bldr.buildUnion(2*k); union.update(mem1); CompactSketch cSk = union.getResult(true, null); assertEquals((int)cSk.getEstimate(), k); union.update(mem2); cSk = union.getResult(true, null); assertEquals((int)cSk.getEstimate(), 3*k/2); byte[] ubytes = union.toByteArray(); NativeMemory uMem = new NativeMemory(ubytes); Union union2 = (Union)heapifySetOperation(uMem); cSk = union2.getResult(true, null); assertEquals((int)cSk.getEstimate(), 3*k/2); union2 = (Union)heapifySetOperation(uMem, Util.DEFAULT_UPDATE_SEED); cSk = union2.getResult(true, null); assertEquals((int)cSk.getEstimate(), 3*k/2); union2 = (Union)wrapSetOperation(uMem); cSk = union2.getResult(true, null); assertEquals((int)cSk.getEstimate(), 3*k/2); union2 = (Union)wrapSetOperation(uMem, Util.DEFAULT_UPDATE_SEED); cSk = union2.getResult(true, null); assertEquals((int)cSk.getEstimate(), 3*k/2); int serVer = getSerializationVersion(uMem); assertEquals(serVer, 3); } @Test public void checkUtilMethods() { int k = 1024; int maxUnionBytes = getMaxUnionBytes(k); assertEquals(2*k*8+32, maxUnionBytes); int maxInterBytes = getMaxIntersectionBytes(k); assertEquals(2*k*8+24, maxInterBytes); int maxCompSkBytes = getMaxCompactSketchBytes(k+1); assertEquals(24+(k+1)*8, maxCompSkBytes); int maxSkBytes = getMaxUpdateSketchBytes(k); assertEquals(24+2*k*8, maxSkBytes); } @Test public void checkStaticEstimators() { int k = 4096; int u = 4*k; Memory srcMem = getCompactSketch(k, 0, u); double est = Sketches.getEstimate(srcMem); assertEquals(est, u, 0.05*u); double rse = 1.0/Math.sqrt(k); double ub = Sketches.getUpperBound(1, srcMem); assertEquals(ub, est+rse, 0.05*u); double lb = Sketches.getLowerBound(1, srcMem); assertEquals(lb, est-rse, 0.05*u); Memory memV1 = convertSerV3toSerV1(srcMem); boolean empty = Sketches.getEmpty(memV1); assertFalse(empty); Memory emptyMemV3 = getCompactSketch(k, 0, 0); assertEquals(Sketches.getRetainedEntries(emptyMemV3), 0); assertEquals(Sketches.getThetaLong(emptyMemV3), Long.MAX_VALUE); Memory emptyMemV1 = convertSerV3toSerV1(emptyMemV3); empty = Sketches.getEmpty(emptyMemV1); assertTrue(empty); } @Test public void printlnTest() { println("Test"); } /** * @param s value to print */ static void println(String s) { //System.out.println(s); //disable here } }
src/test/java/com/yahoo/sketches/theta/SketchesTest.java
/* * Copyright 2015, Yahoo! Inc. * Licensed under the terms of the Apache License 2.0. See LICENSE file at the project root for terms. */ package com.yahoo.sketches.theta; import com.yahoo.sketches.Util; import com.yahoo.sketches.memory.Memory; import com.yahoo.sketches.memory.NativeMemory; import org.testng.annotations.Test; import static com.yahoo.sketches.theta.ForwardCompatibilityTest.convertSerV3toSerV1; import static com.yahoo.sketches.theta.Sketches.getMaxCompactSketchBytes; import static com.yahoo.sketches.theta.Sketches.getMaxIntersectionBytes; import static com.yahoo.sketches.theta.Sketches.getMaxUnionBytes; import static com.yahoo.sketches.theta.Sketches.getMaxUpdateSketchBytes; import static com.yahoo.sketches.theta.Sketches.getSerializationVersion; import static com.yahoo.sketches.theta.Sketches.heapifySetOperation; import static com.yahoo.sketches.theta.Sketches.heapifySketch; import static com.yahoo.sketches.theta.Sketches.setOperationBuilder; import static com.yahoo.sketches.theta.Sketches.updateSketchBuilder; import static com.yahoo.sketches.theta.Sketches.wrapSetOperation; import static com.yahoo.sketches.theta.Sketches.wrapSketch; import static org.testng.Assert.*; public class SketchesTest { private static Memory getCompactSketch(int k, int from, int to) { UpdateSketch.Builder bldr = updateSketchBuilder(); UpdateSketch sk1 = bldr.build(k); for (int i=from; i<to; i++) sk1.update(i); CompactSketch csk = sk1.compact(true, null); byte[] sk1bytes = csk.toByteArray(); NativeMemory mem = new NativeMemory(sk1bytes); return mem; } @Test public void checkSketchMethods() { int k = 1024; Memory mem = getCompactSketch(k, 0, k); CompactSketch csk2 = (CompactSketch)heapifySketch(mem); assertEquals((int)csk2.getEstimate(), k); csk2 = (CompactSketch)heapifySketch(mem, Util.DEFAULT_UPDATE_SEED); assertEquals((int)csk2.getEstimate(), k); csk2 = (CompactSketch)wrapSketch(mem); assertEquals((int)csk2.getEstimate(), k); csk2 = (CompactSketch)wrapSketch(mem, Util.DEFAULT_UPDATE_SEED); assertEquals((int)csk2.getEstimate(), k); } @Test public void checkSetOpMethods() { int k = 1024; Memory mem1 = getCompactSketch(k, 0, k); Memory mem2 = getCompactSketch(k, k/2, 3*k/2); SetOperation.Builder bldr = setOperationBuilder(); Union union = bldr.buildUnion(2*k); union.update(mem1); CompactSketch cSk = union.getResult(true, null); assertEquals((int)cSk.getEstimate(), k); union.update(mem2); cSk = union.getResult(true, null); assertEquals((int)cSk.getEstimate(), 3*k/2); byte[] ubytes = union.toByteArray(); NativeMemory uMem = new NativeMemory(ubytes); Union union2 = (Union)heapifySetOperation(uMem); cSk = union2.getResult(true, null); assertEquals((int)cSk.getEstimate(), 3*k/2); union2 = (Union)heapifySetOperation(uMem, Util.DEFAULT_UPDATE_SEED); cSk = union2.getResult(true, null); assertEquals((int)cSk.getEstimate(), 3*k/2); union2 = (Union)wrapSetOperation(uMem); cSk = union2.getResult(true, null); assertEquals((int)cSk.getEstimate(), 3*k/2); union2 = (Union)wrapSetOperation(uMem, Util.DEFAULT_UPDATE_SEED); cSk = union2.getResult(true, null); assertEquals((int)cSk.getEstimate(), 3*k/2); int serVer = getSerializationVersion(uMem); assertEquals(serVer, 3); } @Test public void checkUtilMethods() { int k = 1024; int maxUnionBytes = getMaxUnionBytes(k); assertEquals(2*k*8+32, maxUnionBytes); int maxInterBytes = getMaxIntersectionBytes(k); assertEquals(2*k*8+24, maxInterBytes); int maxCompSkBytes = getMaxCompactSketchBytes(k+1); assertEquals(24+(k+1)*8, maxCompSkBytes); int maxSkBytes = getMaxUpdateSketchBytes(k); assertEquals(24+2*k*8, maxSkBytes); } @Test public void checkStaticEstimators() { int k = 4096; int u = 4*k; Memory srcMem = getCompactSketch(k, 0, u); double est = Sketches.getEstimate(srcMem); assertEquals(est, u, 0.05*u); double rse = 1.0/Math.sqrt(k); double ub = Sketches.getUpperBound(1, srcMem); assertEquals(ub, est+rse, 0.05*u); double lb = Sketches.getLowerBound(1, srcMem); assertEquals(lb, est-rse, 0.05*u); Memory memV1 = convertSerV3toSerV1(srcMem); boolean empty = Sketches.getEmpty(memV1); assertFalse(empty); memV1 = convertSerV3toSerV1(getCompactSketch(k, 0, 0)); empty = Sketches.getEmpty(memV1); assertTrue(empty); } @Test public void printlnTest() { println("Test"); } /** * @param s value to print */ static void println(String s) { //System.out.println(s); //disable here } }
Update unit test for static memory estimators in Sketches.
src/test/java/com/yahoo/sketches/theta/SketchesTest.java
Update unit test for static memory estimators in Sketches.
Java
apache-2.0
cc8ef9af5f130dad523b1f9887d3d5f8409ba59d
0
ox-it/wl-course-signup,ox-it/wl-course-signup,ox-it/wl-course-signup,ox-it/wl-course-signup,ox-it/wl-course-signup
package uk.ac.ox.oucs.vle; import java.io.IOException; import java.io.OutputStream; import java.text.SimpleDateFormat; import java.util.Date; import java.util.List; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.StreamingOutput; import javax.ws.rs.ext.ContextResolver; import org.codehaus.jackson.JsonEncoding; import org.codehaus.jackson.JsonFactory; import org.codehaus.jackson.JsonGenerationException; import org.codehaus.jackson.JsonGenerator; import org.codehaus.jackson.map.JsonMappingException; import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.map.SerializationConfig; import org.codehaus.jackson.map.annotate.JsonSerialize; import org.codehaus.jackson.map.annotate.JsonSerialize.Inclusion; import org.codehaus.jackson.map.type.TypeFactory; import uk.ac.ox.oucs.vle.CourseSignupService.Range; @Path("/course/") public class CourseResource { private CourseSignupService courseService; private JsonFactory jsonFactory; private ObjectMapper objectMapper; public CourseResource(@Context ContextResolver<Object> resolver) { this.courseService = (CourseSignupService) resolver.getContext(CourseSignupService.class); jsonFactory = new JsonFactory(); objectMapper = new ObjectMapper(); objectMapper.configure(SerializationConfig.Feature.INDENT_OUTPUT, true); objectMapper.configure(SerializationConfig.Feature.USE_STATIC_TYPING, true); objectMapper.getSerializationConfig().setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL); } @Path("/{id}") @GET @Produces(MediaType.APPLICATION_JSON) public StreamingOutput getCourse(@PathParam("id") final String courseId, @QueryParam("range") final Range range) { final CourseGroup course = courseService.getCourseGroup(courseId, range); if (course == null) { throw new WebApplicationException(Response.Status.NOT_FOUND); } return new StreamingOutput() { public void write(OutputStream output) throws IOException, WebApplicationException { objectMapper.writeValue(output, course); } }; } @Path("/all") @GET @Produces(MediaType.APPLICATION_JSON) public StreamingOutput getCourses(@QueryParam("range") final Range range) { final List<CourseGroup> groups = courseService.search(""); if (groups == null) { throw new WebApplicationException(Response.Status.NOT_FOUND); } return new StreamingOutput() { public void write(OutputStream output) throws IOException, WebApplicationException { objectMapper.typedWriter(TypeFactory.collectionType(List.class, CourseGroup.class)).writeValue(output, groups); } }; } /** * This gets all the courses for a department that have upcoming * parts. * @param deptId The department to load the courses for. * @return An array of jsTree nodes. */ @Path("/dept/{deptId}") @Produces(MediaType.APPLICATION_JSON) @GET public StreamingOutput getCoursesUpcoming(@PathParam("deptId") final String deptId, @QueryParam("components") final Range range) { final List<CourseGroup> courses = courseService.getCourseGroups(deptId, range); return new GroupsStreamingOutput(courses, deptId, range.name()); } @Path("/admin") @GET @Produces(MediaType.APPLICATION_JSON) public Response getAdminCourse() throws JsonGenerationException, JsonMappingException, IOException { List <CourseGroup> groups = courseService.getAdministering(); // TODO Just return the coursegroups (no nested objects). return Response.ok(objectMapper.typedWriter(TypeFactory.collectionType(List.class, CourseGroup.class)).writeValueAsString(groups)).build(); } @Path("/search") @GET @Produces(MediaType.APPLICATION_JSON) public Response setCourses(@QueryParam("terms") String terms) throws JsonGenerationException, JsonMappingException, IOException { if (terms == null) { throw new WebApplicationException(); } List<CourseGroup> groups = courseService.search(terms); return Response.ok(objectMapper.typedWriter(TypeFactory.collectionType(List.class, CourseGroup.class)).writeValueAsString(groups)).build(); } /** * Formats a duration sensibly. * @param remaining Time remaining in milliseconds. * @return a String roughly representing the durnation. */ private String formatDuration(long remaining) { if (remaining < 1000) { return "< 1 second"; } else if (remaining < 60000) { return remaining / 1000 + " seconds"; } else if (remaining < 3600000) { return remaining / 60000 + " minutes"; } else if (remaining < 86400000) { return remaining / 3600000 + " hours"; } else { return remaining / 86400000 + " days"; } } private String summary(Date now, CourseGroup courseGroup) { // Calculate the summary based on the available components. if (courseGroup.getComponents().isEmpty()) { return "none available"; } Date nextOpen = new Date(Long.MAX_VALUE); Date willClose = new Date(0); boolean isOneOpen = false; boolean isOneBookable = false; boolean areSomePlaces = false; for (CourseComponent component: courseGroup.getComponents()) { if (!isOneBookable) { isOneBookable = component.getBookable(); } // Check if component is the earliest one opening in the future. boolean isGoingToOpen = component.getOpens().after(now) && component.getOpens().before(nextOpen); if (isGoingToOpen) { nextOpen = component.getOpens(); } // Check if the component is open and is open for the longest. if (component.getOpens().before(now) && component.getCloses().after(willClose)) { willClose = component.getCloses(); } boolean isOpen = component.getOpens().before(now) && component.getCloses().after(now); if (!isOneOpen && isOpen) { isOneOpen = true; } if (isOpen) { if (component.getPlaces() > 0) { areSomePlaces = true; } } } String detail = null; if (!isOneBookable) { return null; // No signup available. } if (isOneOpen) { if (areSomePlaces) { long remaining = willClose.getTime() - now.getTime(); detail = "close in "+ formatDuration(remaining); } else { detail = "full"; } } else { if (nextOpen.getTime() == Long.MAX_VALUE) { return null; // Didn't find one open } long until = nextOpen.getTime() - now.getTime(); detail = "open in "+ formatDuration(until); } return detail; } private class GroupsStreamingOutput implements StreamingOutput { private final List<CourseGroup> courses; private final String deptId; private final String range; private GroupsStreamingOutput(List<CourseGroup> courses, String deptId, String range) { this.courses = courses; this.deptId = deptId; this.range = range; } public void write(OutputStream out) throws IOException { Date now = courseService.getNow(); JsonGenerator gen = jsonFactory.createJsonGenerator(out, JsonEncoding.UTF8); gen.writeStartObject(); gen.writeObjectField("dept", deptId); gen.writeObjectField("range", range); gen.writeArrayFieldStart("tree"); for (CourseGroup courseGroup : courses) { gen.writeStartObject(); gen.writeObjectFieldStart("attr"); gen.writeObjectField("id", courseGroup.getId()); gen.writeEndObject(); String detail = summary(now, courseGroup); gen.writeObjectField("data", courseGroup.getTitle() + (detail == null?"":(" ("+detail+")")) ); gen.writeEndObject(); } gen.writeEndArray(); gen.writeEndObject(); gen.close(); } } }
tool/src/main/java/uk/ac/ox/oucs/vle/CourseResource.java
package uk.ac.ox.oucs.vle; import java.io.IOException; import java.io.OutputStream; import java.text.SimpleDateFormat; import java.util.Date; import java.util.List; import javax.ws.rs.GET; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Context; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.StreamingOutput; import javax.ws.rs.ext.ContextResolver; import org.codehaus.jackson.JsonEncoding; import org.codehaus.jackson.JsonFactory; import org.codehaus.jackson.JsonGenerationException; import org.codehaus.jackson.JsonGenerator; import org.codehaus.jackson.map.JsonMappingException; import org.codehaus.jackson.map.ObjectMapper; import org.codehaus.jackson.map.SerializationConfig; import org.codehaus.jackson.map.annotate.JsonSerialize; import org.codehaus.jackson.map.annotate.JsonSerialize.Inclusion; import org.codehaus.jackson.map.type.TypeFactory; import uk.ac.ox.oucs.vle.CourseSignupService.Range; @Path("/course/") public class CourseResource { private CourseSignupService courseService; private JsonFactory jsonFactory; private ObjectMapper objectMapper; public CourseResource(@Context ContextResolver<Object> resolver) { this.courseService = (CourseSignupService) resolver.getContext(CourseSignupService.class); jsonFactory = new JsonFactory(); objectMapper = new ObjectMapper(); objectMapper.configure(SerializationConfig.Feature.INDENT_OUTPUT, true); objectMapper.configure(SerializationConfig.Feature.USE_STATIC_TYPING, true); objectMapper.getSerializationConfig().setSerializationInclusion(JsonSerialize.Inclusion.NON_NULL); } @Path("/{id}") @GET @Produces(MediaType.APPLICATION_JSON) public StreamingOutput getCourse(@PathParam("id") final String courseId, @QueryParam("range") final Range range) { final CourseGroup course = courseService.getCourseGroup(courseId, range); if (course == null) { throw new WebApplicationException(Response.Status.NOT_FOUND); } return new StreamingOutput() { public void write(OutputStream output) throws IOException, WebApplicationException { objectMapper.writeValue(output, course); } }; } @Path("/all") @GET @Produces(MediaType.APPLICATION_JSON) public StreamingOutput getCourses(@QueryParam("range") final Range range) { final List<CourseGroup> groups = courseService.search(""); if (groups == null) { throw new WebApplicationException(Response.Status.NOT_FOUND); } return new StreamingOutput() { public void write(OutputStream output) throws IOException, WebApplicationException { objectMapper.typedWriter(TypeFactory.collectionType(List.class, CourseGroup.class)).writeValue(output, groups); } }; } /** * This gets all the courses for a department that have upcoming * parts. * @param deptId The department to load the courses for. * @return An array of jsTree nodes. */ @Path("/dept/{deptId}") @Produces(MediaType.APPLICATION_JSON) @GET public StreamingOutput getCoursesUpcoming(@PathParam("deptId") final String deptId, @QueryParam("components") final Range range) { final List<CourseGroup> courses = courseService.getCourseGroups(deptId, range); return new GroupsStreamingOutput(courses, deptId, range.name()); } @Path("/admin") @GET @Produces(MediaType.APPLICATION_JSON) public Response getAdminCourse() throws JsonGenerationException, JsonMappingException, IOException { List <CourseGroup> groups = courseService.getAdministering(); // TODO Just return the coursegroups (no nested objects). return Response.ok(objectMapper.typedWriter(TypeFactory.collectionType(List.class, CourseGroup.class)).writeValueAsString(groups)).build(); } @Path("/search") @GET @Produces(MediaType.APPLICATION_JSON) public Response setCourses(@QueryParam("terms") String terms) throws JsonGenerationException, JsonMappingException, IOException { if (terms == null) { throw new WebApplicationException(); } List<CourseGroup> groups = courseService.search(terms); return Response.ok(objectMapper.typedWriter(TypeFactory.collectionType(List.class, CourseGroup.class)).writeValueAsString(groups)).build(); } /** * Formats a duration sensibly. * @param remaining Time remaining in milliseconds. * @return a String roughly representing the durnation. */ private String formatDuration(long remaining) { if (remaining < 1000) { return "< 1 second"; } else if (remaining < 60000) { return remaining / 1000 + " seconds"; } else if (remaining < 3600000) { return remaining / 60000 + " minutes"; } else if (remaining < 86400000) { return remaining / 3600000 + " hours"; } else { return remaining / 86400000 + " days"; } } private String summary(Date now, CourseGroup courseGroup) { // Calculate the summary based on the available components. if (courseGroup.getComponents().isEmpty()) { return "none available"; } Date nextOpen = new Date(Long.MAX_VALUE); Date willClose = new Date(0); boolean isOneOpen = false; boolean isOneBookable = false; boolean areSomePlaces = false; for (CourseComponent component: courseGroup.getComponents()) { if (!isOneBookable) { isOneBookable = component.getBookable(); } // Check if component is the earliest one opening in the future. if (component.getOpens().after(now) && component.getOpens().before(nextOpen)) { nextOpen = component.getOpens(); } // Check if the component is open and is open for the longest. if (component.getOpens().before(now) && component.getCloses().after(willClose)) { willClose = component.getCloses(); } if (!isOneOpen && component.getOpens().before(now) && component.getCloses().after(now)) { isOneOpen = true; if (component.getPlaces() > 0) { areSomePlaces = true; } } } String detail = null; if (!isOneBookable) { return null; // No signup available. } if (isOneOpen) { if (areSomePlaces) { long remaining = willClose.getTime() - now.getTime(); detail = "close in "+ formatDuration(remaining); } else { detail = "full"; } } else { if (nextOpen.getTime() == Long.MAX_VALUE) { return null; // Didn't find one open } long until = nextOpen.getTime() - now.getTime(); detail = "open in "+ formatDuration(until); } return detail; } private class GroupsStreamingOutput implements StreamingOutput { private final List<CourseGroup> courses; private final String deptId; private final String range; private GroupsStreamingOutput(List<CourseGroup> courses, String deptId, String range) { this.courses = courses; this.deptId = deptId; this.range = range; } public void write(OutputStream out) throws IOException { Date now = courseService.getNow(); JsonGenerator gen = jsonFactory.createJsonGenerator(out, JsonEncoding.UTF8); gen.writeStartObject(); gen.writeObjectField("dept", deptId); gen.writeObjectField("range", range); gen.writeArrayFieldStart("tree"); for (CourseGroup courseGroup : courses) { gen.writeStartObject(); gen.writeObjectFieldStart("attr"); gen.writeObjectField("id", courseGroup.getId()); gen.writeEndObject(); String detail = summary(now, courseGroup); gen.writeObjectField("data", courseGroup.getTitle() + (detail == null?"":(" ("+detail+")")) ); gen.writeEndObject(); } gen.writeEndArray(); gen.writeEndObject(); gen.close(); } } }
Fix issue whereby some courses were wrongly shown as full. This was an intermittent issue which depending on the ordering that the components were given back.
tool/src/main/java/uk/ac/ox/oucs/vle/CourseResource.java
Fix issue whereby some courses were wrongly shown as full. This was an intermittent issue which depending on the ordering that the components were given back.
Java
apache-2.0
1de9f0925dc18883f109d52e1598ec140cea464f
0
robinverduijn/gradle,gradle/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,robinverduijn/gradle,lsmaira/gradle,blindpirate/gradle,robinverduijn/gradle,robinverduijn/gradle,gstevey/gradle,robinverduijn/gradle,gradle/gradle,gradle/gradle,gradle/gradle,gstevey/gradle,lsmaira/gradle,lsmaira/gradle,blindpirate/gradle,blindpirate/gradle,lsmaira/gradle,lsmaira/gradle,lsmaira/gradle,blindpirate/gradle,blindpirate/gradle,gstevey/gradle,robinverduijn/gradle,lsmaira/gradle,lsmaira/gradle,gstevey/gradle,blindpirate/gradle,robinverduijn/gradle,lsmaira/gradle,blindpirate/gradle,gstevey/gradle,gstevey/gradle,gradle/gradle,gstevey/gradle,robinverduijn/gradle,gradle/gradle,robinverduijn/gradle,lsmaira/gradle,blindpirate/gradle,gstevey/gradle,blindpirate/gradle,gradle/gradle,gstevey/gradle,robinverduijn/gradle,gradle/gradle,robinverduijn/gradle
/* * Copyright 2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.performance.fixture; import org.apache.commons.io.FileUtils; import org.gradle.internal.UncheckedException; import java.io.File; import java.io.IOException; public class LogFiles { public static File createTempLogFile(String prefix, String postfix) { try { // The directory is passed as an argument since File.createTempFile sets the location // of the temp directory to a static variable on the first call unless a directory is passed to the call. // Some tests change java.io.tmpdir and this is to ensure that the current value of java.io.tmpdir gets used here. return File.createTempFile(prefix, postfix, new File(System.getProperty("java.io.tmpdir"))); } catch (IOException e) { throw UncheckedException.throwAsUncheckedException(e); } } public static void copyLogFile(File logFile, BuildExperimentInvocationInfo invocationInfo, String prefix, String postfix) { String fileName = createFileNameForBuildInvocation(invocationInfo, prefix, postfix); try { FileUtils.copyFile(logFile, new File(invocationInfo.getProjectDir(), fileName)); } catch (IOException e) { // ignore } } public static String createFileNameForBuildInvocation(BuildExperimentInvocationInfo invocationInfo, String prefix, String postfix) { return (prefix + invocationInfo.getBuildExperimentSpec().getDisplayName() + "_" + invocationInfo.getBuildExperimentSpec().getProjectName() + "_" + invocationInfo.getPhase().name().toLowerCase() + "_" + invocationInfo.getIterationNumber() + "_" + invocationInfo.getIterationMax() + postfix).replaceAll("[^a-zA-Z0-9.-]", "_").replaceAll("[_]+", "_"); } }
subprojects/internal-performance-testing/src/main/groovy/org/gradle/performance/fixture/LogFiles.java
/* * Copyright 2015 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.gradle.performance.fixture; import org.apache.commons.io.FileUtils; import org.gradle.internal.UncheckedException; import java.io.File; import java.io.IOException; public class LogFiles { public static File createTempLogFile(String prefix, String postfix) { try { // see comment in org.gradle.integtests.fixtures.executer.InProcessGradleExecuter.getDefaultTmpDir() to find out why directory is passed explicitly return File.createTempFile(prefix, postfix, new File(System.getProperty("java.io.tmpdir"))); } catch (IOException e) { throw UncheckedException.throwAsUncheckedException(e); } } public static void copyLogFile(File logFile, BuildExperimentInvocationInfo invocationInfo, String prefix, String postfix) { String fileName = createFileNameForBuildInvocation(invocationInfo, prefix, postfix); try { FileUtils.copyFile(logFile, new File(invocationInfo.getProjectDir(), fileName)); } catch (IOException e) { // ignore } } public static String createFileNameForBuildInvocation(BuildExperimentInvocationInfo invocationInfo, String prefix, String postfix) { return (prefix + invocationInfo.getBuildExperimentSpec().getDisplayName() + "_" + invocationInfo.getBuildExperimentSpec().getProjectName() + "_" + invocationInfo.getPhase().name().toLowerCase() + "_" + invocationInfo.getIterationNumber() + "_" + invocationInfo.getIterationMax() + postfix).replaceAll("[^a-zA-Z0-9.-]", "_").replaceAll("[_]+", "_"); } }
Explain the reasoning inline instead of referencing another comment
subprojects/internal-performance-testing/src/main/groovy/org/gradle/performance/fixture/LogFiles.java
Explain the reasoning inline instead of referencing another comment
Java
apache-2.0
dd3a6a01e1cae1489b3d210242aa99e7ff30deb8
0
surya-janani/sakai,ktakacs/sakai,hackbuteer59/sakai,tl-its-umich-edu/sakai,whumph/sakai,bkirschn/sakai,bkirschn/sakai,bzhouduke123/sakai,puramshetty/sakai,ouit0408/sakai,udayg/sakai,kingmook/sakai,frasese/sakai,frasese/sakai,joserabal/sakai,buckett/sakai-gitflow,ktakacs/sakai,willkara/sakai,ouit0408/sakai,OpenCollabZA/sakai,clhedrick/sakai,Fudan-University/sakai,hackbuteer59/sakai,Fudan-University/sakai,hackbuteer59/sakai,zqian/sakai,joserabal/sakai,rodriguezdevera/sakai,kwedoff1/sakai,udayg/sakai,Fudan-University/sakai,introp-software/sakai,pushyamig/sakai,joserabal/sakai,buckett/sakai-gitflow,surya-janani/sakai,rodriguezdevera/sakai,joserabal/sakai,whumph/sakai,ktakacs/sakai,ktakacs/sakai,udayg/sakai,kwedoff1/sakai,noondaysun/sakai,willkara/sakai,buckett/sakai-gitflow,clhedrick/sakai,tl-its-umich-edu/sakai,whumph/sakai,noondaysun/sakai,clhedrick/sakai,pushyamig/sakai,ouit0408/sakai,noondaysun/sakai,conder/sakai,noondaysun/sakai,liubo404/sakai,joserabal/sakai,conder/sakai,lorenamgUMU/sakai,pushyamig/sakai,Fudan-University/sakai,bkirschn/sakai,hackbuteer59/sakai,tl-its-umich-edu/sakai,bkirschn/sakai,noondaysun/sakai,udayg/sakai,kingmook/sakai,bzhouduke123/sakai,liubo404/sakai,Fudan-University/sakai,puramshetty/sakai,kwedoff1/sakai,OpenCollabZA/sakai,rodriguezdevera/sakai,zqian/sakai,pushyamig/sakai,wfuedu/sakai,whumph/sakai,frasese/sakai,OpenCollabZA/sakai,wfuedu/sakai,duke-compsci290-spring2016/sakai,introp-software/sakai,bkirschn/sakai,colczr/sakai,frasese/sakai,liubo404/sakai,joserabal/sakai,joserabal/sakai,frasese/sakai,willkara/sakai,OpenCollabZA/sakai,colczr/sakai,tl-its-umich-edu/sakai,liubo404/sakai,bzhouduke123/sakai,lorenamgUMU/sakai,clhedrick/sakai,colczr/sakai,willkara/sakai,wfuedu/sakai,buckett/sakai-gitflow,willkara/sakai,puramshetty/sakai,kingmook/sakai,udayg/sakai,duke-compsci290-spring2016/sakai,ouit0408/sakai,pushyamig/sakai,wfuedu/sakai,liubo404/sakai,liubo404/sakai,hackbuteer59/sakai,ouit0408/sakai,wfuedu/sakai,kwedoff1/sakai,surya-janani/sakai,buckett/sakai-gitflow,rodriguezdevera/sakai,frasese/sakai,Fudan-University/sakai,ktakacs/sakai,bzhouduke123/sakai,bkirschn/sakai,zqian/sakai,puramshetty/sakai,OpenCollabZA/sakai,kingmook/sakai,rodriguezdevera/sakai,zqian/sakai,rodriguezdevera/sakai,tl-its-umich-edu/sakai,colczr/sakai,whumph/sakai,conder/sakai,whumph/sakai,kwedoff1/sakai,puramshetty/sakai,bzhouduke123/sakai,lorenamgUMU/sakai,ktakacs/sakai,colczr/sakai,introp-software/sakai,pushyamig/sakai,colczr/sakai,conder/sakai,noondaysun/sakai,duke-compsci290-spring2016/sakai,buckett/sakai-gitflow,kingmook/sakai,colczr/sakai,ouit0408/sakai,buckett/sakai-gitflow,conder/sakai,surya-janani/sakai,bzhouduke123/sakai,introp-software/sakai,bkirschn/sakai,lorenamgUMU/sakai,buckett/sakai-gitflow,udayg/sakai,kwedoff1/sakai,duke-compsci290-spring2016/sakai,Fudan-University/sakai,introp-software/sakai,udayg/sakai,zqian/sakai,lorenamgUMU/sakai,bkirschn/sakai,duke-compsci290-spring2016/sakai,duke-compsci290-spring2016/sakai,lorenamgUMU/sakai,conder/sakai,puramshetty/sakai,introp-software/sakai,tl-its-umich-edu/sakai,duke-compsci290-spring2016/sakai,ouit0408/sakai,wfuedu/sakai,OpenCollabZA/sakai,tl-its-umich-edu/sakai,lorenamgUMU/sakai,wfuedu/sakai,surya-janani/sakai,rodriguezdevera/sakai,puramshetty/sakai,bzhouduke123/sakai,liubo404/sakai,whumph/sakai,frasese/sakai,clhedrick/sakai,clhedrick/sakai,introp-software/sakai,willkara/sakai,hackbuteer59/sakai,wfuedu/sakai,OpenCollabZA/sakai,bzhouduke123/sakai,udayg/sakai,ouit0408/sakai,hackbuteer59/sakai,OpenCollabZA/sakai,joserabal/sakai,kingmook/sakai,lorenamgUMU/sakai,frasese/sakai,kwedoff1/sakai,zqian/sakai,tl-its-umich-edu/sakai,surya-janani/sakai,surya-janani/sakai,clhedrick/sakai,pushyamig/sakai,surya-janani/sakai,colczr/sakai,conder/sakai,whumph/sakai,conder/sakai,kingmook/sakai,ktakacs/sakai,rodriguezdevera/sakai,introp-software/sakai,kwedoff1/sakai,pushyamig/sakai,liubo404/sakai,clhedrick/sakai,duke-compsci290-spring2016/sakai,zqian/sakai,noondaysun/sakai,puramshetty/sakai,willkara/sakai,zqian/sakai,Fudan-University/sakai,hackbuteer59/sakai,ktakacs/sakai,noondaysun/sakai,kingmook/sakai,willkara/sakai
/********************************************************************************** * $URL$ * $Id$ *********************************************************************************** * * Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008, 2009 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * **********************************************************************************/ package org.sakaiproject.component.app.help; import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.io.StringReader; import java.net.MalformedURLException; import java.net.URL; import java.net.URLConnection; import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.TreeSet; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.apache.commons.codec.binary.Base64; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.Term; import org.apache.lucene.queryparser.classic.ParseException; import org.apache.lucene.queryparser.classic.QueryParser; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.FSDirectory; import org.apache.lucene.util.Version; import org.hibernate.HibernateException; import org.hibernate.Session; import org.sakaiproject.api.app.help.Category; import org.sakaiproject.api.app.help.Context; import org.sakaiproject.api.app.help.Glossary; import org.sakaiproject.api.app.help.GlossaryEntry; import org.sakaiproject.api.app.help.HelpManager; import org.sakaiproject.api.app.help.Resource; import org.sakaiproject.api.app.help.RestConfiguration; import org.sakaiproject.api.app.help.Source; import org.sakaiproject.api.app.help.TableOfContents; import org.sakaiproject.component.api.ServerConfigurationService; import org.sakaiproject.component.app.help.model.CategoryBean; import org.sakaiproject.component.app.help.model.ContextBean; import org.sakaiproject.component.app.help.model.ResourceBean; import org.sakaiproject.component.app.help.model.SourceBean; import org.sakaiproject.component.app.help.model.TableOfContentsBean; import org.sakaiproject.tool.api.Tool; import org.sakaiproject.tool.api.ToolManager; import org.sakaiproject.user.api.PreferencesService; import org.sakaiproject.user.api.UserDirectoryService; import org.springframework.beans.factory.BeanFactory; import org.springframework.beans.factory.xml.XmlBeanFactory; import org.springframework.core.io.UrlResource; import org.springframework.orm.hibernate3.HibernateCallback; import org.springframework.orm.hibernate3.HibernateObjectRetrievalFailureException; import org.springframework.orm.hibernate3.HibernateTransactionManager; import org.springframework.orm.hibernate3.support.HibernateDaoSupport; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.TransactionCallback; import org.springframework.transaction.support.TransactionTemplate; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.InputSource; import org.xml.sax.SAXException; /** * HelpManager provides database and search capabilitites for the Sakai help tool. * @author <a href="mailto:jlannan.iupui.edu">Jarrod Lannan</a> * @version $Id$ * */ public class HelpManagerImpl extends HibernateDaoSupport implements HelpManager { private static final String QUERY_GETRESOURCEBYDOCID = "query.getResourceByDocId"; private static final String QUERY_GETCATEGORYBYNAME = "query.getCategoryByName"; private static final String QUERY_GET_WELCOME_PAGE = "query.getWelcomePage"; private static final String DOCID = "docId"; private static final String WELCOME_PAGE = "welcomePage"; private static final String NAME = "name"; private static final String LUCENE_INDEX_PATH = System .getProperty("java.io.tmpdir") + File.separator + "sakai.help"; private static final String TOC_API = "org.sakaiproject.api.app.help.TableOfContents"; private static String EXTERNAL_URL; private static String DEFAULT_HELP_FILE = "help.xml"; private static String HELP_BASENAME = "help"; private static String DEFAULT_LOCALE = "default"; private Map<String, List> helpContextConfig = new HashMap<String, List>(); private int contextSize; private RestConfiguration restConfiguration; private ServerConfigurationService serverConfigurationService; // Map which contains all localized help toc private Map<String, TableOfContentsBean> toc; // All supported locales private List<String> locales; private Boolean initialized = Boolean.FALSE; private Object initializedLock = new Object(); private Glossary glossary; private String supportEmailAddress; private ToolManager toolManager; private HibernateTransactionManager txManager; private static final Log LOG = LogFactory.getLog(HelpManagerImpl.class); /** * @see org.sakaiproject.api.app.help.HelpManager#getServerConfigurationService() */ public ServerConfigurationService getServerConfigurationService() { return serverConfigurationService; } /** * @see org.sakaiproject.api.app.help.HelpManager#setServerConfigurationService(org.sakaiproject.service.framework.config.ServerConfigurationService) */ public void setServerConfigurationService(ServerConfigurationService s) { serverConfigurationService = s; } private PreferencesService preferencesService; public void setPreferencesService(PreferencesService preferencesService) { this.preferencesService = preferencesService; } private UserDirectoryService userDirectoryService; public void setUserDirectoryService(UserDirectoryService userDirectoryService) { this.userDirectoryService = userDirectoryService; } public List getContexts(String mappedView) { return (List) helpContextConfig.get(mappedView); } public List getActiveContexts(Map session) { List contexts = (List) session.get("help_contexts"); if (contexts == null) { contexts = new SizedList(getContextSize()); session.put("help_contexts", contexts); } return contexts; } public void addContexts(Map session, String mappedView) { List newContexts = getContexts(mappedView); List contexts = getActiveContexts(session); if (newContexts != null) { contexts.addAll(newContexts); } } /** * return list of resources matching context id * * @param contextId * @return */ public Set<Resource> getResources(Long contextId) { return searchResources(new TermQuery(new Term("context", "\"" + contextId + "\""))); } /** * Store resource * @see org.sakaiproject.api.app.help.HelpManager#storeResource(org.sakaiproject.api.help.Entity) */ public void storeResource(Resource resource) { getHibernateTemplate().saveOrUpdate(resource); } /** * @see org.sakaiproject.api.app.help.HelpManager#getResource(java.lang.Long) */ public Resource getResource(Long id) { return (ResourceBean) getHibernateTemplate().get(ResourceBean.class, id); } /** * @see org.sakaiproject.api.app.help.HelpManager#deleteResource(java.lang.Long) */ public void deleteResource(Long resourceId) { Resource resource = getResource(resourceId); if (resource == null) { return; } getHibernateTemplate().delete(resource); } /** * @see org.sakaiproject.api.app.help.HelpManager#getSource(java.lang.Long) */ public Source getSource(Long id) { try { return (SourceBean) getHibernateTemplate().load(SourceBean.class, id); } catch (HibernateObjectRetrievalFailureException e) { return null; } } /** * @see org.sakaiproject.api.app.help.HelpManager#storeSource(org.sakaiproject.api.help.Source) */ public void storeSource(Source source) { getHibernateTemplate().saveOrUpdate(source); } /** * @see org.sakaiproject.api.app.help.HelpManager#deleteSource(java.lang.Long) */ public void deleteSource(Long sourceId) { Source source = getSource(sourceId); if (source == null) { return; } getHibernateTemplate().delete(source); } /** * @see org.sakaiproject.api.app.help.HelpManager#getContext(java.lang.Long) */ public Context getContext(Long id) { try { return (ContextBean) getHibernateTemplate().load(ContextBean.class, id); } catch (HibernateObjectRetrievalFailureException e) { return null; } } /** * @see org.sakaiproject.api.app.help.HelpManager#storeContext(org.sakaiproject.api.help.Context) */ public void storeContext(Context context) { getHibernateTemplate().saveOrUpdate(context); } /** * @see org.sakaiproject.api.app.help.HelpManager#deleteContext(java.lang.Long) */ public void deleteContext(Long contextId) { Context context = getContext(contextId); if (context == null) { return; } getHibernateTemplate().delete(context); } /** * @see org.sakaiproject.api.app.help.HelpManager#getResourcesForActiveContexts(java.util.Map) */ public Map getResourcesForActiveContexts(Map session) { Map<String, Set<Resource>> resourceMap = new HashMap<String, Set<Resource>>(); List<String> activeContexts = getActiveContexts(session); for(String context : activeContexts) { try { Set<Resource> resources = searchResources(new TermQuery(new Term("context", "\"" + context + "\""))); if (resources != null && resources.size() > 0) { resourceMap.put(context, resources); } } catch (Exception e) { LOG.error(e); } } return resourceMap; } /** * @see org.sakaiproject.api.app.help.HelpManager#searchResources(java.lang.String) */ public Set<Resource> searchResources(String queryStr) { initialize(); try { return searchResources(queryStr, "content"); } catch (ParseException e) { LOG.debug("ParseException parsing Help search query " + queryStr, e); return null; } } /** * @see org.sakaiproject.api.app.help.HelpManager#getTableOfContents() */ public TableOfContents getTableOfContents() { initialize(); return getToc(); } /** * @see org.sakaiproject.api.app.help.HelpManager#setTableOfContents(org.sakaiproject.api.help.TableOfContents) */ public void setTableOfContents(TableOfContents toc) { setToc((TableOfContentsBean) toc); } /** * @see org.sakaiproject.api.app.help.HelpManager#searchGlossary(java.lang.String) */ public GlossaryEntry searchGlossary(String keyword) { return getGlossary().find(keyword); } /** * Search Resources * @param query * @return Set of matching results. */ protected Set<Resource> searchResources(Query query) { Set<Resource> results = new HashSet<Resource>(); String locale = getSelectedLocale().toString(); if (!toc.containsKey(locale)) { locale = DEFAULT_LOCALE; } String luceneFolder = LUCENE_INDEX_PATH + File.separator + locale; IndexReader reader = null; FSDirectory dir = null; try { dir = FSDirectory.open(new File(luceneFolder)); reader = DirectoryReader.open(dir); IndexSearcher searcher = new IndexSearcher(reader); LOG.debug("Searching for: " + query.toString()); //Hits hits = searcher.search(query); TopDocs topDocs = searcher.search(query, 1000); ScoreDoc[] hits = topDocs.scoreDocs; LOG.debug(hits.length + " total matching documents"); for (ScoreDoc scoreDoc : hits) { Document doc = searcher.doc(scoreDoc.doc); ResourceBean resource = getResourceFromDocument(doc); resource.setScore(scoreDoc.score * 100); results.add(resource); } } catch (Exception e) { LOG.error(e); } finally { //http://mail-archives.apache.org/mod_mbox/lucene-java-user/201304.mbox/%3CCAGaRif0agg+XCXbccdxUmB5h9v5dHqjEvwi5X_vmU3sMM20QZg@mail.gmail.com%3E if (reader != null) { try { reader.close(); } catch (IOException e) { //nothing to do } } if (dir != null) { dir.close(); } } return results; } /** * Search Lucene * * @param queryStr * @param defaultField * @return * @throws ParseException */ protected Set<Resource> searchResources(String queryStr, String defaultField) throws ParseException { Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_40); QueryParser parser = new QueryParser(Version.LUCENE_40, defaultField, analyzer); Query query = parser.parse(queryStr); return searchResources(query); } /** * Get Resource From Document. * @param document * @return resource bean */ protected ResourceBean getResourceFromDocument(Document document) { Long id = new Long(document.getField("id").stringValue()); return (ResourceBean) getResource(id); } /** * Get entire Collection of Resources. * @return collection of resources */ protected Collection<? extends Resource> getResources() { return getHibernateTemplate().loadAll(ResourceBean.class); } /** * Get ContextSize. * @return size of Context. */ public int getContextSize() { return contextSize; } /** * Set ContextSize * @param contextSize */ public void setContextSize(int contextSize) { this.contextSize = contextSize; } /** * Get Document. * @param resource * @return document * @throws IOException * @throws MalformedURLException */ protected Document getDocument(ResourceBean resource) throws IOException, MalformedURLException { Document doc = new Document(); if (resource.getContexts() != null) { for (String context : resource.getContexts()) { doc.add(new Field("context", "\"" + context + "\"", Field.Store.YES, Field.Index.NOT_ANALYZED)); } } URL urlResource; URLConnection urlConnection = null; //For local file override String sakaiHomePath = serverConfigurationService.getSakaiHomePath(); String localHelpPath = sakaiHomePath+serverConfigurationService.getString("help.localpath","/help/"); File localFile = new File(localHelpPath+resource.getLocation()); boolean localFileIsFile = false; if(localFile.isFile()) { LOG.debug("Local help file overrides: "+resource.getLocation()); localFileIsFile = true; } StringBuilder sb = new StringBuilder(); if (resource.getLocation() == null || resource.getLocation().startsWith("/")) { // handle REST content if (!getRestConfiguration().getOrganization().equals("sakai")) { urlResource = new URL(getRestConfiguration().getRestUrlInDomain() + resource.getDocId() + "?domain=" + getRestConfiguration().getRestDomain()); urlConnection = urlResource.openConnection(); String basicAuthUserPass = getRestConfiguration().getRestCredentials(); String encoding = Base64.encodeBase64(basicAuthUserPass.getBytes("utf-8")).toString(); urlConnection.setRequestProperty("Authorization", "Basic " + encoding); BufferedReader br = new BufferedReader(new InputStreamReader( urlConnection.getInputStream()), 512); try { int readReturn = 0; char[] cbuf = new char[512]; while ((readReturn = br.read(cbuf, 0, 512)) != -1) { sb.append(cbuf, 0, readReturn); } } finally { br.close(); } // if document is coming from corpus then get document name from xml and assign to resource String resourceName = getRestConfiguration().getResourceNameFromCorpusDoc(sb.toString()); resource.setName(resourceName); storeResource(resource); } else if (!"".equals(EXTERNAL_URL)) { // handle external help location urlResource = new URL(EXTERNAL_URL + resource.getLocation()); } else { // Add the home folder file reading here if(localFileIsFile) { urlResource = localFile.toURI().toURL(); } else { // handle classpath location urlResource = getClass().getResource(resource.getLocation()); } } } else { // handle external location specified in reg file urlResource = new URL(resource.getLocation()); } if (urlResource == null) { return null; } if (resource.getLocation() != null){ String resLocation = resource.getLocation(); if(localFileIsFile) { resLocation = localFile.getPath(); } doc.add(new Field("location", resLocation, Field.Store.YES, Field.Index.NOT_ANALYZED)); } //doc.add(Field.Keyword("id", resource.getId().toString())); doc.add(new Field("id", resource.getId().toString(), Field.Store.YES, Field.Index.NOT_ANALYZED)); if (getRestConfiguration().getOrganization().equals("sakai")) { Reader reader = new BufferedReader(new InputStreamReader(urlResource.openStream())); try { int readReturn = 0; char[] cbuf = new char[512]; while ((readReturn = reader.read(cbuf, 0, 512)) != -1) { sb.append(cbuf, 0, readReturn); } } finally { reader.close(); } } //doc.add(Field.Text("content", sb.toString())); doc.add(new Field("content", sb.toString(), Field.Store.YES, Field.Index.ANALYZED)); return doc; } /** * Get Table Of Contents Bean. * @return table of contents bean */ public TableOfContentsBean getToc() { if (toc == null) { return null; } String locale = getSelectedLocale().toString(); if (toc.containsKey(locale)) { return toc.get(locale); } else { return toc.get(DEFAULT_LOCALE); } } /** * Set Table Of Contents Bean. * @param toc */ public void setToc(TableOfContentsBean toc) { this.toc.put(DEFAULT_LOCALE, toc); } /** * @see org.sakaiproject.api.app.help.HelpManager#getGlossary() */ public Glossary getGlossary() { return glossary; } /** * Set Glossary. * @param glossary */ public void setGlossary(Glossary glossary) { this.glossary = glossary; } /** * @see org.sakaiproject.api.app.help.HelpManager#storeCategory(org.sakaiproject.api.help.Category) */ public void storeCategory(Category category) { getHibernateTemplate().saveOrUpdate(category); } /** * @see org.sakaiproject.api.app.help.HelpManager#createCategory() */ public Category createCategory() { return new CategoryBean(); } /** * @see org.sakaiproject.api.app.help.HelpManager#createResource() */ public Resource createResource() { return new ResourceBean(); } /** * @see org.sakaiproject.api.app.help.HelpManager#getResourceByDocId(java.lang.String) */ public Resource getResourceByDocId(final String docId) { HibernateCallback hcb = new HibernateCallback() { public Object doInHibernate(Session session) throws HibernateException, SQLException { org.hibernate.Query q = session .getNamedQuery(QUERY_GETRESOURCEBYDOCID); q.setString(DOCID, (docId == null) ? null : docId.toLowerCase()); if (q.list().size() == 0){ return null; } else{ return (Resource) q.list().get(0); } } }; Resource resource = (Resource) getHibernateTemplate().execute(hcb); return resource; } /** * @see org.sakaiproject.api.app.help.HelpManager#getWelcomePage() */ public String getWelcomePage() { initialize(); HibernateCallback hcb = new HibernateCallback() { public Object doInHibernate(Session session) throws HibernateException, SQLException { org.hibernate.Query q = session .getNamedQuery(QUERY_GET_WELCOME_PAGE); q.setString(WELCOME_PAGE, "true"); if (q.list().size() == 0){ return null; } else{ return ((Resource) q.list().get(0)).getDocId(); } } }; return (String) getHibernateTemplate().execute(hcb); } /** * Find a Category by name * @param name * @return Category */ public Category getCategoryByName(final String name) { HibernateCallback hcb = new HibernateCallback() { public Object doInHibernate(Session session) throws HibernateException, SQLException { org.hibernate.Query q = session .getNamedQuery(QUERY_GETCATEGORYBYNAME); q.setString(NAME, (name == null) ? name : name.toLowerCase()); return q.uniqueResult(); } }; return (Category) getHibernateTemplate().execute(hcb); } /** * Index Categories and Resources * @param categories */ private void indexRecursive(IndexWriter indexWriter, Set<Category> categories) { for (Category category: categories) { Set<Resource> resourcesList = category.getResources(); for (Resource resource : resourcesList) { try { Document doc = getDocument((ResourceBean)resource); if (doc != null) { indexWriter.addDocument(doc); LOG.debug("added resource '" + resource.getName() + "', doc count=" + indexWriter.maxDoc()); } else { LOG.debug("failed to add resource '" + "' (" + resource.getName()); } } catch (IOException e) { LOG.error("I/O error while adding resource '" + "' (" + resource.getName() + "): " + e.getMessage(), e); } } Set<Category> subCategories = category.getCategories(); indexRecursive(indexWriter, subCategories); } } /** * Store the mapping of Categories and Resources * @param categories */ private void storeRecursive(Set<Category> categories) { for(Category category: categories) { Set<Resource> resourcesList = category.getResources(); category.setResources(null); for (Resource resource: resourcesList) { resource.setDocId(resource.getDocId().toLowerCase()); resource.setCategory(category); } category.setResources(resourcesList); this.storeCategory(category); Set<Category> subCategories = category.getCategories(); storeRecursive(subCategories); } } /** * Get Support Email Address. * @see org.sakaiproject.api.app.help.HelpManager#getSupportEmailAddress() */ public String getSupportEmailAddress() { return supportEmailAddress; } /** * set Support Email Address. * @param email */ public void setSupportEmailAddress(String email) { this.supportEmailAddress = email; } /** * get tool manager * @return Returns the toolManager. */ public ToolManager getToolManager() { return toolManager; } /** * set tool manager * @param toolManager The toolManager to set. */ public void setToolManager(ToolManager toolManager) { this.toolManager = toolManager; } /** * @param txManager The txManager to set. */ public void setTxManager(HibernateTransactionManager txManager) { this.txManager = txManager; } /** * @see org.sakaiproject.api.app.help.HelpManager#getRestConfiguration() */ public RestConfiguration getRestConfiguration() { return restConfiguration; } /** * set REST configuration * @param restConfiguration */ public void setRestConfiguration(RestConfiguration restConfiguration) { this.restConfiguration = restConfiguration; } /** * Reinitialize help content from UI */ public void reInitialize(){ synchronized (initializedLock) { initialized = Boolean.FALSE; } initialize(); } /** * Synchronize first access to tool. * @see org.sakaiproject.api.app.help.HelpManager#initialize() */ public void initialize() { if (initialized.booleanValue()) { return; } else { synchronized (initializedLock) { if (!initialized.booleanValue()) { dropExistingContent(); // handle external help content EXTERNAL_URL = getServerConfigurationService().getString( "help.location"); if (!"".equals(EXTERNAL_URL)) { if (EXTERNAL_URL.endsWith("/")) { // remove trailing forward slash EXTERNAL_URL = EXTERNAL_URL.substring(0, EXTERNAL_URL.length() - 1); } } // Get all supported locales locales = new ArrayList<String>(); Locale[] sl = serverConfigurationService.getSakaiLocales(); for (Locale element : sl) { locales.add(element.toString()); // Locale toString should generate en_GB type identifiers } // Add default locale locales.add(DEFAULT_LOCALE); toc = new HashMap<String, TableOfContentsBean>(); registerHelpContent(); initialized = Boolean.TRUE; } } } } /** * @see org.sakaiproject.api.app.help.HelpManager#getExternalLocation() */ public String getExternalLocation() { return EXTERNAL_URL; } private void dropExistingContent() { if (LOG.isDebugEnabled()) { LOG.debug("dropExistingContent()"); } TransactionTemplate tt = new TransactionTemplate(txManager); tt.execute(new TransactionCallback() { public Object doInTransaction(TransactionStatus status) { getHibernateTemplate().bulkUpdate("delete CategoryBean"); getHibernateTemplate().flush(); return null; } }); } /** * Returns the user locale * @param prefLocales * The prefLocales to set. */ private Locale getSelectedLocale() { Locale loc = preferencesService.getLocale(userDirectoryService.getCurrentUser().getId()); if (loc != null) { return loc; } else { return Locale.getDefault(); } } /** * Register help content either locally or externally * Index resources in Lucene */ private void registerHelpContent() { if (LOG.isDebugEnabled()) { LOG.debug("registerHelpContent()"); } // register external help docs if (!"".equals(EXTERNAL_URL)) { registerExternalHelpContent(EXTERNAL_URL + "/" + DEFAULT_HELP_FILE); } else { registerStaticContent(); } // Create lucene indexes for each toc (which key is either a locale or 'default') for (String key : toc.keySet()) { String luceneIndexPath = LUCENE_INDEX_PATH + File.separator + key; TableOfContentsBean currentToc = toc.get(key); // create index in lucene IndexWriter writer = null; Date start = new Date(); try { //writer = new IndexWriter(luceneIndexPath, new StandardAnalyzer(Version.LUCENE_40), true); FSDirectory directory = FSDirectory.open(new File(luceneIndexPath)); IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_40, new StandardAnalyzer(Version.LUCENE_40)); writer = new IndexWriter(directory, config); } catch (IOException e) { LOG.error("failed to create IndexWriter " + e.getMessage(), e); return; } // Index categories and resources indexRecursive(writer, currentToc.getCategories()); try { // writer.optimize(); writer.commit(); writer.close(); } catch (IOException e) { LOG.error("failed to close writer " + e.getMessage(), e); } Date end = new Date(); LOG.info("finished initializing lucene for '" + key + "' in " + (end.getTime() - start.getTime()) + " total milliseconds"); } } /** * register external help content * build document from external reg file * @param externalHelpReg */ public void registerExternalHelpContent(String helpFile) { Set<Category> categories = new TreeSet<Category>(); URL urlResource = null; InputStream ism = null; BufferedInputStream bis = null; try { try { urlResource = new URL(EXTERNAL_URL + "/" + helpFile); ism = urlResource.openStream(); } catch (IOException e) { // Try default help file helpFile = DEFAULT_HELP_FILE; urlResource = new URL(EXTERNAL_URL + "/" + helpFile); ism = urlResource.openStream(); } bis = new BufferedInputStream(ism); DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); dbf.setNamespaceAware(true); DocumentBuilder builder = dbf.newDocumentBuilder(); InputSource is = new org.xml.sax.InputSource(bis); org.w3c.dom.Document xmlDocument = builder.parse(is); Node helpRegNode = (Node) xmlDocument.getDocumentElement(); recursiveExternalReg(helpRegNode, null, categories); // handle corpus docs if (!getRestConfiguration().getOrganization().equals("sakai")){ // get corpus document String corpusXml = getRestConfiguration().getCorpusDocument(); DocumentBuilderFactory dbfCorpus = DocumentBuilderFactory.newInstance(); dbfCorpus.setNamespaceAware(true); DocumentBuilder builderCorpus = dbfCorpus.newDocumentBuilder(); StringReader sReader = new StringReader(corpusXml); InputSource isCorpus = new org.xml.sax.InputSource(sReader); org.w3c.dom.Document xmlDocumentCorpus = builderCorpus.parse(isCorpus); registerCorpusDocs(xmlDocumentCorpus); sReader.close(); } } catch (MalformedURLException e) { LOG.warn("Unable to load external URL: " + EXTERNAL_URL + "/" + helpFile, e); } catch (IOException e) { LOG.warn("I/O error opening external URL: " + EXTERNAL_URL + "/" + helpFile, e); } catch (ParserConfigurationException e) { LOG.error(e.getMessage(), e); } catch (SAXException e) { LOG.error(e.getMessage(), e); } finally { try{ if (bis != null){ bis.close(); } } catch (IOException e){ LOG.error("error closing stream", e); } } // Add to toc map TableOfContentsBean externalToc = new TableOfContentsBean(); externalToc.setCategories(categories); setTableOfContents(externalToc); } /** ** @return Locale based on its string representation (language_region) **/ private Locale getLocaleFromString(String localeString) { return serverConfigurationService.getLocaleFromString(localeString); } /** * Adds help for a specific locale * @param path * @param locale */ private void addToolHelp(String path, String locale) { URL urlResource = null; String classpathUrl = null; String sakaiHomePath = serverConfigurationService.getSakaiHomePath(); String localHelpPath = sakaiHomePath+serverConfigurationService.getString("help.localpath","/help/"); File localFile = null; // find default help file if ( locale.equals(DEFAULT_LOCALE) ) { classpathUrl = path + "/" + HELP_BASENAME + ".xml"; localFile = new File(localHelpPath+classpathUrl); if(localFile.isFile()) { try { urlResource = localFile.toURI().toURL(); } catch (MalformedURLException e) { urlResource = getClass().getResource(classpathUrl); } } else { urlResource = getClass().getResource(classpathUrl); } } // find localized help file else { classpathUrl = path + "/" + HELP_BASENAME + "_" + locale + ".xml"; localFile = new File(localHelpPath+classpathUrl); if(localFile.isFile()) { try { urlResource = localFile.toURI().toURL(); } catch (MalformedURLException e) { urlResource = getClass().getResource(classpathUrl); } } else { urlResource = getClass().getResource(classpathUrl); } // If language/region help file not found, look for language-only help file if ( urlResource == null ) { Locale nextLocale = getLocaleFromString(locale); classpathUrl = path + "/" + HELP_BASENAME + "_" + nextLocale.getLanguage() + ".xml"; localFile = new File(localHelpPath+classpathUrl); if(localFile.isFile()) { try { urlResource = localFile.toURI().toURL(); } catch (MalformedURLException e) { urlResource = getClass().getResource(classpathUrl); } } else { urlResource = getClass().getResource(classpathUrl); } } // If language-only help file not found, look for default help file if ( urlResource == null ) { classpathUrl = path + "/" + HELP_BASENAME + ".xml"; localFile = new File(localHelpPath+classpathUrl); if(localFile.isFile()) { try { urlResource = localFile.toURI().toURL(); } catch (MalformedURLException e) { urlResource = getClass().getResource(classpathUrl); } } else { urlResource = getClass().getResource(classpathUrl); } } } // Url exists? if (urlResource != null) { TableOfContentsBean localizedToc; // Add this tool categories to this tool toc try { org.springframework.core.io.Resource resource = new UrlResource(urlResource); BeanFactory beanFactory = new XmlBeanFactory(resource); TableOfContents tocTemp = (TableOfContents) beanFactory.getBean(TOC_API); Set<Category> categories = tocTemp.getCategories(); storeRecursive(categories); // Get localized toc if (toc.containsKey(locale)) { localizedToc = toc.get(locale); } else { // Create and add localized toc localizedToc = new TableOfContentsBean(); toc.put(locale, localizedToc); } // Update localized toc categories localizedToc.getCategories().addAll(categories); } catch (Exception e) { LOG.warn("Unable to load help index from " + classpathUrl + " : " + e.getMessage()); } } } /** * register local content */ public void registerStaticContent() { // register static content Set<Tool> toolSet = toolManager.findTools(null, null); // find out what we want to ignore List<String> hideHelp = Arrays.asList(StringUtils.split(serverConfigurationService.getString("help.hide"), ",")); if (hideHelp == null) { hideHelp = new ArrayList<String> (); } for (Tool tool : toolSet) { if (tool != null && tool.getId() != null && !hideHelp.contains(tool.getId())) { String[] extraCollections = {}; String toolHelpCollections = tool.getRegisteredConfig().getProperty(TOOLCONFIG_HELP_COLLECTIONS); if (toolHelpCollections != null) { extraCollections = StringUtils.split(toolHelpCollections, ","); } // Loop throughout the locales list for (String locale : locales) { // Add localized tool helps addToolHelp("/" + tool.getId().toLowerCase().replaceAll("\\.", "_"), locale); // Add any other optional collections for (String extraCollection : extraCollections) { addToolHelp("/" + extraCollection, locale); } } } } // Sort the help topics for each locale for (String locale : locales) { TableOfContentsBean localizedToc = toc.get(locale); // Sort this localized toc categories with a TreeSet if (localizedToc != null) { Set<Category> sortedCategories = new TreeSet<Category>(); Set<Category> categories = localizedToc.getCategories(); sortedCategories.addAll(categories); localizedToc.setCategories(sortedCategories); } } } private static int cnt = 0; /** * Parse external help reg doc recursively * @param n * @param category */ public void recursiveExternalReg(Node n, Category category, Set<Category> categories) { if (n == null) { return; } NodeList nodeList = n.getChildNodes(); int nodeListLength = nodeList.getLength(); for (int i = 0; i < nodeListLength; i++) { if (nodeList.item(i).getNodeType() != Node.ELEMENT_NODE) { continue; } Node currentNode = nodeList.item(i); if ("category".equals(currentNode.getNodeName())) { Category childCategory = new CategoryBean(); childCategory.setName(currentNode.getAttributes().getNamedItem("name") .getNodeValue()); if (category != null) { childCategory.setParent(category); category.getCategories().add(childCategory); } storeCategory(childCategory); categories.add(childCategory); LOG.info("adding help category: " + childCategory.getName()); recursiveExternalReg(currentNode, childCategory, categories); } else if ("resource".equals(currentNode.getNodeName())) { Resource resource = new ResourceBean(); NamedNodeMap nnm = currentNode.getAttributes(); if (nnm != null) { // name required resource.setName(nnm.getNamedItem("name").getNodeValue()); if (nnm.getNamedItem("location") != null) { resource.setLocation(nnm.getNamedItem("location").getNodeValue()); } if (nnm.getNamedItem("docId") != null) { resource.setDocId(nnm.getNamedItem("docId").getNodeValue()); } else { resource.setDocId(Integer.valueOf(cnt).toString()); cnt++; } //defaultForTool is an optional attribute if (nnm.getNamedItem("defaultForTool") != null) { resource.setDefaultForTool(nnm.getNamedItem("defaultForTool") .getNodeValue()); } // welcomePage is an optional attribute if (nnm.getNamedItem("welcomePage") != null) { resource.setWelcomePage(nnm.getNamedItem("welcomePage") .getNodeValue().toLowerCase()); } } resource.setCategory(category); category.getResources().add(resource); storeResource(resource); LOG.info("adding help resource: " + resource + " to category: " + category.getName()); recursiveExternalReg(currentNode, category, categories); } } } /** * Parse corpus document * @param doc document */ public void registerCorpusDocs(org.w3c.dom.Document doc) { if (doc == null) { return; } List<String> arrayCorpus = new ArrayList<String>(); NodeList nodeList = doc.getElementsByTagName("id"); int nodeListLength = nodeList.getLength(); for (int i = 0; i < nodeListLength; i++) { Node currentNode = nodeList.item(i); NodeList nlChildren = currentNode.getChildNodes(); for (int j = 0; j < nlChildren.getLength(); j++){ if (nlChildren.item(j).getNodeType() == Node.TEXT_NODE){ arrayCorpus.add(nlChildren.item(j).getNodeValue()); } } } // iterate through corpus docs and add to home category if not already // added by help.xml external registration // if Home category does not exist, then create it if (getCategoryByName("Home") == null){ Category cat = new CategoryBean(); cat.setName("Home"); storeCategory(cat); } for (int i = 0; i < arrayCorpus.size(); i++){ String currentDocId = (String) arrayCorpus.get(i); // if the corpus doc does not already exist from help.xml, then add it to the Home category if (this.getResourceByDocId(currentDocId) == null){ Resource resource = new ResourceBean(); resource.setDocId(currentDocId); resource.setName(currentDocId); Category homeCategory = getCategoryByName("Home"); resource.setCategory(homeCategory); homeCategory.getResources().add(resource); storeResource(resource); } } } }
help/help-component/src/java/org/sakaiproject/component/app/help/HelpManagerImpl.java
/********************************************************************************** * $URL$ * $Id$ *********************************************************************************** * * Copyright (c) 2003, 2004, 2005, 2006, 2007, 2008, 2009 The Sakai Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ECL-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * **********************************************************************************/ package org.sakaiproject.component.app.help; import java.io.BufferedInputStream; import java.io.BufferedReader; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.io.Reader; import java.io.StringReader; import java.net.MalformedURLException; import java.net.URL; import java.net.URLConnection; import java.sql.SQLException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.TreeSet; import javax.xml.parsers.DocumentBuilder; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.ParserConfigurationException; import org.apache.commons.codec.binary.Base64; import org.apache.commons.lang.StringUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.standard.StandardAnalyzer; import org.apache.lucene.document.Document; import org.apache.lucene.document.Field; import org.apache.lucene.index.IndexReader; import org.apache.lucene.index.DirectoryReader; import org.apache.lucene.index.IndexWriter; import org.apache.lucene.index.IndexWriterConfig; import org.apache.lucene.index.Term; import org.apache.lucene.queryparser.classic.ParseException; import org.apache.lucene.queryparser.classic.QueryParser; import org.apache.lucene.search.IndexSearcher; import org.apache.lucene.search.Query; import org.apache.lucene.search.ScoreDoc; import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.FSDirectory; import org.apache.lucene.util.Version; import org.hibernate.HibernateException; import org.hibernate.Session; import org.sakaiproject.api.app.help.Category; import org.sakaiproject.api.app.help.Context; import org.sakaiproject.api.app.help.Glossary; import org.sakaiproject.api.app.help.GlossaryEntry; import org.sakaiproject.api.app.help.HelpManager; import org.sakaiproject.api.app.help.Resource; import org.sakaiproject.api.app.help.RestConfiguration; import org.sakaiproject.api.app.help.Source; import org.sakaiproject.api.app.help.TableOfContents; import org.sakaiproject.component.api.ServerConfigurationService; import org.sakaiproject.component.app.help.model.CategoryBean; import org.sakaiproject.component.app.help.model.ContextBean; import org.sakaiproject.component.app.help.model.ResourceBean; import org.sakaiproject.component.app.help.model.SourceBean; import org.sakaiproject.component.app.help.model.TableOfContentsBean; import org.sakaiproject.tool.api.Tool; import org.sakaiproject.tool.api.ToolManager; import org.sakaiproject.user.api.PreferencesService; import org.sakaiproject.user.api.UserDirectoryService; import org.springframework.beans.factory.BeanFactory; import org.springframework.beans.factory.xml.XmlBeanFactory; import org.springframework.core.io.UrlResource; import org.springframework.orm.hibernate3.HibernateCallback; import org.springframework.orm.hibernate3.HibernateObjectRetrievalFailureException; import org.springframework.orm.hibernate3.HibernateTransactionManager; import org.springframework.orm.hibernate3.support.HibernateDaoSupport; import org.springframework.transaction.TransactionStatus; import org.springframework.transaction.support.TransactionCallback; import org.springframework.transaction.support.TransactionTemplate; import org.w3c.dom.NamedNodeMap; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.xml.sax.InputSource; import org.xml.sax.SAXException; /** * HelpManager provides database and search capabilitites for the Sakai help tool. * @author <a href="mailto:jlannan.iupui.edu">Jarrod Lannan</a> * @version $Id$ * */ public class HelpManagerImpl extends HibernateDaoSupport implements HelpManager { private static final String QUERY_GETRESOURCEBYDOCID = "query.getResourceByDocId"; private static final String QUERY_GETCATEGORYBYNAME = "query.getCategoryByName"; private static final String QUERY_GET_WELCOME_PAGE = "query.getWelcomePage"; private static final String DOCID = "docId"; private static final String WELCOME_PAGE = "welcomePage"; private static final String NAME = "name"; private static final String LUCENE_INDEX_PATH = System .getProperty("java.io.tmpdir") + File.separator + "sakai.help"; private static final String TOC_API = "org.sakaiproject.api.app.help.TableOfContents"; private static String EXTERNAL_URL; private static String DEFAULT_HELP_FILE = "help.xml"; private static String HELP_BASENAME = "help"; private static String DEFAULT_LOCALE = "default"; private Map<String, List> helpContextConfig = new HashMap<String, List>(); private int contextSize; private RestConfiguration restConfiguration; private ServerConfigurationService serverConfigurationService; // Map which contains all localized help toc private Map<String, TableOfContentsBean> toc; // All supported locales private List<String> locales; private Boolean initialized = Boolean.FALSE; private Object initializedLock = new Object(); private Glossary glossary; private String supportEmailAddress; private ToolManager toolManager; private HibernateTransactionManager txManager; private static final Log LOG = LogFactory.getLog(HelpManagerImpl.class); /** * @see org.sakaiproject.api.app.help.HelpManager#getServerConfigurationService() */ public ServerConfigurationService getServerConfigurationService() { return serverConfigurationService; } /** * @see org.sakaiproject.api.app.help.HelpManager#setServerConfigurationService(org.sakaiproject.service.framework.config.ServerConfigurationService) */ public void setServerConfigurationService(ServerConfigurationService s) { serverConfigurationService = s; } private PreferencesService preferencesService; public void setPreferencesService(PreferencesService preferencesService) { this.preferencesService = preferencesService; } private UserDirectoryService userDirectoryService; public void setUserDirectoryService(UserDirectoryService userDirectoryService) { this.userDirectoryService = userDirectoryService; } public List getContexts(String mappedView) { return (List) helpContextConfig.get(mappedView); } public List getActiveContexts(Map session) { List contexts = (List) session.get("help_contexts"); if (contexts == null) { contexts = new SizedList(getContextSize()); session.put("help_contexts", contexts); } return contexts; } public void addContexts(Map session, String mappedView) { List newContexts = getContexts(mappedView); List contexts = getActiveContexts(session); if (newContexts != null) { contexts.addAll(newContexts); } } /** * return list of resources matching context id * * @param contextId * @return */ public Set<Resource> getResources(Long contextId) { return searchResources(new TermQuery(new Term("context", "\"" + contextId + "\""))); } /** * Store resource * @see org.sakaiproject.api.app.help.HelpManager#storeResource(org.sakaiproject.api.help.Entity) */ public void storeResource(Resource resource) { getHibernateTemplate().saveOrUpdate(resource); } /** * @see org.sakaiproject.api.app.help.HelpManager#getResource(java.lang.Long) */ public Resource getResource(Long id) { return (ResourceBean) getHibernateTemplate().get(ResourceBean.class, id); } /** * @see org.sakaiproject.api.app.help.HelpManager#deleteResource(java.lang.Long) */ public void deleteResource(Long resourceId) { Resource resource = getResource(resourceId); if (resource == null) { return; } getHibernateTemplate().delete(resource); } /** * @see org.sakaiproject.api.app.help.HelpManager#getSource(java.lang.Long) */ public Source getSource(Long id) { try { return (SourceBean) getHibernateTemplate().load(SourceBean.class, id); } catch (HibernateObjectRetrievalFailureException e) { return null; } } /** * @see org.sakaiproject.api.app.help.HelpManager#storeSource(org.sakaiproject.api.help.Source) */ public void storeSource(Source source) { getHibernateTemplate().saveOrUpdate(source); } /** * @see org.sakaiproject.api.app.help.HelpManager#deleteSource(java.lang.Long) */ public void deleteSource(Long sourceId) { Source source = getSource(sourceId); if (source == null) { return; } getHibernateTemplate().delete(source); } /** * @see org.sakaiproject.api.app.help.HelpManager#getContext(java.lang.Long) */ public Context getContext(Long id) { try { return (ContextBean) getHibernateTemplate().load(ContextBean.class, id); } catch (HibernateObjectRetrievalFailureException e) { return null; } } /** * @see org.sakaiproject.api.app.help.HelpManager#storeContext(org.sakaiproject.api.help.Context) */ public void storeContext(Context context) { getHibernateTemplate().saveOrUpdate(context); } /** * @see org.sakaiproject.api.app.help.HelpManager#deleteContext(java.lang.Long) */ public void deleteContext(Long contextId) { Context context = getContext(contextId); if (context == null) { return; } getHibernateTemplate().delete(context); } /** * @see org.sakaiproject.api.app.help.HelpManager#getResourcesForActiveContexts(java.util.Map) */ public Map getResourcesForActiveContexts(Map session) { Map<String, Set<Resource>> resourceMap = new HashMap<String, Set<Resource>>(); List<String> activeContexts = getActiveContexts(session); for(String context : activeContexts) { try { Set<Resource> resources = searchResources(new TermQuery(new Term("context", "\"" + context + "\""))); if (resources != null && resources.size() > 0) { resourceMap.put(context, resources); } } catch (Exception e) { LOG.error(e); } } return resourceMap; } /** * @see org.sakaiproject.api.app.help.HelpManager#searchResources(java.lang.String) */ public Set<Resource> searchResources(String queryStr) { initialize(); try { return searchResources(queryStr, "content"); } catch (ParseException e) { LOG.debug("ParseException parsing Help search query " + queryStr, e); return null; } } /** * @see org.sakaiproject.api.app.help.HelpManager#getTableOfContents() */ public TableOfContents getTableOfContents() { initialize(); return getToc(); } /** * @see org.sakaiproject.api.app.help.HelpManager#setTableOfContents(org.sakaiproject.api.help.TableOfContents) */ public void setTableOfContents(TableOfContents toc) { setToc((TableOfContentsBean) toc); } /** * @see org.sakaiproject.api.app.help.HelpManager#searchGlossary(java.lang.String) */ public GlossaryEntry searchGlossary(String keyword) { return getGlossary().find(keyword); } /** * Search Resources * @param query * @return Set of matching results. */ protected Set<Resource> searchResources(Query query) { Set<Resource> results = new HashSet<Resource>(); String locale = getSelectedLocale().toString(); if (!toc.containsKey(locale)) { locale = DEFAULT_LOCALE; } String luceneFolder = LUCENE_INDEX_PATH + File.separator + locale; IndexReader reader = null; FSDirectory dir = null; try { reader = DirectoryReader.open(FSDirectory.open(new File(luceneFolder))); IndexSearcher searcher = new IndexSearcher(reader); LOG.debug("Searching for: " + query.toString()); //Hits hits = searcher.search(query); TopDocs topDocs = searcher.search(query, 1000); ScoreDoc[] hits = topDocs.scoreDocs; LOG.debug(hits.length + " total matching documents"); for (ScoreDoc scoreDoc : hits) { Document doc = searcher.doc(scoreDoc.doc); ResourceBean resource = getResourceFromDocument(doc); resource.setScore(scoreDoc.score * 100); results.add(resource); } } catch (Exception e) { LOG.error(e); } finally { //http://mail-archives.apache.org/mod_mbox/lucene-java-user/201304.mbox/%3CCAGaRif0agg+XCXbccdxUmB5h9v5dHqjEvwi5X_vmU3sMM20QZg@mail.gmail.com%3E if (reader != null) { try { reader.close(); } catch (IOException e) { //nothing to do } } if (dir != null) { dir.close(); } } return results; } /** * Search Lucene * * @param queryStr * @param defaultField * @return * @throws ParseException */ protected Set<Resource> searchResources(String queryStr, String defaultField) throws ParseException { Analyzer analyzer = new StandardAnalyzer(Version.LUCENE_40); QueryParser parser = new QueryParser(Version.LUCENE_40, defaultField, analyzer); Query query = parser.parse(queryStr); return searchResources(query); } /** * Get Resource From Document. * @param document * @return resource bean */ protected ResourceBean getResourceFromDocument(Document document) { Long id = new Long(document.getField("id").stringValue()); return (ResourceBean) getResource(id); } /** * Get entire Collection of Resources. * @return collection of resources */ protected Collection<? extends Resource> getResources() { return getHibernateTemplate().loadAll(ResourceBean.class); } /** * Get ContextSize. * @return size of Context. */ public int getContextSize() { return contextSize; } /** * Set ContextSize * @param contextSize */ public void setContextSize(int contextSize) { this.contextSize = contextSize; } /** * Get Document. * @param resource * @return document * @throws IOException * @throws MalformedURLException */ protected Document getDocument(ResourceBean resource) throws IOException, MalformedURLException { Document doc = new Document(); if (resource.getContexts() != null) { for (String context : resource.getContexts()) { doc.add(new Field("context", "\"" + context + "\"", Field.Store.YES, Field.Index.NOT_ANALYZED)); } } URL urlResource; URLConnection urlConnection = null; //For local file override String sakaiHomePath = serverConfigurationService.getSakaiHomePath(); String localHelpPath = sakaiHomePath+serverConfigurationService.getString("help.localpath","/help/"); File localFile = new File(localHelpPath+resource.getLocation()); boolean localFileIsFile = false; if(localFile.isFile()) { LOG.debug("Local help file overrides: "+resource.getLocation()); localFileIsFile = true; } StringBuilder sb = new StringBuilder(); if (resource.getLocation() == null || resource.getLocation().startsWith("/")) { // handle REST content if (!getRestConfiguration().getOrganization().equals("sakai")) { urlResource = new URL(getRestConfiguration().getRestUrlInDomain() + resource.getDocId() + "?domain=" + getRestConfiguration().getRestDomain()); urlConnection = urlResource.openConnection(); String basicAuthUserPass = getRestConfiguration().getRestCredentials(); String encoding = Base64.encodeBase64(basicAuthUserPass.getBytes("utf-8")).toString(); urlConnection.setRequestProperty("Authorization", "Basic " + encoding); BufferedReader br = new BufferedReader(new InputStreamReader( urlConnection.getInputStream()), 512); try { int readReturn = 0; char[] cbuf = new char[512]; while ((readReturn = br.read(cbuf, 0, 512)) != -1) { sb.append(cbuf, 0, readReturn); } } finally { br.close(); } // if document is coming from corpus then get document name from xml and assign to resource String resourceName = getRestConfiguration().getResourceNameFromCorpusDoc(sb.toString()); resource.setName(resourceName); storeResource(resource); } else if (!"".equals(EXTERNAL_URL)) { // handle external help location urlResource = new URL(EXTERNAL_URL + resource.getLocation()); } else { // Add the home folder file reading here if(localFileIsFile) { urlResource = localFile.toURI().toURL(); } else { // handle classpath location urlResource = getClass().getResource(resource.getLocation()); } } } else { // handle external location specified in reg file urlResource = new URL(resource.getLocation()); } if (urlResource == null) { return null; } if (resource.getLocation() != null){ String resLocation = resource.getLocation(); if(localFileIsFile) { resLocation = localFile.getPath(); } doc.add(new Field("location", resLocation, Field.Store.YES, Field.Index.NOT_ANALYZED)); } //doc.add(Field.Keyword("id", resource.getId().toString())); doc.add(new Field("id", resource.getId().toString(), Field.Store.YES, Field.Index.NOT_ANALYZED)); if (getRestConfiguration().getOrganization().equals("sakai")) { Reader reader = new BufferedReader(new InputStreamReader(urlResource.openStream())); try { int readReturn = 0; char[] cbuf = new char[512]; while ((readReturn = reader.read(cbuf, 0, 512)) != -1) { sb.append(cbuf, 0, readReturn); } } finally { reader.close(); } } //doc.add(Field.Text("content", sb.toString())); doc.add(new Field("content", sb.toString(), Field.Store.YES, Field.Index.ANALYZED)); return doc; } /** * Get Table Of Contents Bean. * @return table of contents bean */ public TableOfContentsBean getToc() { if (toc == null) { return null; } String locale = getSelectedLocale().toString(); if (toc.containsKey(locale)) { return toc.get(locale); } else { return toc.get(DEFAULT_LOCALE); } } /** * Set Table Of Contents Bean. * @param toc */ public void setToc(TableOfContentsBean toc) { this.toc.put(DEFAULT_LOCALE, toc); } /** * @see org.sakaiproject.api.app.help.HelpManager#getGlossary() */ public Glossary getGlossary() { return glossary; } /** * Set Glossary. * @param glossary */ public void setGlossary(Glossary glossary) { this.glossary = glossary; } /** * @see org.sakaiproject.api.app.help.HelpManager#storeCategory(org.sakaiproject.api.help.Category) */ public void storeCategory(Category category) { getHibernateTemplate().saveOrUpdate(category); } /** * @see org.sakaiproject.api.app.help.HelpManager#createCategory() */ public Category createCategory() { return new CategoryBean(); } /** * @see org.sakaiproject.api.app.help.HelpManager#createResource() */ public Resource createResource() { return new ResourceBean(); } /** * @see org.sakaiproject.api.app.help.HelpManager#getResourceByDocId(java.lang.String) */ public Resource getResourceByDocId(final String docId) { HibernateCallback hcb = new HibernateCallback() { public Object doInHibernate(Session session) throws HibernateException, SQLException { org.hibernate.Query q = session .getNamedQuery(QUERY_GETRESOURCEBYDOCID); q.setString(DOCID, (docId == null) ? null : docId.toLowerCase()); if (q.list().size() == 0){ return null; } else{ return (Resource) q.list().get(0); } } }; Resource resource = (Resource) getHibernateTemplate().execute(hcb); return resource; } /** * @see org.sakaiproject.api.app.help.HelpManager#getWelcomePage() */ public String getWelcomePage() { initialize(); HibernateCallback hcb = new HibernateCallback() { public Object doInHibernate(Session session) throws HibernateException, SQLException { org.hibernate.Query q = session .getNamedQuery(QUERY_GET_WELCOME_PAGE); q.setString(WELCOME_PAGE, "true"); if (q.list().size() == 0){ return null; } else{ return ((Resource) q.list().get(0)).getDocId(); } } }; return (String) getHibernateTemplate().execute(hcb); } /** * Find a Category by name * @param name * @return Category */ public Category getCategoryByName(final String name) { HibernateCallback hcb = new HibernateCallback() { public Object doInHibernate(Session session) throws HibernateException, SQLException { org.hibernate.Query q = session .getNamedQuery(QUERY_GETCATEGORYBYNAME); q.setString(NAME, (name == null) ? name : name.toLowerCase()); return q.uniqueResult(); } }; return (Category) getHibernateTemplate().execute(hcb); } /** * Index Categories and Resources * @param categories */ private void indexRecursive(IndexWriter indexWriter, Set<Category> categories) { for (Category category: categories) { Set<Resource> resourcesList = category.getResources(); for (Resource resource : resourcesList) { try { Document doc = getDocument((ResourceBean)resource); if (doc != null) { indexWriter.addDocument(doc); LOG.debug("added resource '" + resource.getName() + "', doc count=" + indexWriter.maxDoc()); } else { LOG.debug("failed to add resource '" + "' (" + resource.getName()); } } catch (IOException e) { LOG.error("I/O error while adding resource '" + "' (" + resource.getName() + "): " + e.getMessage(), e); } } Set<Category> subCategories = category.getCategories(); indexRecursive(indexWriter, subCategories); } } /** * Store the mapping of Categories and Resources * @param categories */ private void storeRecursive(Set<Category> categories) { for(Category category: categories) { Set<Resource> resourcesList = category.getResources(); category.setResources(null); for (Resource resource: resourcesList) { resource.setDocId(resource.getDocId().toLowerCase()); resource.setCategory(category); } category.setResources(resourcesList); this.storeCategory(category); Set<Category> subCategories = category.getCategories(); storeRecursive(subCategories); } } /** * Get Support Email Address. * @see org.sakaiproject.api.app.help.HelpManager#getSupportEmailAddress() */ public String getSupportEmailAddress() { return supportEmailAddress; } /** * set Support Email Address. * @param email */ public void setSupportEmailAddress(String email) { this.supportEmailAddress = email; } /** * get tool manager * @return Returns the toolManager. */ public ToolManager getToolManager() { return toolManager; } /** * set tool manager * @param toolManager The toolManager to set. */ public void setToolManager(ToolManager toolManager) { this.toolManager = toolManager; } /** * @param txManager The txManager to set. */ public void setTxManager(HibernateTransactionManager txManager) { this.txManager = txManager; } /** * @see org.sakaiproject.api.app.help.HelpManager#getRestConfiguration() */ public RestConfiguration getRestConfiguration() { return restConfiguration; } /** * set REST configuration * @param restConfiguration */ public void setRestConfiguration(RestConfiguration restConfiguration) { this.restConfiguration = restConfiguration; } /** * Reinitialize help content from UI */ public void reInitialize(){ synchronized (initializedLock) { initialized = Boolean.FALSE; } initialize(); } /** * Synchronize first access to tool. * @see org.sakaiproject.api.app.help.HelpManager#initialize() */ public void initialize() { if (initialized.booleanValue()) { return; } else { synchronized (initializedLock) { if (!initialized.booleanValue()) { dropExistingContent(); // handle external help content EXTERNAL_URL = getServerConfigurationService().getString( "help.location"); if (!"".equals(EXTERNAL_URL)) { if (EXTERNAL_URL.endsWith("/")) { // remove trailing forward slash EXTERNAL_URL = EXTERNAL_URL.substring(0, EXTERNAL_URL.length() - 1); } } // Get all supported locales locales = new ArrayList<String>(); Locale[] sl = serverConfigurationService.getSakaiLocales(); for (Locale element : sl) { locales.add(element.toString()); // Locale toString should generate en_GB type identifiers } // Add default locale locales.add(DEFAULT_LOCALE); toc = new HashMap<String, TableOfContentsBean>(); registerHelpContent(); initialized = Boolean.TRUE; } } } } /** * @see org.sakaiproject.api.app.help.HelpManager#getExternalLocation() */ public String getExternalLocation() { return EXTERNAL_URL; } private void dropExistingContent() { if (LOG.isDebugEnabled()) { LOG.debug("dropExistingContent()"); } TransactionTemplate tt = new TransactionTemplate(txManager); tt.execute(new TransactionCallback() { public Object doInTransaction(TransactionStatus status) { getHibernateTemplate().bulkUpdate("delete CategoryBean"); getHibernateTemplate().flush(); return null; } }); } /** * Returns the user locale * @param prefLocales * The prefLocales to set. */ private Locale getSelectedLocale() { Locale loc = preferencesService.getLocale(userDirectoryService.getCurrentUser().getId()); if (loc != null) { return loc; } else { return Locale.getDefault(); } } /** * Register help content either locally or externally * Index resources in Lucene */ private void registerHelpContent() { if (LOG.isDebugEnabled()) { LOG.debug("registerHelpContent()"); } // register external help docs if (!"".equals(EXTERNAL_URL)) { registerExternalHelpContent(EXTERNAL_URL + "/" + DEFAULT_HELP_FILE); } else { registerStaticContent(); } // Create lucene indexes for each toc (which key is either a locale or 'default') for (String key : toc.keySet()) { String luceneIndexPath = LUCENE_INDEX_PATH + File.separator + key; TableOfContentsBean currentToc = toc.get(key); // create index in lucene IndexWriter writer = null; Date start = new Date(); try { //writer = new IndexWriter(luceneIndexPath, new StandardAnalyzer(Version.LUCENE_40), true); FSDirectory directory = FSDirectory.open(new File(luceneIndexPath)); IndexWriterConfig config = new IndexWriterConfig(Version.LUCENE_40, new StandardAnalyzer(Version.LUCENE_40)); writer = new IndexWriter(directory, config); } catch (IOException e) { LOG.error("failed to create IndexWriter " + e.getMessage(), e); return; } // Index categories and resources indexRecursive(writer, currentToc.getCategories()); try { // writer.optimize(); writer.commit(); writer.close(); } catch (IOException e) { LOG.error("failed to close writer " + e.getMessage(), e); } Date end = new Date(); LOG.info("finished initializing lucene for '" + key + "' in " + (end.getTime() - start.getTime()) + " total milliseconds"); } } /** * register external help content * build document from external reg file * @param externalHelpReg */ public void registerExternalHelpContent(String helpFile) { Set<Category> categories = new TreeSet<Category>(); URL urlResource = null; InputStream ism = null; BufferedInputStream bis = null; try { try { urlResource = new URL(EXTERNAL_URL + "/" + helpFile); ism = urlResource.openStream(); } catch (IOException e) { // Try default help file helpFile = DEFAULT_HELP_FILE; urlResource = new URL(EXTERNAL_URL + "/" + helpFile); ism = urlResource.openStream(); } bis = new BufferedInputStream(ism); DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); dbf.setNamespaceAware(true); DocumentBuilder builder = dbf.newDocumentBuilder(); InputSource is = new org.xml.sax.InputSource(bis); org.w3c.dom.Document xmlDocument = builder.parse(is); Node helpRegNode = (Node) xmlDocument.getDocumentElement(); recursiveExternalReg(helpRegNode, null, categories); // handle corpus docs if (!getRestConfiguration().getOrganization().equals("sakai")){ // get corpus document String corpusXml = getRestConfiguration().getCorpusDocument(); DocumentBuilderFactory dbfCorpus = DocumentBuilderFactory.newInstance(); dbfCorpus.setNamespaceAware(true); DocumentBuilder builderCorpus = dbfCorpus.newDocumentBuilder(); StringReader sReader = new StringReader(corpusXml); InputSource isCorpus = new org.xml.sax.InputSource(sReader); org.w3c.dom.Document xmlDocumentCorpus = builderCorpus.parse(isCorpus); registerCorpusDocs(xmlDocumentCorpus); sReader.close(); } } catch (MalformedURLException e) { LOG.warn("Unable to load external URL: " + EXTERNAL_URL + "/" + helpFile, e); } catch (IOException e) { LOG.warn("I/O error opening external URL: " + EXTERNAL_URL + "/" + helpFile, e); } catch (ParserConfigurationException e) { LOG.error(e.getMessage(), e); } catch (SAXException e) { LOG.error(e.getMessage(), e); } finally { try{ if (bis != null){ bis.close(); } } catch (IOException e){ LOG.error("error closing stream", e); } } // Add to toc map TableOfContentsBean externalToc = new TableOfContentsBean(); externalToc.setCategories(categories); setTableOfContents(externalToc); } /** ** @return Locale based on its string representation (language_region) **/ private Locale getLocaleFromString(String localeString) { return serverConfigurationService.getLocaleFromString(localeString); } /** * Adds help for a specific locale * @param path * @param locale */ private void addToolHelp(String path, String locale) { URL urlResource = null; String classpathUrl = null; String sakaiHomePath = serverConfigurationService.getSakaiHomePath(); String localHelpPath = sakaiHomePath+serverConfigurationService.getString("help.localpath","/help/"); File localFile = null; // find default help file if ( locale.equals(DEFAULT_LOCALE) ) { classpathUrl = path + "/" + HELP_BASENAME + ".xml"; localFile = new File(localHelpPath+classpathUrl); if(localFile.isFile()) { try { urlResource = localFile.toURI().toURL(); } catch (MalformedURLException e) { urlResource = getClass().getResource(classpathUrl); } } else { urlResource = getClass().getResource(classpathUrl); } } // find localized help file else { classpathUrl = path + "/" + HELP_BASENAME + "_" + locale + ".xml"; localFile = new File(localHelpPath+classpathUrl); if(localFile.isFile()) { try { urlResource = localFile.toURI().toURL(); } catch (MalformedURLException e) { urlResource = getClass().getResource(classpathUrl); } } else { urlResource = getClass().getResource(classpathUrl); } // If language/region help file not found, look for language-only help file if ( urlResource == null ) { Locale nextLocale = getLocaleFromString(locale); classpathUrl = path + "/" + HELP_BASENAME + "_" + nextLocale.getLanguage() + ".xml"; localFile = new File(localHelpPath+classpathUrl); if(localFile.isFile()) { try { urlResource = localFile.toURI().toURL(); } catch (MalformedURLException e) { urlResource = getClass().getResource(classpathUrl); } } else { urlResource = getClass().getResource(classpathUrl); } } // If language-only help file not found, look for default help file if ( urlResource == null ) { classpathUrl = path + "/" + HELP_BASENAME + ".xml"; localFile = new File(localHelpPath+classpathUrl); if(localFile.isFile()) { try { urlResource = localFile.toURI().toURL(); } catch (MalformedURLException e) { urlResource = getClass().getResource(classpathUrl); } } else { urlResource = getClass().getResource(classpathUrl); } } } // Url exists? if (urlResource != null) { TableOfContentsBean localizedToc; // Add this tool categories to this tool toc try { org.springframework.core.io.Resource resource = new UrlResource(urlResource); BeanFactory beanFactory = new XmlBeanFactory(resource); TableOfContents tocTemp = (TableOfContents) beanFactory.getBean(TOC_API); Set<Category> categories = tocTemp.getCategories(); storeRecursive(categories); // Get localized toc if (toc.containsKey(locale)) { localizedToc = toc.get(locale); } else { // Create and add localized toc localizedToc = new TableOfContentsBean(); toc.put(locale, localizedToc); } // Update localized toc categories localizedToc.getCategories().addAll(categories); } catch (Exception e) { LOG.warn("Unable to load help index from " + classpathUrl + " : " + e.getMessage()); } } } /** * register local content */ public void registerStaticContent() { // register static content Set<Tool> toolSet = toolManager.findTools(null, null); // find out what we want to ignore List<String> hideHelp = Arrays.asList(StringUtils.split(serverConfigurationService.getString("help.hide"), ",")); if (hideHelp == null) { hideHelp = new ArrayList<String> (); } for (Tool tool : toolSet) { if (tool != null && tool.getId() != null && !hideHelp.contains(tool.getId())) { String[] extraCollections = {}; String toolHelpCollections = tool.getRegisteredConfig().getProperty(TOOLCONFIG_HELP_COLLECTIONS); if (toolHelpCollections != null) { extraCollections = StringUtils.split(toolHelpCollections, ","); } // Loop throughout the locales list for (String locale : locales) { // Add localized tool helps addToolHelp("/" + tool.getId().toLowerCase().replaceAll("\\.", "_"), locale); // Add any other optional collections for (String extraCollection : extraCollections) { addToolHelp("/" + extraCollection, locale); } } } } // Sort the help topics for each locale for (String locale : locales) { TableOfContentsBean localizedToc = toc.get(locale); // Sort this localized toc categories with a TreeSet if (localizedToc != null) { Set<Category> sortedCategories = new TreeSet<Category>(); Set<Category> categories = localizedToc.getCategories(); sortedCategories.addAll(categories); localizedToc.setCategories(sortedCategories); } } } private static int cnt = 0; /** * Parse external help reg doc recursively * @param n * @param category */ public void recursiveExternalReg(Node n, Category category, Set<Category> categories) { if (n == null) { return; } NodeList nodeList = n.getChildNodes(); int nodeListLength = nodeList.getLength(); for (int i = 0; i < nodeListLength; i++) { if (nodeList.item(i).getNodeType() != Node.ELEMENT_NODE) { continue; } Node currentNode = nodeList.item(i); if ("category".equals(currentNode.getNodeName())) { Category childCategory = new CategoryBean(); childCategory.setName(currentNode.getAttributes().getNamedItem("name") .getNodeValue()); if (category != null) { childCategory.setParent(category); category.getCategories().add(childCategory); } storeCategory(childCategory); categories.add(childCategory); LOG.info("adding help category: " + childCategory.getName()); recursiveExternalReg(currentNode, childCategory, categories); } else if ("resource".equals(currentNode.getNodeName())) { Resource resource = new ResourceBean(); NamedNodeMap nnm = currentNode.getAttributes(); if (nnm != null) { // name required resource.setName(nnm.getNamedItem("name").getNodeValue()); if (nnm.getNamedItem("location") != null) { resource.setLocation(nnm.getNamedItem("location").getNodeValue()); } if (nnm.getNamedItem("docId") != null) { resource.setDocId(nnm.getNamedItem("docId").getNodeValue()); } else { resource.setDocId(Integer.valueOf(cnt).toString()); cnt++; } //defaultForTool is an optional attribute if (nnm.getNamedItem("defaultForTool") != null) { resource.setDefaultForTool(nnm.getNamedItem("defaultForTool") .getNodeValue()); } // welcomePage is an optional attribute if (nnm.getNamedItem("welcomePage") != null) { resource.setWelcomePage(nnm.getNamedItem("welcomePage") .getNodeValue().toLowerCase()); } } resource.setCategory(category); category.getResources().add(resource); storeResource(resource); LOG.info("adding help resource: " + resource + " to category: " + category.getName()); recursiveExternalReg(currentNode, category, categories); } } } /** * Parse corpus document * @param doc document */ public void registerCorpusDocs(org.w3c.dom.Document doc) { if (doc == null) { return; } List<String> arrayCorpus = new ArrayList<String>(); NodeList nodeList = doc.getElementsByTagName("id"); int nodeListLength = nodeList.getLength(); for (int i = 0; i < nodeListLength; i++) { Node currentNode = nodeList.item(i); NodeList nlChildren = currentNode.getChildNodes(); for (int j = 0; j < nlChildren.getLength(); j++){ if (nlChildren.item(j).getNodeType() == Node.TEXT_NODE){ arrayCorpus.add(nlChildren.item(j).getNodeValue()); } } } // iterate through corpus docs and add to home category if not already // added by help.xml external registration // if Home category does not exist, then create it if (getCategoryByName("Home") == null){ Category cat = new CategoryBean(); cat.setName("Home"); storeCategory(cat); } for (int i = 0; i < arrayCorpus.size(); i++){ String currentDocId = (String) arrayCorpus.get(i); // if the corpus doc does not already exist from help.xml, then add it to the Home category if (this.getResourceByDocId(currentDocId) == null){ Resource resource = new ResourceBean(); resource.setDocId(currentDocId); resource.setName(currentDocId); Category homeCategory = getCategoryByName("Home"); resource.setCategory(homeCategory); homeCategory.getResources().add(resource); storeResource(resource); } } } }
SAK-27489 - Splitting out the file so it can be closed later git-svn-id: 0100cd0bcd444eee702a49c1e7d1a40aa7e44e69@310791 66ffb92e-73f9-0310-93c1-f5514f145a0a
help/help-component/src/java/org/sakaiproject/component/app/help/HelpManagerImpl.java
SAK-27489 - Splitting out the file so it can be closed later
Java
apache-2.0
1bf45a16ee0fd5706196ce0558b4690eb9cf659e
0
nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/wfswarm-example-arjuna-old,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch,nmcl/scratch
import java.util.*; public class RepairDroid { public static final String INITIAL_INPUT = Integer.toString(DroidMovement.NORTH); public RepairDroid (Vector<String> instructions, boolean debug) { _debug = debug; _theComputer = new Intcode(instructions, INITIAL_INPUT, _debug); _location = new Coordinate(0, 0); // starting location _theMap = new Map(); _theMap.addContent(_location, TileId.TRAVERSE); } public final int moveToOxygenStation () { int numberOfSteps = 0; // create a map first! return numberOfSteps; } public void printGrid () { } private boolean recursiveSearch (Coordinate from) { while (!_theComputer.hasHalted()) { Coordinate coord = new Coordinate(from.getX(), from.getY()+1); _theComputer.setInput(new String(DroidMovement.NORTH)); _theComputer.singleStepExecution(); if (_theComputer.hasOutput()) { int response = Integer.parseInt(_theComputer.getOutput()); switch (response) { case DroidStatus.ARRIVED: { _theMap.addContent(coord, TileId.OXYGEN_STATION); return true; } break; case DroidStatus.COLLISION: { _theMap.addContent(from, TileId.WALL); // didn't move as we hit a wall return false; } break; case DroidStatus.MOVED: { _theMap.addContent(coord, TileId.TRAVERSE); return recursiveSearch(coord); } break; default: System.out.println("Unknown response: "+response); } } else System.out.println("Error - no output after move instruction!"); } return false; } private boolean tryToMove (Coordinate from) { while (!_theComputer.hasHalted()) { Coordinate coord = new Coordinate(from.getX(), from.getY()+1); _theComputer.setInput(new String(DroidMovement.NORTH)); _theComputer.singleStepExecution(); if (_theComputer.hasOutput()) { int response = Integer.parseInt(_theComputer.getOutput()); switch (response) { case DroidStatus.ARRIVED: { _theMap.addContent(coord, TileId.OXYGEN_STATION); return true; } break; case DroidStatus.COLLISION: { _theMap.addContent(from, TileId.WALL); // didn't move as we hit a wall return false; } break; case DroidStatus.MOVED: { _theMap.addContent(coord, TileId.TRAVERSE); return recursiveSearch(coord); } break; default: System.out.println("Unknown response: "+response); } } else System.out.println("Error - no output after move instruction!"); } return false; } private boolean _debug; private Intcode _theComputer; private Coordinate _location; private Map _theMap; }
AdventOfCode/2019/day15/RepairDroid.java
import java.util.*; public class RepairDroid { public static final String INITIAL_INPUT = Integer.toString(DroidMovement.NORTH); public RepairDroid (Vector<String> instructions, boolean debug) { _debug = debug; _theComputer = new Intcode(instructions, INITIAL_INPUT, _debug); _location = new Coordinate(0, 0); // starting location _theMap = new Map(); _theMap.addContent(_location, TileId.TRAVERSE); } public final int moveToOxygenStation () { int numberOfSteps = 0; // create a map first! return numberOfSteps; } public void printGrid () { } private boolean recursiveSearch (Coordinate from) { while (!_theComputer.hasHalted()) { Coordinate coord = new Coordinate(from.getX(), from.getY()+1); _theComputer.setInput(new String(DroidMovement.NORTH)); _theComputer.singleStepExecution(); if (_theComputer.hasOutput()) { int response = Integer.parseInt(_theComputer.getOutput()); switch (response) { case DroidStatus.ARRIVED: { _theMap.addContent(coord, TileId.OXYGEN_STATION); return true; } break; case DroidStatus.COLLISION: { _theMap.addContent(from, TileId.WALL); // didn't move as we hit a wall return false; } break; case DroidStatus.MOVED: { _theMap.addContent(coord, TileId.TRAVERSE); return recursiveSearch(coord); } break; default: System.out.println("Unknown response: "+response); } } else System.out.println("Error - no output after move instruction!"); } return false; } private boolean _debug; private Intcode _theComputer; private Coordinate _location; private Map _theMap; }
Update RepairDroid.java
AdventOfCode/2019/day15/RepairDroid.java
Update RepairDroid.java
Java
apache-2.0
32bb566d0d871571a4af923a78f9a0617e58a3f2
0
joansmith/dmix,hurzl/dmix,joansmith/dmix,jcnoir/dmix,abarisain/dmix,hurzl/dmix,jcnoir/dmix,0359xiaodong/dmix,abarisain/dmix,0359xiaodong/dmix
package org.a0z.mpd; import android.content.Context; import android.util.Log; import org.a0z.mpd.exception.MPDClientException; import org.a0z.mpd.exception.MPDConnectionException; import org.a0z.mpd.exception.MPDServerException; import java.net.InetAddress; import java.net.URL; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.LinkedList; import java.util.List; /** * MPD Server controller. * * @version $Id: MPD.java 2716 2004-11-20 17:37:20Z galmeida $ */ public class MPD { protected MPDConnection mpdConnection; protected MPDConnection mpdIdleConnection; protected MPDConnection mpdStatusConnection; protected MPDStatus mpdStatus; protected MPDPlaylist playlist; protected Directory rootDirectory; static protected boolean sortByTrackNumber = true; static protected boolean sortAlbumsByYear = false; static protected boolean showArtistAlbumCount = false; static protected boolean showAlbumTrackCount = true; static protected Context applicationContext = null; static public Context getApplicationContext() { return applicationContext; } static public void setApplicationContext(Context context) { applicationContext = context; } static public void setShowAlbumTrackCount(boolean v) { showAlbumTrackCount = v; } static public void setShowArtistAlbumCount(boolean v) { showArtistAlbumCount = v; } static public void setSortAlbumsByYear(boolean v) { sortAlbumsByYear = v; } static public void setSortByTrackNumber(boolean v) { sortByTrackNumber = v; } static public boolean showAlbumTrackCount() { return showAlbumTrackCount; } static public boolean showArtistAlbumCount() { return showArtistAlbumCount; } static public boolean sortAlbumsByYear() { return sortAlbumsByYear; } static public boolean sortByTrackNumber() { return sortByTrackNumber; } /** * Constructs a new MPD server controller without connection. */ public MPD() { this.playlist = new MPDPlaylist(this); this.mpdStatus = new MPDStatus(); this.rootDirectory = Directory.makeRootDirectory(this); } /** * Constructs a new MPD server controller. * * @param server server address or host name * @param port server port * @throws MPDServerException if an error occur while contacting server */ public MPD(InetAddress server, int port, String password) throws MPDServerException { this(); connect(server, port, password); } /** * Constructs a new MPD server controller. * * @param server server address or host name * @param port server port * @throws MPDServerException if an error occur while contacting server * @throws UnknownHostException */ public MPD(String server, int port, String password) throws MPDServerException, UnknownHostException { this(); connect(server, port, password); } public void add(Album album) throws MPDServerException { add(album, false, false); } public void add(final Album album, boolean replace, boolean play) throws MPDServerException { final Runnable r = new Runnable() { @Override public void run() { try { final ArrayList<Music> songs = new ArrayList<Music>(getSongs(album)); getPlaylist().addAll(songs); } catch (MPDServerException e) { e.printStackTrace(); } } }; add(r, replace, play); } public void add(Artist artist) throws MPDServerException { add(artist, false, false); } public void add(final Artist artist, boolean replace, boolean play) throws MPDServerException { final Runnable r = new Runnable() { @Override public void run() { try { final ArrayList<Music> songs = new ArrayList<Music>(getSongs(artist)); getPlaylist().addAll(songs); } catch (MPDServerException e) { e.printStackTrace(); } } }; add(r, replace, play); } public void add(final Directory directory, boolean replace, boolean play) throws MPDServerException { final Runnable r = new Runnable() { @Override public void run() { try { getPlaylist().add(directory); } catch (MPDServerException e) { e.printStackTrace(); } } }; add(r, replace, play); } public void add(final FilesystemTreeEntry music, boolean replace, boolean play) throws MPDServerException { final Runnable r = new Runnable() { @Override public void run() { try { if (music instanceof Music) { getPlaylist().add(music); } else if (music instanceof PlaylistFile) { getPlaylist().load(music.getFullpath()); } } catch (MPDServerException e) { e.printStackTrace(); } } }; add(r, replace, play); } public void add(Music music) throws MPDServerException { add(music, false, false); } /** * Adds songs to the queue. It is possible to request a clear of the current * one, and to start the playback once done. * * @param runnable The runnable that will be responsible of inserting the * songs into the queue * @param replace If true, clears the queue before inserting * @param play If true, starts playing once added * @throws MPDServerException */ public void add(Runnable runnable, boolean replace, boolean play) throws MPDServerException { int oldSize = 0; String status = null; if (replace) { status = getStatus().getState(); stop(); getPlaylist().clear(); } else if (play) { oldSize = getPlaylist().size(); } runnable.run(); if (replace) { if (play || MPDStatus.MPD_STATE_PLAYING.equals(status)) { play(); } } else if (play) { try { int id = getPlaylist().getByIndex(oldSize).getSongId(); skipToId(id); play(); } catch (NullPointerException e) { // If song adding fails, don't crash ! } } } public void add(String playlist) throws MPDServerException { add(playlist, false, false); } public void add(final String playlist, boolean replace, boolean play) throws MPDServerException { final Runnable r = new Runnable() { @Override public void run() { try { getPlaylist().load(playlist); } catch (MPDServerException e) { e.printStackTrace(); } } }; add(r, replace, play); } public void add(final URL stream, boolean replace, boolean play) throws MPDServerException { final Runnable r = new Runnable() { @Override public void run() { try { getPlaylist().add(stream); } catch (MPDServerException | MPDClientException e) { e.printStackTrace(); } } }; add(r, replace, play); } protected void addAlbumPaths(List<Album> albums) { if (albums == null || albums.size() == 0) { return; } for (Album a : albums) { try { List<Music> songs = getFirstTrack(a); if (songs.size() > 0) { a.setPath(songs.get(0).getPath()); } } catch (MPDServerException e) { } } } // Returns a pattern where all punctuation characters are escaped. public void addToPlaylist(String playlistName, Album album) throws MPDServerException { addToPlaylist(playlistName, new ArrayList<Music>(getSongs(album))); } public void addToPlaylist(String playlistName, Artist artist) throws MPDServerException { addToPlaylist(playlistName, new ArrayList<Music>(getSongs(artist))); } public void addToPlaylist(String playlistName, Collection<Music> c) throws MPDServerException { if (null == c || c.size() < 1) { return; } for (Music m : c) { getMpdConnection().queueCommand(MPDCommand.MPD_CMD_PLAYLIST_ADD, playlistName, m.getFullpath()); } getMpdConnection().sendCommandQueue(); } public void addToPlaylist(String playlistName, FilesystemTreeEntry entry) throws MPDServerException { getMpdConnection().sendCommand(MPDCommand.MPD_CMD_PLAYLIST_ADD, playlistName, entry.getFullpath()); } public void addToPlaylist(String playlistName, Music music) throws MPDServerException { final ArrayList<Music> songs = new ArrayList<Music>(); songs.add(music); addToPlaylist(playlistName, songs); } /** * Increases or decreases volume by <code>modifier</code> amount. * * @param modifier volume adjustment * @throws MPDServerException if an error occur while contacting server */ public void adjustVolume(int modifier) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); // calculate final volume (clip value with [0, 100]) int vol = getVolume() + modifier; vol = Math.max(MPDCommand.MIN_VOLUME, Math.min(MPDCommand.MAX_VOLUME, vol)); mpdConnection.sendCommand(MPDCommand.MPD_CMD_SET_VOLUME, Integer.toString(vol)); } /* * test whether given album is in given genre */ public boolean albumInGenre(Album album, Genre genre) throws MPDServerException { List<String> response = null; Artist artist = album.getArtist(); response = mpdConnection.sendCommand (new MPDCommand(MPDCommand.MPD_CMD_LIST_TAG, MPDCommand.MPD_TAG_ALBUM, MPDCommand.MPD_TAG_ALBUM, album.getName(), album.hasAlbumArtist() ? MPDCommand.MPD_TAG_ALBUM_ARTIST : MPDCommand.MPD_TAG_ARTIST, (artist == null ? "" : artist.getName()), MPDCommand.MPD_TAG_GENRE, genre.getName())); return (response.size() > 0); } /** * Clears error message. * * @throws MPDServerException if an error occur while contacting server. */ public void clearError() throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_CLEARERROR); } /** * Connects to a MPD server. * * @param server server address or host name * @param port server port */ public synchronized final void connect(InetAddress server, int port, String password) throws MPDServerException { if (!isConnected()) { this.mpdConnection = new MPDConnectionMultiSocket(server, port, 3, password, 5000); this.mpdIdleConnection = new MPDConnectionMonoSocket(server, port, password, 0); this.mpdStatusConnection = new MPDConnectionMonoSocket(server, port, password, 5000); } } /** * Connects to a MPD server. * * @param server server address or host name * @param port server port * @throws MPDServerException if an error occur while contacting server * @throws UnknownHostException */ public final void connect(String server, int port, String password) throws MPDServerException, UnknownHostException { InetAddress address = InetAddress.getByName(server); connect(address, port, password); } /** * Connects to a MPD server. * * @param server server address or host name and port (server:port) * @throws MPDServerException if an error occur while contacting server * @throws UnknownHostException */ public final void connect(String server, String password) throws MPDServerException, UnknownHostException { int port = MPDCommand.DEFAULT_MPD_PORT; String host = null; if (server.indexOf(':') != -1) { host = server.substring(0, server.lastIndexOf(':')); port = Integer.parseInt(server.substring(server.lastIndexOf(':') + 1)); } else { host = server; } connect(host, port, password); } public void disableOutput(int id) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_OUTPUTDISABLE, Integer.toString(id)); } /** * Disconnects from server. * * @throws MPDServerException if an error occur while closing connection */ public synchronized void disconnect() throws MPDServerException { MPDServerException ex = null; if (mpdConnection != null && mpdConnection.isConnected()) { try { mpdConnection.sendCommand(MPDCommand.MPD_CMD_CLOSE); } catch (MPDServerException e) { ex = e; } } if (mpdConnection != null && mpdConnection.isConnected()) { try { mpdConnection.disconnect(); } catch (MPDServerException e) { ex = (ex != null) ? ex : e;// Always keep first non null // exception } } if (mpdIdleConnection != null && mpdIdleConnection.isConnected()) { try { mpdIdleConnection.disconnect(); } catch (MPDServerException e) { ex = (ex != null) ? ex : e;// Always keep non null first // exception } } if (mpdStatusConnection != null && mpdStatusConnection.isConnected()) { try { mpdStatusConnection.disconnect(); } catch (MPDServerException e) { ex = (ex != null) ? ex : e;// Always keep non null first // exception } } if (ex != null) { // throw ex; } } public void enableOutput(int id) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_OUTPUTENABLE, Integer.toString(id)); } /** * Similar to <code>search</code>,<code>find</code> looks for exact matches * in the MPD database. * * @param type type of search. Should be one of the following constants: * MPD_FIND_ARTIST, MPD_FIND_ALBUM * @param string case-insensitive locator string. Anything that exactly * matches <code>string</code> will be returned in the results. * @return a Collection of <code>Music</code> * @throws MPDServerException if an error occur while contacting server * @see org.a0z.mpd.Music */ public List<Music> find(String type, String string) throws MPDServerException { return genericSearch(MPDCommand.MPD_CMD_FIND, type, string); } public List<Music> find(String[] args) throws MPDServerException { return genericSearch(MPDCommand.MPD_CMD_FIND, args, true); } /* * For all given albums, look for albumartists and create as many albums as * there are albumartists, including "" The server call can be slow for long * album lists */ protected void fixAlbumArtists(List<Album> albums) { if (albums == null || albums.size() == 0) { return; } List<String[]> albumartists = null; try { albumartists = listAlbumArtists(albums); } catch (MPDServerException e) { } if (albumartists == null || albumartists.size() != albums.size()) { return; } List<Album> splitalbums = new ArrayList<Album>(); int i = 0; for (Album a : albums) { String[] aartists = albumartists.get(i); if (aartists.length > 0) { Arrays.sort(aartists); // make sure "" is the first one if (!"".equals(aartists[0])) { // one albumartist, fix this // album a.setArtist(new Artist(aartists[0])); a.setHasAlbumArtist(true); } // do nothing if albumartist is "" if (aartists.length > 1) { // it's more than one album, insert for (int n = 1; n < aartists.length; n++) { Album newalbum = new Album(a.getName(), new Artist(aartists[n]), true); splitalbums.add(newalbum); } } } i++; } albums.addAll(splitalbums); } protected List<Music> genericSearch(String searchCommand, String args[], boolean sort) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); return Music.getMusicFromList(mpdConnection.sendCommand(searchCommand, args), sort); } protected List<Music> genericSearch(String searchCommand, String type, String strToFind) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); List<String> response = mpdConnection.sendCommand(searchCommand, type, strToFind); return Music.getMusicFromList(response, true); } public int getAlbumCount(Artist artist, boolean useAlbumArtistTag) throws MPDServerException { return listAlbums(artist.getName(), useAlbumArtistTag).size(); } public int getAlbumCount(String artist, boolean useAlbumArtistTag) throws MPDServerException { if (mpdConnection == null) { throw new MPDServerException("MPD Connection is not established"); } return listAlbums(artist, useAlbumArtistTag).size(); } /** * Recursively retrieves all songs and directories. * * @param dir directory to list. * @throws MPDServerException if an error occur while contacting server. * @return <code>FileStorage</code> with all songs and directories. */ /* * public Directory listAllFiles(String dir) throws MPDServerException { * if(!isConnected()) throw new * MPDServerException("MPD Connection is not established"); List<String> * list = mpdConnection.sendCommand(MPD_CMD_LISTALL, dir); for (String line * : list) { if (line.startsWith("directory: ")) { * rootDirectory.makeDirectory(line.substring("directory: ".length())); } * else if (line.startsWith("file: ")) { rootDirectory.addFile(new * Music(line.substring("file: ".length()))); } } return rootDirectory; } */ protected void getAlbumDetails(List<Album> albums, boolean findYear) throws MPDServerException { for (Album a : albums) { mpdConnection.queueCommand(getAlbumDetailsCommand(a)); } List<String[]> response = mpdConnection.sendCommandQueueSeparated(); if (response.size() != albums.size()) { // Log.d("MPD AlbumDetails", "non matching results "+ // response.size()+" != "+ albums.size()); return; } for (int i = 0; i < response.size(); i++) { String[] list = response.get(i); Album a = albums.get(i); for (String line : list) { if (line.startsWith("songs: ")) { a.setSongCount(Long.parseLong(line.substring("songs: ".length()))); } else if (line.startsWith("playtime: ")) { a.setDuration(Long.parseLong(line.substring("playtime: ".length()))); } } if (findYear) { List<Music> songs = getFirstTrack(a); if (null != songs && !songs.isEmpty()) { a.setYear(songs.get(0).getDate()); a.setPath(songs.get(0).getPath()); } } } } protected MPDCommand getAlbumDetailsCommand(Album album) throws MPDServerException { if (album.hasAlbumArtist()) { return new MPDCommand(MPDCommand.MPD_CMD_COUNT, MPDCommand.MPD_TAG_ALBUM, album.getName(), MPDCommand.MPD_TAG_ALBUM_ARTIST, album.getArtist().getName()); } else { // only get albums without albumartist return new MPDCommand(MPDCommand.MPD_CMD_COUNT, MPDCommand.MPD_TAG_ALBUM, album.getName(), MPDCommand.MPD_TAG_ARTIST, album.getArtist().getName(), MPDCommand.MPD_TAG_ALBUM_ARTIST, ""); } } public List<Album> getAlbums(Artist artist, boolean trackCountNeeded) throws MPDServerException { List<Album> a_albums = getAlbums(artist, trackCountNeeded, false); // 1. the null artist list already contains all albums // 2. the "unknown artist" should not list unknown albumartists if (artist != null && !artist.isUnknown()) { return Item.merged(a_albums, getAlbums(artist, trackCountNeeded, true)); } return a_albums; } public List<Album> getAlbums(Artist artist, boolean trackCountNeeded, boolean useAlbumArtist) throws MPDServerException { if (artist == null) { return getAllAlbums(trackCountNeeded); } List<String> albumNames = listAlbums(artist.getName(), useAlbumArtist); List<Album> albums = new ArrayList<Album>(); if (null == albumNames || albumNames.isEmpty()) { return albums; } for (String album : albumNames) { albums.add(new Album(album, artist, useAlbumArtist)); } if (!useAlbumArtist) { fixAlbumArtists(albums); } // after fixing albumartists if (((MPD.showAlbumTrackCount() && trackCountNeeded) || MPD.sortAlbumsByYear())) { getAlbumDetails(albums, MPD.sortAlbumsByYear()); } if (!MPD.sortAlbumsByYear()) { addAlbumPaths(albums); } Collections.sort(albums); return albums; } /** * @return all Albums */ public List<Album> getAllAlbums(boolean trackCountNeeded) throws MPDServerException { List<String> albumNames = listAlbums(); List<Album> albums = new ArrayList<Album>(); if (null == albumNames || albumNames.isEmpty()) { return albums; // empty list } for (String album : albumNames) { albums.add(new Album(album, null)); } Collections.sort(albums); return albums; } public List<Artist> getArtists() throws MPDServerException { return Item.merged(getArtists(true), getArtists(false)); } public List<Artist> getArtists(boolean useAlbumArtist) throws MPDServerException { List<String> artistNames = useAlbumArtist ? listAlbumArtists() : listArtists(true); List<Artist> artists = new ArrayList<Artist>(); if (null != artistNames && !artistNames.isEmpty()) { for (String artist : artistNames) { artists.add(new Artist(artist, MPD.showArtistAlbumCount() ? getAlbumCount(artist, useAlbumArtist) : 0)); } } Collections.sort(artists); return artists; } public List<Artist> getArtists(Genre genre) throws MPDServerException { return Item.merged(getArtists(genre, false), getArtists(genre, true)); } public List<Artist> getArtists(Genre genre, boolean useAlbumArtist) throws MPDServerException { List<String> artistNames = useAlbumArtist ? listAlbumArtists(genre) : listArtists( genre.getName(), true); List<Artist> artists = new ArrayList<Artist>(); if (null != artistNames && !artistNames.isEmpty()) { for (String artist : artistNames) { artists.add(new Artist(artist, MPD.showArtistAlbumCount() ? getAlbumCount(artist, useAlbumArtist) : 0)); } } Collections.sort(artists); return artists; } /** * Retrieves a database directory listing of the base of the database * directory path. * * @return a <code>Collection</code> of <code>Music</code> and * <code>Directory</code> representing directory entries. * @throws MPDServerException if an error occur while contacting server. * @see Music * @see Directory */ public List<FilesystemTreeEntry> getDir() throws MPDServerException { return getDir(null); } /** * Retrieves a database directory listing of <code>path</code> directory. * * @param path Directory to be listed. * @return a <code>Collection</code> of <code>Music</code> and * <code>Directory</code> representing directory entries. * @throws MPDServerException if an error occur while contacting server. * @see Music * @see Directory */ public List<FilesystemTreeEntry> getDir(String path) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); List<String> response = mpdConnection.sendCommand(MPDCommand.MPD_CMD_LSDIR, path); LinkedList<String> lineCache = new LinkedList<String>(); LinkedList<FilesystemTreeEntry> result = new LinkedList<FilesystemTreeEntry>(); for (String line : response) { // If we detect a new file element and the line cache isn't empty // dump the linecache into a music item if (line.startsWith("file: ") && lineCache.size() > 0) { result.add(new Music(lineCache)); lineCache.clear(); } if (line.startsWith("playlist: ")) { lineCache.clear(); line = line.substring("playlist: ".length()); result.add(new PlaylistFile(line)); } else if (line.startsWith("directory: ")) { lineCache.clear(); line = line.substring("directory: ".length()); result.add(rootDirectory.makeDirectory(line)); } else { lineCache.add(line); } } if (lineCache.size() > 0) { // Don't create a music object if the line cache does not contain any // It can happen for playlist and directory items with supplementary information for (String line : lineCache) { if (line.startsWith("file: ")) { result.add(new Music(lineCache)); break; } } } return result; } protected List<Music> getFirstTrack(Album album) throws MPDServerException { Artist artist = album.getArtist(); String[] args = new String[6]; args[0] = (artist == null ? "" : album.hasAlbumArtist() ? MPDCommand.MPD_TAG_ALBUM_ARTIST : MPDCommand.MPD_TAG_ARTIST); args[1] = (artist == null ? "" : artist.getName()); args[2] = MPDCommand.MPD_TAG_ALBUM; args[3] = album.getName(); args[4] = "track"; args[5] = "1"; List<Music> songs = find(args); if (null == songs || songs.isEmpty()) { args[5] = "01"; songs = find(args); } if (null == songs || songs.isEmpty()) { args[5] = "1"; songs = search(args); } if (null == songs || songs.isEmpty()) { String[] args2 = Arrays.copyOf(args, 4); // find all tracks songs = find(args2); } return songs; } public List<Genre> getGenres() throws MPDServerException { List<String> genreNames = listGenres(); List<Genre> genres = null; if (null != genreNames && !genreNames.isEmpty()) { genres = new ArrayList<Genre>(); for (String genre : genreNames) { genres.add(new Genre(genre)); } } if (null != genres) { Collections.sort(genres); } return genres; } /** * Retrieves <code>MPDConnection</code>. * * @return <code>MPDConnection</code>. */ public MPDConnection getMpdConnection() { return this.mpdConnection; } MPDConnection getMpdIdleConnection() { return this.mpdIdleConnection; } /** * Returns MPD server version. * * @return MPD Server version. */ public String getMpdVersion() throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); int[] version = mpdIdleConnection.getMpdVersion(); StringBuffer sb = new StringBuffer(); for (int i = 0; i < version.length; i++) { sb.append(version[i]); if (i < (version.length - 1)) sb.append("."); } return sb.toString(); } /** * Returns the available outputs * * @return List of available outputs */ public List<MPDOutput> getOutputs() throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); List<MPDOutput> result = new LinkedList<MPDOutput>(); List<String> response = mpdConnection.sendCommand(MPDCommand.MPD_CMD_OUTPUTS); LinkedList<String> lineCache = new LinkedList<String>(); for (String line : response) { if (line.startsWith("outputid: ")) { if (lineCache.size() != 0) { result.add(new MPDOutput(lineCache)); lineCache.clear(); } } lineCache.add(line); } if (lineCache.size() != 0) { result.add(new MPDOutput(lineCache)); } return result; } /** * Retrieves <code>playlist</code>. * * @return playlist. */ public MPDPlaylist getPlaylist() { return this.playlist; } /** * Returns a list of all available playlists */ public List<Item> getPlaylists() throws MPDServerException { return getPlaylists(false); } /** * Returns a list of all available playlists * * @param sort whether the return list should be sorted */ public List<Item> getPlaylists(boolean sort) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); List<Item> result = new ArrayList<Item>(); List<String> response = mpdConnection.sendCommand(MPDCommand.MPD_CMD_LISTPLAYLISTS); for (String line : response) { if (line.startsWith("playlist")) result.add(new Playlist(line.substring("playlist: ".length()))); } if (sort) Collections.sort(result); return result; } public List<Music> getPlaylistSongs(String playlistName) throws MPDServerException { String args[] = new String[1]; args[0] = playlistName; List<Music> music = genericSearch(MPDCommand.MPD_CMD_PLAYLIST_INFO, args, false); for (int i = 0; i < music.size(); ++i) { music.get(i).setSongId(i); } return music; } /** * Retrieves root directory. * * @return root directory. */ public Directory getRootDirectory() { return rootDirectory; } public List<Music> getSongs(Album album) throws MPDServerException { List<Music> songs = Music.getMusicFromList (getMpdConnection().sendCommand(getSongsCommand(album)), true); if (album.hasAlbumArtist()) { // remove songs that don't have this albumartist // (mpd >=0.18 puts them in) String artistname = album.getArtist().getName(); for (int i = songs.size() - 1; i >= 0; i--) { if (!(artistname.equals(songs.get(i).getAlbumArtist()))) { songs.remove(i); } } } if (null != songs) { Collections.sort(songs); } return songs; } public List<Music> getSongs(Artist artist) throws MPDServerException { List<Album> albums = getAlbums(artist, false); List<Music> songs = new ArrayList<Music>(); for (Album a : albums) { songs.addAll(getSongs(a)); } return songs; } public MPDCommand getSongsCommand(Album album) { String albumname = album.getName(); Artist artist = album.getArtist(); if (null == artist) { // get songs for ANY artist return new MPDCommand(MPDCommand.MPD_CMD_FIND, MPDCommand.MPD_TAG_ALBUM, albumname); } String artistname = artist.getName(); if (album.hasAlbumArtist()) { return new MPDCommand(MPDCommand.MPD_CMD_FIND, MPDCommand.MPD_TAG_ALBUM, albumname, MPDCommand.MPD_TAG_ALBUM_ARTIST, artistname); } else { return new MPDCommand(MPDCommand.MPD_CMD_FIND, MPDCommand.MPD_TAG_ALBUM, albumname, MPDCommand.MPD_TAG_ARTIST, artistname, MPDCommand.MPD_TAG_ALBUM_ARTIST, ""); } } /** * Retrieves statistics for the connected server. * * @return statistics for the connected server. * @throws MPDServerException if an error occur while contacting server. */ public MPDStatistics getStatistics() throws MPDServerException { List<String> response = mpdConnection.sendCommand(MPDCommand.MPD_CMD_STATISTICS); return new MPDStatistics(response); } /** * Retrieves status of the connected server. * * @return status of the connected server. * @throws MPDServerException if an error occur while contacting server. */ public MPDStatus getStatus() throws MPDServerException { return getStatus(false); } /** * Retrieves status of the connected server. * * @return status of the connected server. * @throws MPDServerException if an error occur while contacting server. */ public MPDStatus getStatus(boolean forceRefresh) throws MPDServerException { if (forceRefresh || mpdStatus == null || mpdStatus.getState() == null) { if (!isConnected()) { throw new MPDConnectionException("MPD Connection is not established"); } List<String> response = mpdStatusConnection.sendCommand(MPDCommand.MPD_CMD_STATUS); mpdStatus.updateStatus(response); } return mpdStatus; } /** * Retrieves current volume. * * @return current volume. * @throws MPDServerException if an error occur while contacting server. */ public int getVolume() throws MPDServerException { return this.getStatus().getVolume(); } /** * Returns true when connected and false when not connected. * * @return true when connected and false when not connected */ public boolean isConnected() { return mpdIdleConnection != null && mpdStatusConnection != null && mpdConnection != null && mpdIdleConnection.isConnected(); } public boolean isMpdConnectionNull() { return (this.mpdConnection == null); } public List<String> listAlbumArtists() throws MPDServerException { return listAlbumArtists(true); } /** * List all album artist names from database. * * @return album artist names from database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listAlbumArtists(boolean sortInsensitive) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); List<String> response = mpdConnection.sendCommand(MPDCommand.MPD_CMD_LIST_TAG, MPDCommand.MPD_TAG_ALBUM_ARTIST); ArrayList<String> result = new ArrayList<String>(); for (String s : response) { String name = s.substring("albumartist: ".length()); result.add(name); } if (sortInsensitive) Collections.sort(result, String.CASE_INSENSITIVE_ORDER); else Collections.sort(result); return result; } public List<String> listAlbumArtists(Genre genre) throws MPDServerException { return listAlbumArtists(genre, true); } /** * List all album artist names from database. * * @return album artist names from database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listAlbumArtists(Genre genre, boolean sortInsensitive) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); List<String> response = mpdConnection.sendCommand(MPDCommand.MPD_CMD_LIST_TAG, MPDCommand.MPD_TAG_ALBUM_ARTIST, MPDCommand.MPD_TAG_GENRE, genre.getName()); ArrayList<String> result = new ArrayList<String>(); for (String s : response) { String name = s.substring("albumartist: ".length()); result.add(name); } if (sortInsensitive) Collections.sort(result, String.CASE_INSENSITIVE_ORDER); else Collections.sort(result); return result; } public List<String[]> listAlbumArtists(List<Album> albums) throws MPDServerException { for (Album a : albums) { mpdConnection.queueCommand(new MPDCommand(MPDCommand.MPD_CMD_LIST_TAG, MPDCommand.MPD_TAG_ALBUM_ARTIST, MPDCommand.MPD_TAG_ARTIST, a.getArtist().getName(), MPDCommand.MPD_TAG_ALBUM, a.getName())); } List<String[]> response = mpdConnection.sendCommandQueueSeparated(); if (response.size() != albums.size()) { Log.d("MPD listAlbumArtists", "ERROR"); return null; } for (int i = 0; i < response.size(); i++) { for (int j = 0; j < response.get(i).length; j++) { response.get(i)[j] = response.get(i)[j].substring("AlbumArtist: ".length()); } } return response; } /** * List all albums from database. * * @return <code>Collection</code> with all album names from database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listAlbums() throws MPDServerException { return listAlbums(null, false, true); } /** * List all albums from database. * * @param useAlbumArtist use AlbumArtist instead of Artist * @return <code>Collection</code> with all album names from database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listAlbums(boolean useAlbumArtist) throws MPDServerException { return listAlbums(null, useAlbumArtist, true); } /** * List all albums from a given artist, including an entry for songs with no * album tag. * * @param artist artist to list albums * @param useAlbumArtist use AlbumArtist instead of Artist * @return <code>Collection</code> with all album names from database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listAlbums(String artist, boolean useAlbumArtist) throws MPDServerException { return listAlbums(artist, useAlbumArtist, true); } /** * List all albums from a given artist. * * @param artist artist to list albums * @param useAlbumArtist use AlbumArtist instead of Artist * @param includeUnknownAlbum include an entry for songs with no album tag * @return <code>Collection</code> with all album names from the given * artist present in database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listAlbums(String artist, boolean useAlbumArtist, boolean includeUnknownAlbum) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); boolean foundSongWithoutAlbum = false; List<String> response = mpdConnection.sendCommand (listAlbumsCommand(artist, useAlbumArtist)); ArrayList<String> result = new ArrayList<String>(); for (String line : response) { String name = line.substring("Album: ".length()); if (name.length() > 0) { result.add(name); } else { foundSongWithoutAlbum = true; } } // add a single blank entry to host all songs without an album set if ((includeUnknownAlbum == true) && (foundSongWithoutAlbum == true)) { result.add(""); } Collections.sort(result); return result; } /* * get raw command String for listAlbums */ public MPDCommand listAlbumsCommand(String artist, boolean useAlbumArtist) { if (useAlbumArtist) { return new MPDCommand(MPDCommand.MPD_CMD_LIST_TAG, MPDCommand.MPD_TAG_ALBUM, MPDCommand.MPD_TAG_ALBUM_ARTIST, artist); } else { return new MPDCommand(MPDCommand.MPD_CMD_LIST_TAG, MPDCommand.MPD_TAG_ALBUM, artist); } } /** * List all artist names from database. * * @return artist names from database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listArtists() throws MPDServerException { return listArtists(true); } /** * List all artist names from database. * * @param sortInsensitive boolean for insensitive sort when true * @return artist names from database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listArtists(boolean sortInsensitive) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); List<String> response = mpdConnection.sendCommand(MPDCommand.MPD_CMD_LIST_TAG, MPDCommand.MPD_TAG_ARTIST); ArrayList<String> result = new ArrayList<String>(); for (String s : response) { result.add(s.substring("Artist: ".length())); } if (sortInsensitive) Collections.sort(result, String.CASE_INSENSITIVE_ORDER); else Collections.sort(result); return result; } /* * List all albumartist or artist names of all given albums from database. * @return list of array of artist names for each album. * @throws MPDServerException if an error occurs while contacting server. */ public List<String[]> listArtists(List<Album> albums, boolean albumArtist) throws MPDServerException { if (!isConnected()) { throw new MPDServerException("MPD Connection is not established"); } ArrayList<String[]> result = new ArrayList<String[]>(); if (albums == null) { return result; } for (Album a : albums) { // When adding album artist to existing artist check that the artist // matches if (albumArtist && a.getArtist() != null && !a.getArtist().isUnknown()) { mpdConnection.queueCommand (new MPDCommand(MPDCommand.MPD_CMD_LIST_TAG, MPDCommand.MPD_TAG_ALBUM_ARTIST, MPDCommand.MPD_TAG_ALBUM, a.getName(), MPDCommand.MPD_TAG_ARTIST, a.getArtist().getName())); } else { mpdConnection.queueCommand (new MPDCommand(MPDCommand.MPD_CMD_LIST_TAG, (albumArtist ? MPDCommand.MPD_TAG_ALBUM_ARTIST : MPDCommand.MPD_TAG_ARTIST), MPDCommand.MPD_TAG_ALBUM, a .getName())); } } List<String[]> responses = mpdConnection.sendCommandQueueSeparated(); for (String[] r : responses) { ArrayList<String> albumresult = new ArrayList<String>(); for (String s : r) { String name = s.substring((albumArtist ? "AlbumArtist: " : "Artist: ").length()); albumresult.add(name); } result.add(albumresult.toArray(new String[albumresult.size()])); } return result; } /** * List all artist names from database. * * @return artist names from database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listArtists(String genre) throws MPDServerException { return listArtists(genre, true); } /** * List all artist names from database. * * @param sortInsensitive boolean for insensitive sort when true * @return artist names from database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listArtists(String genre, boolean sortInsensitive) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); List<String> response = mpdConnection.sendCommand(MPDCommand.MPD_CMD_LIST_TAG, MPDCommand.MPD_TAG_ARTIST, MPDCommand.MPD_TAG_GENRE, genre); ArrayList<String> result = new ArrayList<String>(); for (String s : response) { String name = s.substring("Artist: ".length()); result.add(name); } if (sortInsensitive) Collections.sort(result, String.CASE_INSENSITIVE_ORDER); else Collections.sort(result); return result; } /** * List all genre names from database. * * @return artist names from database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listGenres() throws MPDServerException { return listGenres(true); } /** * List all genre names from database. * * @param sortInsensitive boolean for insensitive sort when true * @return artist names from database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listGenres(boolean sortInsensitive) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); List<String> response = mpdConnection.sendCommand(MPDCommand.MPD_CMD_LIST_TAG, MPDCommand.MPD_TAG_GENRE); ArrayList<String> result = new ArrayList<String>(); for (String s : response) { String name = s.substring("Genre: ".length()); result.add(name); } if (sortInsensitive) Collections.sort(result, String.CASE_INSENSITIVE_ORDER); else Collections.sort(result); return result; } public void movePlaylistSong(String playlistName, int from, int to) throws MPDServerException { getMpdConnection().sendCommand(MPDCommand.MPD_CMD_PLAYLIST_MOVE, playlistName, Integer.toString(from), Integer.toString(to)); } /** * Jumps to next playlist track. * * @throws MPDServerException if an error occur while contacting server. */ public void next() throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_NEXT); } /** * Pauses/Resumes music playing. * * @throws MPDServerException if an error occur while contacting server. */ public void pause() throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_PAUSE); } /** * Starts playing music. * * @throws MPDServerException if an error occur while contacting server. */ public void play() throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_PLAY); } /** * Plays previous playlist music. * * @throws MPDServerException if an error occur while contacting server.. */ public void previous() throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_PREV); } /** * Tells server to refresh database. * * @throws MPDServerException if an error occur while contacting server. */ public void refreshDatabase() throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_REFRESH); } /** * Tells server to refresh database. * * @throws MPDServerException if an error occur while contacting server. */ public void refreshDatabase(String folder) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_REFRESH, folder); } public void removeFromPlaylist(String playlistName, Integer pos) throws MPDServerException { getMpdConnection().sendCommand(MPDCommand.MPD_CMD_PLAYLIST_DEL, playlistName, Integer.toString(pos)); } /** * Similar to <code>find</code>,<code>search</code> looks for partial * matches in the MPD database. * * @param type type of search. Should be one of the following constants: * MPD_SEARCH_ARTIST, MPD_SEARCH_TITLE, MPD_SEARCH_ALBUM, * MPD_SEARCH_FILENAME * @param string case-insensitive locator string. Anything that contains * <code>string</code> will be returned in the results. * @return a Collection of <code>Music</code>. * @throws MPDServerException if an error occur while contacting server. * @see org.a0z.mpd.Music */ public Collection<Music> search(String type, String string) throws MPDServerException { return genericSearch(MPDCommand.MPD_CMD_SEARCH, type, string); } public List<Music> search(String[] args) throws MPDServerException { return genericSearch(MPDCommand.MPD_CMD_SEARCH, args, true); } /** * Seeks current music to the position. * * @param position song position in seconds * @throws MPDServerException if an error occur while contacting server. */ public void seek(long position) throws MPDServerException { seekById(this.getStatus().getSongId(), position); } /** * Seeks music to the position. * * @param songId music id in playlist. * @param position song position in seconds. * @throws MPDServerException if an error occur while contacting server. */ public void seekById(int songId, long position) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_SEEK_ID, Integer.toString(songId), Long.toString(position)); } /** * Seeks music to the position. * * @param index music position in playlist. * @param position song position in seconds. * @throws MPDServerException if an error occur while contacting server. */ public void seekByIndex(int index, long position) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_SEEK, Integer.toString(index), Long.toString(position)); } /** * Enabled or disable consuming. * * @param consume if true song consuming will be enabled, if false song * consuming will be disabled. * @throws MPDServerException if an error occur while contacting server. */ public void setConsume(boolean consume) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_CONSUME, consume ? "1" : "0"); } /** * Sets cross-fade. * * @param time cross-fade time in seconds. 0 to disable cross-fade. * @throws MPDServerException if an error occur while contacting server. */ public void setCrossfade(int time) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection .sendCommand(MPDCommand.MPD_CMD_CROSSFADE, Integer.toString(Math.max(0, time))); } /** * Enabled or disable random. * * @param random if true random will be enabled, if false random will be * disabled. * @throws MPDServerException if an error occur while contacting server. */ public void setRandom(boolean random) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_RANDOM, random ? "1" : "0"); } /** * Enabled or disable repeating. * * @param repeat if true repeating will be enabled, if false repeating will * be disabled. * @throws MPDServerException if an error occur while contacting server. */ public void setRepeat(boolean repeat) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_REPEAT, repeat ? "1" : "0"); } /** * Enabled or disable single mode. * * @param single if true single mode will be enabled, if false single mode * will be disabled. * @throws MPDServerException if an error occur while contacting server. */ public void setSingle(boolean single) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_SINGLE, single ? "1" : "0"); } /** * Sets volume to <code>volume</code>. * * @param volume new volume value, must be in 0-100 range. * @throws MPDServerException if an error occur while contacting server. */ public void setVolume(int volume) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); int vol = Math.max(MPDCommand.MIN_VOLUME, Math.min(MPDCommand.MAX_VOLUME, volume)); mpdConnection.sendCommand(MPDCommand.MPD_CMD_SET_VOLUME, Integer.toString(vol)); } /** * Kills server. * * @throws MPDServerException if an error occur while contacting server. */ public void shutdown() throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_KILL); } /** * Skip to song with specified <code>id</code>. * * @param id song id. * @throws MPDServerException if an error occur while contacting server. */ public void skipToId(int id) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_PLAY_ID, Integer.toString(id)); } /** * Jumps to track <code>position</code> from playlist. * * @param position track number. * @throws MPDServerException if an error occur while contacting server. * @see #skipToId(int) */ public void skipToPosition(int position) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_PLAY, Integer.toString(position)); } /** * Stops music playing. * * @throws MPDServerException if an error occur while contacting server. */ public void stop() throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_STOP); } /** * Wait for server changes using "idle" command on the dedicated connection. * * @return Data read from the server. * @throws MPDServerException if an error occur while contacting server */ public List<String> waitForChanges() throws MPDServerException { while (mpdIdleConnection != null && mpdIdleConnection.isConnected()) { List<String> data = mpdIdleConnection .sendAsyncCommand(MPDCommand.MPD_CMD_IDLE); if (data.isEmpty()) { continue; } return data; } throw new MPDConnectionException("IDLE connection lost"); } }
JMPDComm/src/org/a0z/mpd/MPD.java
package org.a0z.mpd; import android.content.Context; import android.util.Log; import org.a0z.mpd.exception.MPDClientException; import org.a0z.mpd.exception.MPDConnectionException; import org.a0z.mpd.exception.MPDServerException; import java.net.InetAddress; import java.net.URL; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.LinkedList; import java.util.List; /** * MPD Server controller. * * @version $Id: MPD.java 2716 2004-11-20 17:37:20Z galmeida $ */ public class MPD { protected MPDConnection mpdConnection; protected MPDConnection mpdIdleConnection; protected MPDConnection mpdStatusConnection; protected MPDStatus mpdStatus; protected MPDPlaylist playlist; protected Directory rootDirectory; static protected boolean sortByTrackNumber = true; static protected boolean sortAlbumsByYear = false; static protected boolean showArtistAlbumCount = false; static protected boolean showAlbumTrackCount = true; static protected Context applicationContext = null; static public Context getApplicationContext() { return applicationContext; } static public void setApplicationContext(Context context) { applicationContext = context; } static public void setShowAlbumTrackCount(boolean v) { showAlbumTrackCount = v; } static public void setShowArtistAlbumCount(boolean v) { showArtistAlbumCount = v; } static public void setSortAlbumsByYear(boolean v) { sortAlbumsByYear = v; } static public void setSortByTrackNumber(boolean v) { sortByTrackNumber = v; } static public boolean showAlbumTrackCount() { return showAlbumTrackCount; } static public boolean showArtistAlbumCount() { return showArtistAlbumCount; } static public boolean sortAlbumsByYear() { return sortAlbumsByYear; } static public boolean sortByTrackNumber() { return sortByTrackNumber; } /** * Constructs a new MPD server controller without connection. */ public MPD() { this.playlist = new MPDPlaylist(this); this.mpdStatus = new MPDStatus(); this.rootDirectory = Directory.makeRootDirectory(this); } /** * Constructs a new MPD server controller. * * @param server server address or host name * @param port server port * @throws MPDServerException if an error occur while contacting server */ public MPD(InetAddress server, int port, String password) throws MPDServerException { this(); connect(server, port, password); } /** * Constructs a new MPD server controller. * * @param server server address or host name * @param port server port * @throws MPDServerException if an error occur while contacting server * @throws UnknownHostException */ public MPD(String server, int port, String password) throws MPDServerException, UnknownHostException { this(); connect(server, port, password); } public void add(Album album) throws MPDServerException { add(album, false, false); } public void add(final Album album, boolean replace, boolean play) throws MPDServerException { final Runnable r = new Runnable() { @Override public void run() { try { final ArrayList<Music> songs = new ArrayList<Music>(getSongs(album)); getPlaylist().addAll(songs); } catch (MPDServerException e) { e.printStackTrace(); } } }; add(r, replace, play); } public void add(Artist artist) throws MPDServerException { add(artist, false, false); } public void add(final Artist artist, boolean replace, boolean play) throws MPDServerException { final Runnable r = new Runnable() { @Override public void run() { try { final ArrayList<Music> songs = new ArrayList<Music>(getSongs(artist)); getPlaylist().addAll(songs); } catch (MPDServerException e) { e.printStackTrace(); } } }; add(r, replace, play); } public void add(final Directory directory, boolean replace, boolean play) throws MPDServerException { final Runnable r = new Runnable() { @Override public void run() { try { getPlaylist().add(directory); } catch (MPDServerException e) { e.printStackTrace(); } } }; add(r, replace, play); } public void add(final FilesystemTreeEntry music, boolean replace, boolean play) throws MPDServerException { final Runnable r = new Runnable() { @Override public void run() { try { if (music instanceof Music) { getPlaylist().add(music); } else if (music instanceof PlaylistFile) { getPlaylist().load(music.getFullpath()); } } catch (MPDServerException e) { e.printStackTrace(); } } }; add(r, replace, play); } public void add(Music music) throws MPDServerException { add(music, false, false); } /** * Adds songs to the queue. It is possible to request a clear of the current * one, and to start the playback once done. * * @param runnable The runnable that will be responsible of inserting the * songs into the queue * @param replace If true, clears the queue before inserting * @param play If true, starts playing once added * @throws MPDServerException */ public void add(Runnable runnable, boolean replace, boolean play) throws MPDServerException { int oldSize = 0; String status = null; if (replace) { status = getStatus().getState(); stop(); getPlaylist().clear(); } else if (play) { oldSize = getPlaylist().size(); } runnable.run(); if (replace) { if (play || MPDStatus.MPD_STATE_PLAYING.equals(status)) { play(); } } else if (play) { try { int id = getPlaylist().getByIndex(oldSize).getSongId(); skipToId(id); play(); } catch (NullPointerException e) { // If song adding fails, don't crash ! } } } public void add(String playlist) throws MPDServerException { add(playlist, false, false); } public void add(final String playlist, boolean replace, boolean play) throws MPDServerException { final Runnable r = new Runnable() { @Override public void run() { try { getPlaylist().load(playlist); } catch (MPDServerException e) { e.printStackTrace(); } } }; add(r, replace, play); } public void add(final URL stream, boolean replace, boolean play) throws MPDServerException { final Runnable r = new Runnable() { @Override public void run() { try { getPlaylist().add(stream); } catch (MPDServerException | MPDClientException e) { e.printStackTrace(); } } }; add(r, replace, play); } protected void addAlbumPaths(List<Album> albums) { if (albums == null || albums.size() == 0) { return; } for (Album a : albums) { try { List<Music> songs = getFirstTrack(a); if (songs.size() > 0) { a.setPath(songs.get(0).getPath()); } } catch (MPDServerException e) { } } } // Returns a pattern where all punctuation characters are escaped. public void addToPlaylist(String playlistName, Album album) throws MPDServerException { addToPlaylist(playlistName, new ArrayList<Music>(getSongs(album))); } public void addToPlaylist(String playlistName, Artist artist) throws MPDServerException { addToPlaylist(playlistName, new ArrayList<Music>(getSongs(artist))); } public void addToPlaylist(String playlistName, Collection<Music> c) throws MPDServerException { if (null == c || c.size() < 1) { return; } for (Music m : c) { getMpdConnection().queueCommand(MPDCommand.MPD_CMD_PLAYLIST_ADD, playlistName, m.getFullpath()); } getMpdConnection().sendCommandQueue(); } public void addToPlaylist(String playlistName, FilesystemTreeEntry entry) throws MPDServerException { getMpdConnection().sendCommand(MPDCommand.MPD_CMD_PLAYLIST_ADD, playlistName, entry.getFullpath()); } public void addToPlaylist(String playlistName, Music music) throws MPDServerException { final ArrayList<Music> songs = new ArrayList<Music>(); songs.add(music); addToPlaylist(playlistName, songs); } /** * Increases or decreases volume by <code>modifier</code> amount. * * @param modifier volume adjustment * @throws MPDServerException if an error occur while contacting server */ public void adjustVolume(int modifier) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); // calculate final volume (clip value with [0, 100]) int vol = getVolume() + modifier; vol = Math.max(MPDCommand.MIN_VOLUME, Math.min(MPDCommand.MAX_VOLUME, vol)); mpdConnection.sendCommand(MPDCommand.MPD_CMD_SET_VOLUME, Integer.toString(vol)); } /* * test whether given album is in given genre */ public boolean albumInGenre(Album album, Genre genre) throws MPDServerException { List<String> response = null; Artist artist = album.getArtist(); response = mpdConnection.sendCommand (new MPDCommand(MPDCommand.MPD_CMD_LIST_TAG, MPDCommand.MPD_TAG_ALBUM, MPDCommand.MPD_TAG_ALBUM, album.getName(), album.hasAlbumArtist() ? MPDCommand.MPD_TAG_ALBUM_ARTIST : MPDCommand.MPD_TAG_ARTIST, (artist == null ? "" : artist.getName()), MPDCommand.MPD_TAG_GENRE, genre.getName())); return (response.size() > 0); } /** * Clears error message. * * @throws MPDServerException if an error occur while contacting server. */ public void clearError() throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_CLEARERROR); } /** * Connects to a MPD server. * * @param server server address or host name * @param port server port */ public synchronized final void connect(InetAddress server, int port, String password) throws MPDServerException { if (!isConnected()) { this.mpdConnection = new MPDConnectionMultiSocket(server, port, 3, password, 5000); this.mpdIdleConnection = new MPDConnectionMonoSocket(server, port, password, 0); this.mpdStatusConnection = new MPDConnectionMonoSocket(server, port, password, 5000); } } /** * Connects to a MPD server. * * @param server server address or host name * @param port server port * @throws MPDServerException if an error occur while contacting server * @throws UnknownHostException */ public final void connect(String server, int port, String password) throws MPDServerException, UnknownHostException { InetAddress address = InetAddress.getByName(server); connect(address, port, password); } /** * Connects to a MPD server. * * @param server server address or host name and port (server:port) * @throws MPDServerException if an error occur while contacting server * @throws UnknownHostException */ public final void connect(String server, String password) throws MPDServerException, UnknownHostException { int port = MPDCommand.DEFAULT_MPD_PORT; String host = null; if (server.indexOf(':') != -1) { host = server.substring(0, server.lastIndexOf(':')); port = Integer.parseInt(server.substring(server.lastIndexOf(':') + 1)); } else { host = server; } connect(host, port, password); } public void disableOutput(int id) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_OUTPUTDISABLE, Integer.toString(id)); } /** * Disconnects from server. * * @throws MPDServerException if an error occur while closing connection */ public synchronized void disconnect() throws MPDServerException { MPDServerException ex = null; if (mpdConnection != null && mpdConnection.isConnected()) { try { mpdConnection.sendCommand(MPDCommand.MPD_CMD_CLOSE); } catch (MPDServerException e) { ex = e; } } if (mpdConnection != null && mpdConnection.isConnected()) { try { mpdConnection.disconnect(); } catch (MPDServerException e) { ex = (ex != null) ? ex : e;// Always keep first non null // exception } } if (mpdIdleConnection != null && mpdIdleConnection.isConnected()) { try { mpdIdleConnection.disconnect(); } catch (MPDServerException e) { ex = (ex != null) ? ex : e;// Always keep non null first // exception } } if (mpdStatusConnection != null && mpdStatusConnection.isConnected()) { try { mpdStatusConnection.disconnect(); } catch (MPDServerException e) { ex = (ex != null) ? ex : e;// Always keep non null first // exception } } if (ex != null) { // throw ex; } } public void enableOutput(int id) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_OUTPUTENABLE, Integer.toString(id)); } /** * Similar to <code>search</code>,<code>find</code> looks for exact matches * in the MPD database. * * @param type type of search. Should be one of the following constants: * MPD_FIND_ARTIST, MPD_FIND_ALBUM * @param string case-insensitive locator string. Anything that exactly * matches <code>string</code> will be returned in the results. * @return a Collection of <code>Music</code> * @throws MPDServerException if an error occur while contacting server * @see org.a0z.mpd.Music */ public List<Music> find(String type, String string) throws MPDServerException { return genericSearch(MPDCommand.MPD_CMD_FIND, type, string); } public List<Music> find(String[] args) throws MPDServerException { return genericSearch(MPDCommand.MPD_CMD_FIND, args, true); } /* * For all given albums, look for albumartists and create as many albums as * there are albumartists, including "" The server call can be slow for long * album lists */ protected void fixAlbumArtists(List<Album> albums) { if (albums == null || albums.size() == 0) { return; } List<String[]> albumartists = null; try { albumartists = listAlbumArtists(albums); } catch (MPDServerException e) { } if (albumartists == null || albumartists.size() != albums.size()) { return; } List<Album> splitalbums = new ArrayList<Album>(); int i = 0; for (Album a : albums) { String[] aartists = albumartists.get(i); if (aartists.length > 0) { Arrays.sort(aartists); // make sure "" is the first one if (!"".equals(aartists[0])) { // one albumartist, fix this // album a.setArtist(new Artist(aartists[0])); a.setHasAlbumArtist(true); } // do nothing if albumartist is "" if (aartists.length > 1) { // it's more than one album, insert for (int n = 1; n < aartists.length; n++) { Album newalbum = new Album(a.getName(), new Artist(aartists[n]), true); splitalbums.add(newalbum); } } } i++; } albums.addAll(splitalbums); } protected List<Music> genericSearch(String searchCommand, String args[], boolean sort) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); return Music.getMusicFromList(mpdConnection.sendCommand(searchCommand, args), sort); } protected List<Music> genericSearch(String searchCommand, String type, String strToFind) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); List<String> response = mpdConnection.sendCommand(searchCommand, type, strToFind); return Music.getMusicFromList(response, true); } public int getAlbumCount(Artist artist, boolean useAlbumArtistTag) throws MPDServerException { return listAlbums(artist.getName(), useAlbumArtistTag).size(); } public int getAlbumCount(String artist, boolean useAlbumArtistTag) throws MPDServerException { if (mpdConnection == null) { throw new MPDServerException("MPD Connection is not established"); } return listAlbums(artist, useAlbumArtistTag).size(); } /** * Recursively retrieves all songs and directories. * * @param dir directory to list. * @throws MPDServerException if an error occur while contacting server. * @return <code>FileStorage</code> with all songs and directories. */ /* * public Directory listAllFiles(String dir) throws MPDServerException { * if(!isConnected()) throw new * MPDServerException("MPD Connection is not established"); List<String> * list = mpdConnection.sendCommand(MPD_CMD_LISTALL, dir); for (String line * : list) { if (line.startsWith("directory: ")) { * rootDirectory.makeDirectory(line.substring("directory: ".length())); } * else if (line.startsWith("file: ")) { rootDirectory.addFile(new * Music(line.substring("file: ".length()))); } } return rootDirectory; } */ protected void getAlbumDetails(List<Album> albums, boolean findYear) throws MPDServerException { for (Album a : albums) { mpdConnection.queueCommand(getAlbumDetailsCommand(a)); } List<String[]> response = mpdConnection.sendCommandQueueSeparated(); if (response.size() != albums.size()) { // Log.d("MPD AlbumDetails", "non matching results "+ // response.size()+" != "+ albums.size()); return; } for (int i = 0; i < response.size(); i++) { String[] list = response.get(i); Album a = albums.get(i); for (String line : list) { if (line.startsWith("songs: ")) { a.setSongCount(Long.parseLong(line.substring("songs: ".length()))); } else if (line.startsWith("playtime: ")) { a.setDuration(Long.parseLong(line.substring("playtime: ".length()))); } } if (findYear) { List<Music> songs = getFirstTrack(a); if (null != songs && !songs.isEmpty()) { a.setYear(songs.get(0).getDate()); a.setPath(songs.get(0).getPath()); } } } } protected MPDCommand getAlbumDetailsCommand(Album album) throws MPDServerException { if (album.hasAlbumArtist()) { return new MPDCommand(MPDCommand.MPD_CMD_COUNT, MPDCommand.MPD_TAG_ALBUM, album.getName(), MPDCommand.MPD_TAG_ALBUM_ARTIST, album.getArtist().getName()); } else { // only get albums without albumartist return new MPDCommand(MPDCommand.MPD_CMD_COUNT, MPDCommand.MPD_TAG_ALBUM, album.getName(), MPDCommand.MPD_TAG_ARTIST, album.getArtist().getName(), MPDCommand.MPD_TAG_ALBUM_ARTIST, ""); } } public List<Album> getAlbums(Artist artist, boolean trackCountNeeded) throws MPDServerException { List<Album> a_albums = getAlbums(artist, trackCountNeeded, false); // 1. the null artist list already contains all albums // 2. the "unknown artist" should not list unknown albumartists if (artist != null && !artist.isUnknown()) { return Item.merged(a_albums, getAlbums(artist, trackCountNeeded, true)); } return a_albums; } public List<Album> getAlbums(Artist artist, boolean trackCountNeeded, boolean useAlbumArtist) throws MPDServerException { if (artist == null) { return getAllAlbums(trackCountNeeded); } List<String> albumNames = listAlbums(artist.getName(), useAlbumArtist); List<Album> albums = new ArrayList<Album>(); if (null == albumNames || albumNames.isEmpty()) { return albums; } for (String album : albumNames) { albums.add(new Album(album, artist, useAlbumArtist)); } if (!useAlbumArtist) { fixAlbumArtists(albums); } // after fixing albumartists if (((MPD.showAlbumTrackCount() && trackCountNeeded) || MPD.sortAlbumsByYear())) { getAlbumDetails(albums, MPD.sortAlbumsByYear()); } if (!MPD.sortAlbumsByYear()) { addAlbumPaths(albums); } Collections.sort(albums); return albums; } /** * @return all Albums */ public List<Album> getAllAlbums(boolean trackCountNeeded) throws MPDServerException { List<String> albumNames = listAlbums(); List<Album> albums = new ArrayList<Album>(); if (null == albumNames || albumNames.isEmpty()) { return albums; // empty list } for (String album : albumNames) { albums.add(new Album(album, null)); } Collections.sort(albums); return albums; } public List<Artist> getArtists() throws MPDServerException { return Item.merged(getArtists(true), getArtists(false)); } public List<Artist> getArtists(boolean useAlbumArtist) throws MPDServerException { List<String> artistNames = useAlbumArtist ? listAlbumArtists() : listArtists(true); List<Artist> artists = new ArrayList<Artist>(); if (null != artistNames && !artistNames.isEmpty()) { for (String artist : artistNames) { artists.add(new Artist(artist, MPD.showArtistAlbumCount() ? getAlbumCount(artist, useAlbumArtist) : 0)); } } Collections.sort(artists); return artists; } public List<Artist> getArtists(Genre genre) throws MPDServerException { return Item.merged(getArtists(genre, false), getArtists(genre, true)); } public List<Artist> getArtists(Genre genre, boolean useAlbumArtist) throws MPDServerException { List<String> artistNames = useAlbumArtist ? listAlbumArtists(genre) : listArtists( genre.getName(), true); List<Artist> artists = new ArrayList<Artist>(); if (null != artistNames && !artistNames.isEmpty()) { for (String artist : artistNames) { artists.add(new Artist(artist, MPD.showArtistAlbumCount() ? getAlbumCount(artist, useAlbumArtist) : 0)); } } Collections.sort(artists); return artists; } /** * Retrieves a database directory listing of the base of the database * directory path. * * @return a <code>Collection</code> of <code>Music</code> and * <code>Directory</code> representing directory entries. * @throws MPDServerException if an error occur while contacting server. * @see Music * @see Directory */ public List<FilesystemTreeEntry> getDir() throws MPDServerException { return getDir(null); } /** * Retrieves a database directory listing of <code>path</code> directory. * * @param path Directory to be listed. * @return a <code>Collection</code> of <code>Music</code> and * <code>Directory</code> representing directory entries. * @throws MPDServerException if an error occur while contacting server. * @see Music * @see Directory */ public List<FilesystemTreeEntry> getDir(String path) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); List<String> response = mpdConnection.sendCommand(MPDCommand.MPD_CMD_LSDIR, path); LinkedList<String> lineCache = new LinkedList<String>(); LinkedList<FilesystemTreeEntry> result = new LinkedList<FilesystemTreeEntry>(); for (String line : response) { // If we detect a new file element and the line cache isn't empty // dump the linecache into a music item if (line.startsWith("file: ") && lineCache.size() > 0) { result.add(new Music(lineCache)); lineCache.clear(); } if (line.startsWith("playlist: ")) { lineCache.clear(); line = line.substring("playlist: ".length()); result.add(new PlaylistFile(line)); } else if (line.startsWith("directory: ")) { lineCache.clear(); line = line.substring("directory: ".length()); result.add(rootDirectory.makeDirectory(line)); } else { lineCache.add(line); } } if (lineCache.size() > 0) { // Don't create a music object if the line cache does not contain any // It can happen for playlist and directory items with supplementary information for (String line : lineCache) { if (line.startsWith("file: ")) { result.add(new Music(lineCache)); break; } } } return result; } protected List<Music> getFirstTrack(Album album) throws MPDServerException { Artist artist = album.getArtist(); String[] args = new String[6]; args[0] = (artist == null ? "" : album.hasAlbumArtist() ? MPDCommand.MPD_TAG_ALBUM_ARTIST : MPDCommand.MPD_TAG_ARTIST); args[1] = (artist == null ? "" : artist.getName()); args[2] = MPDCommand.MPD_TAG_ALBUM; args[3] = album.getName(); args[4] = "track"; args[5] = "1"; List<Music> songs = find(args); if (null == songs || songs.isEmpty()) { args[5] = "01"; songs = find(args); } if (null == songs || songs.isEmpty()) { args[5] = "1"; songs = search(args); } if (null == songs || songs.isEmpty()) { String[] args2 = Arrays.copyOf(args, 4); // find all tracks songs = find(args2); } return songs; } public List<Genre> getGenres() throws MPDServerException { List<String> genreNames = listGenres(); List<Genre> genres = null; if (null != genreNames && !genreNames.isEmpty()) { genres = new ArrayList<Genre>(); for (String genre : genreNames) { genres.add(new Genre(genre)); } } if (null != genres) { Collections.sort(genres); } return genres; } /** * Retrieves <code>MPDConnection</code>. * * @return <code>MPDConnection</code>. */ public MPDConnection getMpdConnection() { return this.mpdConnection; } MPDConnection getMpdIdleConnection() { return this.mpdIdleConnection; } /** * Returns MPD server version. * * @return MPD Server version. */ public String getMpdVersion() throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); int[] version = mpdIdleConnection.getMpdVersion(); StringBuffer sb = new StringBuffer(); for (int i = 0; i < version.length; i++) { sb.append(version[i]); if (i < (version.length - 1)) sb.append("."); } return sb.toString(); } /** * Returns the available outputs * * @return List of available outputs */ public List<MPDOutput> getOutputs() throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); List<MPDOutput> result = new LinkedList<MPDOutput>(); List<String> response = mpdConnection.sendCommand(MPDCommand.MPD_CMD_OUTPUTS); LinkedList<String> lineCache = new LinkedList<String>(); for (String line : response) { if (line.startsWith("outputid: ")) { if (lineCache.size() != 0) { result.add(new MPDOutput(lineCache)); lineCache.clear(); } } lineCache.add(line); } if (lineCache.size() != 0) { result.add(new MPDOutput(lineCache)); } return result; } /** * Retrieves <code>playlist</code>. * * @return playlist. */ public MPDPlaylist getPlaylist() { return this.playlist; } /** * Returns a list of all available playlists */ public List<Item> getPlaylists() throws MPDServerException { return getPlaylists(false); } /** * Returns a list of all available playlists * * @param sort whether the return list should be sorted */ public List<Item> getPlaylists(boolean sort) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); List<Item> result = new ArrayList<Item>(); List<String> response = mpdConnection.sendCommand(MPDCommand.MPD_CMD_LISTPLAYLISTS); for (String line : response) { if (line.startsWith("playlist")) result.add(new Playlist(line.substring("playlist: ".length()))); } if (sort) Collections.sort(result); return result; } public List<Music> getPlaylistSongs(String playlistName) throws MPDServerException { String args[] = new String[1]; args[0] = playlistName; List<Music> music = genericSearch(MPDCommand.MPD_CMD_PLAYLIST_INFO, args, false); for (int i = 0; i < music.size(); ++i) { music.get(i).setSongId(i); } return music; } /** * Retrieves root directory. * * @return root directory. */ public Directory getRootDirectory() { return rootDirectory; } public List<Music> getSongs(Album album) throws MPDServerException { List<Music> songs = Music.getMusicFromList (getMpdConnection().sendCommand(getSongsCommand(album)), true); if (album.hasAlbumArtist()) { // remove songs that don't have this albumartist // (mpd >=0.18 puts them in) String artistname = album.getArtist().getName(); for (int i = songs.size() - 1; i >= 0; i--) { if (!(artistname.equals(songs.get(i).getAlbumArtist()))) { songs.remove(i); } } } if (null != songs) { Collections.sort(songs); } return songs; } public List<Music> getSongs(Artist artist) throws MPDServerException { List<Album> albums = getAlbums(artist, false); List<Music> songs = new ArrayList<Music>(); for (Album a : albums) { songs.addAll(getSongs(a)); } return songs; } public MPDCommand getSongsCommand(Album album) { String albumname = album.getName(); Artist artist = album.getArtist(); if (null == artist) { // get songs for ANY artist return new MPDCommand(MPDCommand.MPD_CMD_FIND, MPDCommand.MPD_TAG_ALBUM, albumname); } String artistname = artist.getName(); if (album.hasAlbumArtist()) { return new MPDCommand(MPDCommand.MPD_CMD_FIND, MPDCommand.MPD_TAG_ALBUM, albumname, MPDCommand.MPD_TAG_ALBUM_ARTIST, artistname); } else { return new MPDCommand(MPDCommand.MPD_CMD_FIND, MPDCommand.MPD_TAG_ALBUM, albumname, MPDCommand.MPD_TAG_ARTIST, artistname, MPDCommand.MPD_TAG_ALBUM_ARTIST, ""); } } /** * Retrieves statistics for the connected server. * * @return statistics for the connected server. * @throws MPDServerException if an error occur while contacting server. */ public MPDStatistics getStatistics() throws MPDServerException { List<String> response = mpdConnection.sendCommand(MPDCommand.MPD_CMD_STATISTICS); return new MPDStatistics(response); } /** * Retrieves status of the connected server. * * @return status of the connected server. * @throws MPDServerException if an error occur while contacting server. */ public MPDStatus getStatus() throws MPDServerException { return getStatus(false); } /** * Retrieves status of the connected server. * * @return status of the connected server. * @throws MPDServerException if an error occur while contacting server. */ public MPDStatus getStatus(boolean forceRefresh) throws MPDServerException { if (forceRefresh || mpdStatus == null || mpdStatus.getState() == null) { if (!isConnected()) { throw new MPDConnectionException("MPD Connection is not established"); } List<String> response = mpdStatusConnection.sendCommand(MPDCommand.MPD_CMD_STATUS); mpdStatus.updateStatus(response); } return mpdStatus; } /** * Retrieves current volume. * * @return current volume. * @throws MPDServerException if an error occur while contacting server. */ public int getVolume() throws MPDServerException { return this.getStatus().getVolume(); } /** * Returns true when connected and false when not connected. * * @return true when connected and false when not connected */ public boolean isConnected() { return mpdIdleConnection != null && mpdStatusConnection != null && mpdConnection != null && mpdIdleConnection.isConnected(); } public boolean isMpdConnectionNull() { return (this.mpdConnection == null); } public List<String> listAlbumArtists() throws MPDServerException { return listAlbumArtists(true); } /** * List all album artist names from database. * * @return album artist names from database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listAlbumArtists(boolean sortInsensitive) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); List<String> response = mpdConnection.sendCommand(MPDCommand.MPD_CMD_LIST_TAG, MPDCommand.MPD_TAG_ALBUM_ARTIST); ArrayList<String> result = new ArrayList<String>(); for (String s : response) { String name = s.substring("albumartist: ".length()); result.add(name); } if (sortInsensitive) Collections.sort(result, String.CASE_INSENSITIVE_ORDER); else Collections.sort(result); return result; } public List<String> listAlbumArtists(Genre genre) throws MPDServerException { return listAlbumArtists(genre, true); } /** * List all album artist names from database. * * @return album artist names from database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listAlbumArtists(Genre genre, boolean sortInsensitive) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); List<String> response = mpdConnection.sendCommand(MPDCommand.MPD_CMD_LIST_TAG, MPDCommand.MPD_TAG_ALBUM_ARTIST, MPDCommand.MPD_TAG_GENRE, genre.getName()); ArrayList<String> result = new ArrayList<String>(); for (String s : response) { String name = s.substring("albumartist: ".length()); result.add(name); } if (sortInsensitive) Collections.sort(result, String.CASE_INSENSITIVE_ORDER); else Collections.sort(result); return result; } public List<String[]> listAlbumArtists(List<Album> albums) throws MPDServerException { for (Album a : albums) { mpdConnection.queueCommand(new MPDCommand(MPDCommand.MPD_CMD_LIST_TAG, MPDCommand.MPD_TAG_ALBUM_ARTIST, MPDCommand.MPD_TAG_ARTIST, a.getArtist().getName(), MPDCommand.MPD_TAG_ALBUM, a.getName())); } List<String[]> response = mpdConnection.sendCommandQueueSeparated(); if (response.size() != albums.size()) { Log.d("MPD listAlbumArtists", "ERROR"); return null; } for (int i = 0; i < response.size(); i++) { for (int j = 0; j < response.get(i).length; j++) { response.get(i)[j] = response.get(i)[j].substring("AlbumArtist: ".length()); } } return response; } /** * List all albums from database. * * @return <code>Collection</code> with all album names from database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listAlbums() throws MPDServerException { return listAlbums(null, false, true); } /** * List all albums from database. * * @param useAlbumArtist use AlbumArtist instead of Artist * @return <code>Collection</code> with all album names from database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listAlbums(boolean useAlbumArtist) throws MPDServerException { return listAlbums(null, useAlbumArtist, true); } /** * List all albums from a given artist, including an entry for songs with no * album tag. * * @param artist artist to list albums * @param useAlbumArtist use AlbumArtist instead of Artist * @return <code>Collection</code> with all album names from database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listAlbums(String artist, boolean useAlbumArtist) throws MPDServerException { return listAlbums(artist, useAlbumArtist, true); } /** * List all albums from a given artist. * * @param artist artist to list albums * @param useAlbumArtist use AlbumArtist instead of Artist * @param includeUnknownAlbum include an entry for songs with no album tag * @return <code>Collection</code> with all album names from the given * artist present in database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listAlbums(String artist, boolean useAlbumArtist, boolean includeUnknownAlbum) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); boolean foundSongWithoutAlbum = false; List<String> response = mpdConnection.sendCommand (listAlbumsCommand(artist, useAlbumArtist)); ArrayList<String> result = new ArrayList<String>(); for (String line : response) { String name = line.substring("Album: ".length()); if (name.length() > 0) { result.add(name); } else { foundSongWithoutAlbum = true; } } // add a single blank entry to host all songs without an album set if ((includeUnknownAlbum == true) && (foundSongWithoutAlbum == true)) { result.add(""); } Collections.sort(result); return result; } /* * get raw command String for listAlbums */ public MPDCommand listAlbumsCommand(String artist, boolean useAlbumArtist) { if (useAlbumArtist) { return new MPDCommand(MPDCommand.MPD_CMD_LIST_TAG, MPDCommand.MPD_TAG_ALBUM, MPDCommand.MPD_TAG_ALBUM_ARTIST, artist); } else { return new MPDCommand(MPDCommand.MPD_CMD_LIST_TAG, MPDCommand.MPD_TAG_ALBUM, artist); } } /** * List all artist names from database. * * @return artist names from database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listArtists() throws MPDServerException { return listArtists(true); } /** * List all artist names from database. * * @param sortInsensitive boolean for insensitive sort when true * @return artist names from database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listArtists(boolean sortInsensitive) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); List<String> response = mpdConnection.sendCommand(MPDCommand.MPD_CMD_LIST_TAG, MPDCommand.MPD_TAG_ARTIST); ArrayList<String> result = new ArrayList<String>(); for (String s : response) { result.add(s.substring("Artist: ".length())); } if (sortInsensitive) Collections.sort(result, String.CASE_INSENSITIVE_ORDER); else Collections.sort(result); return result; } /* * List all albumartist or artist names of all given albums from database. * @return list of array of artist names for each album. * @throws MPDServerException if an error occurs while contacting server. */ public List<String[]> listArtists(List<Album> albums, boolean albumArtist) throws MPDServerException { if (!isConnected()) { throw new MPDServerException("MPD Connection is not established"); } ArrayList<String[]> result = new ArrayList<String[]>(); if (albums == null) { return result; } for (Album a : albums) { // When adding album artist to existing artist check that the artist // matches if (albumArtist && a.getArtist() != null && !a.getArtist().isUnknown()) { mpdConnection.queueCommand (new MPDCommand(MPDCommand.MPD_CMD_LIST_TAG, MPDCommand.MPD_TAG_ALBUM_ARTIST, MPDCommand.MPD_TAG_ALBUM, a.getName(), MPDCommand.MPD_TAG_ARTIST, a.getArtist().getName())); } else { mpdConnection.queueCommand (new MPDCommand(MPDCommand.MPD_CMD_LIST_TAG, (albumArtist ? MPDCommand.MPD_TAG_ALBUM_ARTIST : MPDCommand.MPD_TAG_ARTIST), MPDCommand.MPD_TAG_ALBUM, a .getName())); } } List<String[]> responses = mpdConnection.sendCommandQueueSeparated(); for (String[] r : responses) { ArrayList<String> albumresult = new ArrayList<String>(); for (String s : r) { String name = s.substring((albumArtist ? "AlbumArtist: " : "Artist: ").length()); albumresult.add(name); } result.add(albumresult.toArray(new String[0])); } return result; } /** * List all artist names from database. * * @return artist names from database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listArtists(String genre) throws MPDServerException { return listArtists(genre, true); } /** * List all artist names from database. * * @param sortInsensitive boolean for insensitive sort when true * @return artist names from database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listArtists(String genre, boolean sortInsensitive) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); List<String> response = mpdConnection.sendCommand(MPDCommand.MPD_CMD_LIST_TAG, MPDCommand.MPD_TAG_ARTIST, MPDCommand.MPD_TAG_GENRE, genre); ArrayList<String> result = new ArrayList<String>(); for (String s : response) { String name = s.substring("Artist: ".length()); result.add(name); } if (sortInsensitive) Collections.sort(result, String.CASE_INSENSITIVE_ORDER); else Collections.sort(result); return result; } /** * List all genre names from database. * * @return artist names from database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listGenres() throws MPDServerException { return listGenres(true); } /** * List all genre names from database. * * @param sortInsensitive boolean for insensitive sort when true * @return artist names from database. * @throws MPDServerException if an error occur while contacting server. */ public List<String> listGenres(boolean sortInsensitive) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); List<String> response = mpdConnection.sendCommand(MPDCommand.MPD_CMD_LIST_TAG, MPDCommand.MPD_TAG_GENRE); ArrayList<String> result = new ArrayList<String>(); for (String s : response) { String name = s.substring("Genre: ".length()); result.add(name); } if (sortInsensitive) Collections.sort(result, String.CASE_INSENSITIVE_ORDER); else Collections.sort(result); return result; } public void movePlaylistSong(String playlistName, int from, int to) throws MPDServerException { getMpdConnection().sendCommand(MPDCommand.MPD_CMD_PLAYLIST_MOVE, playlistName, Integer.toString(from), Integer.toString(to)); } /** * Jumps to next playlist track. * * @throws MPDServerException if an error occur while contacting server. */ public void next() throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_NEXT); } /** * Pauses/Resumes music playing. * * @throws MPDServerException if an error occur while contacting server. */ public void pause() throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_PAUSE); } /** * Starts playing music. * * @throws MPDServerException if an error occur while contacting server. */ public void play() throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_PLAY); } /** * Plays previous playlist music. * * @throws MPDServerException if an error occur while contacting server.. */ public void previous() throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_PREV); } /** * Tells server to refresh database. * * @throws MPDServerException if an error occur while contacting server. */ public void refreshDatabase() throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_REFRESH); } /** * Tells server to refresh database. * * @throws MPDServerException if an error occur while contacting server. */ public void refreshDatabase(String folder) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_REFRESH, folder); } public void removeFromPlaylist(String playlistName, Integer pos) throws MPDServerException { getMpdConnection().sendCommand(MPDCommand.MPD_CMD_PLAYLIST_DEL, playlistName, Integer.toString(pos)); } /** * Similar to <code>find</code>,<code>search</code> looks for partial * matches in the MPD database. * * @param type type of search. Should be one of the following constants: * MPD_SEARCH_ARTIST, MPD_SEARCH_TITLE, MPD_SEARCH_ALBUM, * MPD_SEARCH_FILENAME * @param string case-insensitive locator string. Anything that contains * <code>string</code> will be returned in the results. * @return a Collection of <code>Music</code>. * @throws MPDServerException if an error occur while contacting server. * @see org.a0z.mpd.Music */ public Collection<Music> search(String type, String string) throws MPDServerException { return genericSearch(MPDCommand.MPD_CMD_SEARCH, type, string); } public List<Music> search(String[] args) throws MPDServerException { return genericSearch(MPDCommand.MPD_CMD_SEARCH, args, true); } /** * Seeks current music to the position. * * @param position song position in seconds * @throws MPDServerException if an error occur while contacting server. */ public void seek(long position) throws MPDServerException { seekById(this.getStatus().getSongId(), position); } /** * Seeks music to the position. * * @param songId music id in playlist. * @param position song position in seconds. * @throws MPDServerException if an error occur while contacting server. */ public void seekById(int songId, long position) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_SEEK_ID, Integer.toString(songId), Long.toString(position)); } /** * Seeks music to the position. * * @param index music position in playlist. * @param position song position in seconds. * @throws MPDServerException if an error occur while contacting server. */ public void seekByIndex(int index, long position) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_SEEK, Integer.toString(index), Long.toString(position)); } /** * Enabled or disable consuming. * * @param consume if true song consuming will be enabled, if false song * consuming will be disabled. * @throws MPDServerException if an error occur while contacting server. */ public void setConsume(boolean consume) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_CONSUME, consume ? "1" : "0"); } /** * Sets cross-fade. * * @param time cross-fade time in seconds. 0 to disable cross-fade. * @throws MPDServerException if an error occur while contacting server. */ public void setCrossfade(int time) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection .sendCommand(MPDCommand.MPD_CMD_CROSSFADE, Integer.toString(Math.max(0, time))); } /** * Enabled or disable random. * * @param random if true random will be enabled, if false random will be * disabled. * @throws MPDServerException if an error occur while contacting server. */ public void setRandom(boolean random) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_RANDOM, random ? "1" : "0"); } /** * Enabled or disable repeating. * * @param repeat if true repeating will be enabled, if false repeating will * be disabled. * @throws MPDServerException if an error occur while contacting server. */ public void setRepeat(boolean repeat) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_REPEAT, repeat ? "1" : "0"); } /** * Enabled or disable single mode. * * @param single if true single mode will be enabled, if false single mode * will be disabled. * @throws MPDServerException if an error occur while contacting server. */ public void setSingle(boolean single) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_SINGLE, single ? "1" : "0"); } /** * Sets volume to <code>volume</code>. * * @param volume new volume value, must be in 0-100 range. * @throws MPDServerException if an error occur while contacting server. */ public void setVolume(int volume) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); int vol = Math.max(MPDCommand.MIN_VOLUME, Math.min(MPDCommand.MAX_VOLUME, volume)); mpdConnection.sendCommand(MPDCommand.MPD_CMD_SET_VOLUME, Integer.toString(vol)); } /** * Kills server. * * @throws MPDServerException if an error occur while contacting server. */ public void shutdown() throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_KILL); } /** * Skip to song with specified <code>id</code>. * * @param id song id. * @throws MPDServerException if an error occur while contacting server. */ public void skipToId(int id) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_PLAY_ID, Integer.toString(id)); } /** * Jumps to track <code>position</code> from playlist. * * @param position track number. * @throws MPDServerException if an error occur while contacting server. * @see #skipToId(int) */ public void skipToPosition(int position) throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_PLAY, Integer.toString(position)); } /** * Stops music playing. * * @throws MPDServerException if an error occur while contacting server. */ public void stop() throws MPDServerException { if (!isConnected()) throw new MPDServerException("MPD Connection is not established"); mpdConnection.sendCommand(MPDCommand.MPD_CMD_STOP); } /** * Wait for server changes using "idle" command on the dedicated connection. * * @return Data read from the server. * @throws MPDServerException if an error occur while contacting server */ public List<String> waitForChanges() throws MPDServerException { while (mpdIdleConnection != null && mpdIdleConnection.isConnected()) { List<String> data = mpdIdleConnection .sendAsyncCommand(MPDCommand.MPD_CMD_IDLE); if (data.isEmpty()) { continue; } return data; } throw new MPDConnectionException("IDLE connection lost"); } }
MPD: Create string array of correct size when using toArray(). When toArray() is called on a Collection object with a zero-length array argument, the toArray() method has to construct a new array of the right size using reflection, which is significantly expensive.
JMPDComm/src/org/a0z/mpd/MPD.java
MPD: Create string array of correct size when using toArray().
Java
apache-2.0
a2a6525c846d4b481be9a7395dc61e8883329f4e
0
jpechane/debezium,debezium/debezium,jpechane/debezium,debezium/debezium,debezium/debezium,jpechane/debezium,jpechane/debezium,debezium/debezium
/* * Copyright Debezium Authors. * * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0 */ package io.debezium.connector.postgresql; import static io.debezium.connector.postgresql.TestHelper.PK_FIELD; import static io.debezium.connector.postgresql.TestHelper.topicName; import static io.debezium.connector.postgresql.junit.SkipWhenDecoderPluginNameIs.DecoderPluginName.PGOUTPUT; import static io.debezium.connector.postgresql.junit.SkipWhenDecoderPluginNameIsNot.DecoderPluginName.WAL2JSON; import static junit.framework.TestCase.assertEquals; import static junit.framework.TestCase.assertTrue; import static org.fest.assertions.Assertions.assertThat; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotEquals; import java.math.BigDecimal; import java.time.Instant; import java.time.LocalDateTime; import java.time.ZoneOffset; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.IntStream; import org.apache.commons.lang3.RandomStringUtils; import org.apache.kafka.connect.data.Decimal; import org.apache.kafka.connect.data.Schema; import org.apache.kafka.connect.data.SchemaBuilder; import org.apache.kafka.connect.data.Struct; import org.apache.kafka.connect.source.SourceRecord; import org.awaitility.Awaitility; import org.awaitility.Duration; import org.fest.assertions.Assertions; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestRule; import io.debezium.config.CommonConnectorConfig; import io.debezium.config.Configuration; import io.debezium.connector.postgresql.PostgresConnectorConfig.IntervalHandlingMode; import io.debezium.connector.postgresql.PostgresConnectorConfig.SchemaRefreshMode; import io.debezium.connector.postgresql.PostgresConnectorConfig.SnapshotMode; import io.debezium.connector.postgresql.connection.PostgresConnection; import io.debezium.connector.postgresql.connection.ReplicationConnection.Builder; import io.debezium.connector.postgresql.junit.SkipTestDependingOnDecoderPluginNameRule; import io.debezium.connector.postgresql.junit.SkipWhenDecoderPluginNameIs; import io.debezium.connector.postgresql.junit.SkipWhenDecoderPluginNameIsNot; import io.debezium.connector.postgresql.spi.SlotState; import io.debezium.data.Bits; import io.debezium.data.Enum; import io.debezium.data.Envelope; import io.debezium.data.SpecialValueDecimal; import io.debezium.data.VariableScaleDecimal; import io.debezium.data.VerifyRecord; import io.debezium.data.geometry.Point; import io.debezium.doc.FixFor; import io.debezium.heartbeat.Heartbeat; import io.debezium.jdbc.JdbcValueConverters.DecimalMode; import io.debezium.jdbc.TemporalPrecisionMode; import io.debezium.junit.ConditionalFail; import io.debezium.junit.ShouldFailWhen; import io.debezium.junit.logging.LogInterceptor; import io.debezium.relational.RelationalDatabaseConnectorConfig.DecimalHandlingMode; import io.debezium.relational.Table; import io.debezium.relational.TableId; import io.debezium.util.Stopwatch; import io.debezium.util.Testing; /** * Integration test for the {@link RecordsStreamProducer} class. This also tests indirectly the PG plugin functionality for * different use cases. * * @author Horia Chiorean ([email protected]) */ public class RecordsStreamProducerIT extends AbstractRecordsProducerTest { private TestConsumer consumer; @Rule public final TestRule skip = new SkipTestDependingOnDecoderPluginNameRule(); @Rule public TestRule conditionalFail = new ConditionalFail(); @Before public void before() throws Exception { // ensure the slot is deleted for each test TestHelper.dropAllSchemas(); TestHelper.executeDDL("init_postgis.ddl"); String statements = "CREATE SCHEMA IF NOT EXISTS public;" + "DROP TABLE IF EXISTS test_table;" + "CREATE TABLE test_table (pk SERIAL, text TEXT, PRIMARY KEY(pk));" + "CREATE TABLE table_with_interval (id SERIAL PRIMARY KEY, title VARCHAR(512) NOT NULL, time_limit INTERVAL DEFAULT '60 days'::INTERVAL NOT NULL);" + "INSERT INTO test_table(text) VALUES ('insert');"; TestHelper.execute(statements); Configuration.Builder configBuilder = TestHelper.defaultConfig() .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, false) .with(PostgresConnectorConfig.SCHEMA_BLACKLIST, "postgis"); // todo DBZ-766 are these really needed? if (TestHelper.decoderPlugin() == PostgresConnectorConfig.LogicalDecoder.PGOUTPUT) { configBuilder = configBuilder.with("database.replication", "database") .with("database.preferQueryMode", "simple") .with("assumeMinServerVersion.set", "9.4"); } Testing.Print.enable(); } private void startConnector(Function<Configuration.Builder, Configuration.Builder> customConfig, boolean waitForSnapshot) throws InterruptedException { start(PostgresConnector.class, new PostgresConnectorConfig(customConfig.apply(TestHelper.defaultConfig() .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, false) .with(PostgresConnectorConfig.SCHEMA_BLACKLIST, "postgis") .with(PostgresConnectorConfig.SNAPSHOT_MODE, waitForSnapshot ? SnapshotMode.INITIAL : SnapshotMode.NEVER)) .build()).getConfig()); assertConnectorIsRunning(); waitForStreamingToStart(); if (waitForSnapshot) { // Wait for snapshot to be in progress consumer = testConsumer(1); consumer.await(TestHelper.waitTimeForRecords(), TimeUnit.SECONDS); consumer.remove(); } } private void startConnector(Function<Configuration.Builder, Configuration.Builder> customConfig) throws InterruptedException { startConnector(customConfig, true); } private void startConnector() throws InterruptedException { startConnector(Function.identity(), true); } @Test public void shouldReceiveChangesForInsertsWithDifferentDataTypes() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(); consumer = testConsumer(1); // numerical types consumer.expects(1); assertInsert(INSERT_NUMERIC_TYPES_STMT, 1, schemasAndValuesForNumericType()); // numerical decimal types consumer.expects(1); assertInsert(INSERT_NUMERIC_DECIMAL_TYPES_STMT_NO_NAN, 1, schemasAndValuesForBigDecimalEncodedNumericTypes()); // string types consumer.expects(1); assertInsert(INSERT_STRING_TYPES_STMT, 1, schemasAndValuesForStringTypes()); // monetary types consumer.expects(1); assertInsert(INSERT_CASH_TYPES_STMT, 1, schemaAndValuesForMoneyTypes()); // bits and bytes consumer.expects(1); assertInsert(INSERT_BIN_TYPES_STMT, 1, schemaAndValuesForBinTypes()); // date and time consumer.expects(1); assertInsert(INSERT_DATE_TIME_TYPES_STMT, 1, schemaAndValuesForDateTimeTypes()); // text consumer.expects(1); assertInsert(INSERT_TEXT_TYPES_STMT, 1, schemasAndValuesForTextTypes()); // geom types consumer.expects(1); assertInsert(INSERT_GEOM_TYPES_STMT, 1, schemaAndValuesForGeomTypes()); // range types consumer.expects(1); assertInsert(INSERT_RANGE_TYPES_STMT, 1, schemaAndValuesForRangeTypes()); } @Test @FixFor("DBZ-1498") public void shouldReceiveChangesForIntervalAsString() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config .with(PostgresConnectorConfig.INTERVAL_HANDLING_MODE, IntervalHandlingMode.STRING)); consumer = testConsumer(1); // date and time consumer.expects(1); assertInsert(INSERT_DATE_TIME_TYPES_STMT, 1, schemaAndValuesForIntervalAsString()); } @Test @FixFor("DBZ-766") public void shouldReceiveChangesAfterConnectionRestart() throws Exception { TestHelper.dropDefaultReplicationSlot(); TestHelper.dropPublication(); startConnector(config -> config .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(PostgresConnectorConfig.SCHEMA_BLACKLIST, "postgis")); TestHelper.execute("CREATE TABLE t0 (pk SERIAL, d INTEGER, PRIMARY KEY(pk));"); consumer = testConsumer(1); waitForStreamingToStart(); // Insert new row and verify inserted executeAndWait("INSERT INTO t0 (pk,d) VALUES(1,1);"); assertRecordInserted("public.t0", PK_FIELD, 1); // simulate the connector is stopped stopConnector(); // Alter schema offline TestHelper.execute("ALTER TABLE t0 ADD COLUMN d2 INTEGER;"); TestHelper.execute("ALTER TABLE t0 ALTER COLUMN d SET NOT NULL;"); // Start the producer and wait; the wait is to guarantee the stream thread is polling // This appears to be a potential race condition problem startConnector(config -> config .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(PostgresConnectorConfig.SCHEMA_BLACKLIST, "postgis"), false); consumer = testConsumer(1); waitForStreamingToStart(); // Insert new row and verify inserted executeAndWait("INSERT INTO t0 (pk,d,d2) VALUES (2,1,3);"); assertRecordInserted("public.t0", PK_FIELD, 2); } @Test @FixFor("DBZ-1698") public void shouldReceiveUpdateSchemaAfterConnectionRestart() throws Exception { TestHelper.dropDefaultReplicationSlot(); TestHelper.dropPublication(); startConnector(config -> config .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(PostgresConnectorConfig.SCHEMA_BLACKLIST, "postgis") .with(PostgresConnectorConfig.DROP_SLOT_ON_STOP, false) .with(PostgresConnectorConfig.SCHEMA_REFRESH_MODE, SchemaRefreshMode.COLUMNS_DIFF_EXCLUDE_UNCHANGED_TOAST)); TestHelper.execute("CREATE TABLE t0 (pk SERIAL, d INTEGER, PRIMARY KEY(pk));"); consumer = testConsumer(1); waitForStreamingToStart(); // Insert new row and verify inserted executeAndWait("INSERT INTO t0 (pk,d) VALUES(1,1);"); assertRecordInserted("public.t0", PK_FIELD, 1); // simulate the connector is stopped stopConnector(); Thread.sleep(3000); // Add record offline TestHelper.execute("INSERT INTO t0 (pk,d) VALUES(2,2);"); // Alter schema offline TestHelper.execute("ALTER TABLE t0 ADD COLUMN d2 NUMERIC(10,6) DEFAULT 0 NOT NULL;"); TestHelper.execute("ALTER TABLE t0 ALTER COLUMN d SET NOT NULL;"); // Start the producer and wait; the wait is to guarantee the stream thread is polling // This appears to be a potential race condition problem startConnector(config -> config .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(PostgresConnectorConfig.SCHEMA_BLACKLIST, "postgis") .with(PostgresConnectorConfig.DROP_SLOT_ON_STOP, false) .with(PostgresConnectorConfig.SCHEMA_REFRESH_MODE, SchemaRefreshMode.COLUMNS_DIFF_EXCLUDE_UNCHANGED_TOAST), false); consumer = testConsumer(2); waitForStreamingToStart(); // Insert new row and verify inserted executeAndWait("INSERT INTO t0 (pk,d,d2) VALUES (3,1,3);"); assertRecordInserted("public.t0", PK_FIELD, 2); assertRecordInserted("public.t0", PK_FIELD, 3); stopConnector(); TestHelper.dropDefaultReplicationSlot(); TestHelper.dropPublication(); } @Test public void shouldReceiveChangesForInsertsCustomTypes() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config.with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true)); // custom types + null value assertInsert(INSERT_CUSTOM_TYPES_STMT, 1, schemasAndValuesForCustomTypes()); } @Test @FixFor("DBZ-1141") public void shouldProcessNotNullColumnsConnectDateTypes() throws Exception { final Struct before = testProcessNotNullColumns(TemporalPrecisionMode.CONNECT); if (before != null) { Assertions.assertThat(before.get("created_at")).isEqualTo(new java.util.Date(0)); Assertions.assertThat(before.get("created_at_tz")).isEqualTo("1970-01-01T00:00:00Z"); Assertions.assertThat(before.get("ctime")).isEqualTo(new java.util.Date(0)); Assertions.assertThat(before.get("ctime_tz")).isEqualTo("00:00:00Z"); Assertions.assertThat(before.get("cdate")).isEqualTo(new java.util.Date(0)); Assertions.assertThat(before.get("cmoney")).isEqualTo(new BigDecimal("0.00")); Assertions.assertThat(before.get("cbits")).isEqualTo(new byte[0]); } } @Test @FixFor("DBZ-1141") public void shouldProcessNotNullColumnsAdaptiveDateTypes() throws Exception { final Struct before = testProcessNotNullColumns(TemporalPrecisionMode.ADAPTIVE); if (before != null) { Assertions.assertThat(before.get("created_at")).isEqualTo(0L); Assertions.assertThat(before.get("created_at_tz")).isEqualTo("1970-01-01T00:00:00Z"); Assertions.assertThat(before.get("ctime")).isEqualTo(0L); Assertions.assertThat(before.get("ctime_tz")).isEqualTo("00:00:00Z"); Assertions.assertThat(before.get("cdate")).isEqualTo(0); Assertions.assertThat(before.get("cmoney")).isEqualTo(new BigDecimal("0.00")); Assertions.assertThat(before.get("cbits")).isEqualTo(new byte[0]); } } @Test @FixFor("DBZ-1141") public void shouldProcessNotNullColumnsAdaptiveMsDateTypes() throws Exception { final Struct before = testProcessNotNullColumns(TemporalPrecisionMode.ADAPTIVE_TIME_MICROSECONDS); if (before != null) { Assertions.assertThat(before.get("created_at")).isEqualTo(0L); Assertions.assertThat(before.get("created_at_tz")).isEqualTo("1970-01-01T00:00:00Z"); Assertions.assertThat(before.get("ctime")).isEqualTo(0L); Assertions.assertThat(before.get("ctime_tz")).isEqualTo("00:00:00Z"); Assertions.assertThat(before.get("cdate")).isEqualTo(0); Assertions.assertThat(before.get("cmoney")).isEqualTo(new BigDecimal("0.00")); Assertions.assertThat(before.get("cbits")).isEqualTo(new byte[0]); } } @Test @FixFor("DBZ-1158") public void shouldProcessNotNullColumnsFallbacksReplicaIdentity() throws Exception { // Use adaptive here as its the connector default final Struct before = testProcessNotNullColumns(TemporalPrecisionMode.ADAPTIVE); if (before != null) { Assertions.assertThat(before.get("csmallint")).isEqualTo((short) 0); Assertions.assertThat(before.get("cinteger")).isEqualTo(0); Assertions.assertThat(before.get("cbigint")).isEqualTo(0L); Assertions.assertThat(before.get("creal")).isEqualTo(0.f); Assertions.assertThat(before.get("cbool")).isEqualTo(false); Assertions.assertThat(before.get("cfloat8")).isEqualTo(0.0); Assertions.assertThat(before.get("cnumeric")).isEqualTo(new BigDecimal("0.00")); Assertions.assertThat(before.get("cvarchar")).isEqualTo(""); Assertions.assertThat(before.get("cbox")).isEqualTo(new byte[0]); Assertions.assertThat(before.get("ccircle")).isEqualTo(new byte[0]); Assertions.assertThat(before.get("cinterval")).isEqualTo(0L); Assertions.assertThat(before.get("cline")).isEqualTo(new byte[0]); Assertions.assertThat(before.get("clseg")).isEqualTo(new byte[0]); Assertions.assertThat(before.get("cpath")).isEqualTo(new byte[0]); Assertions.assertThat(before.get("cpoint")).isEqualTo(Point.createValue(Point.builder().build(), 0, 0)); Assertions.assertThat(before.get("cpolygon")).isEqualTo(new byte[0]); Assertions.assertThat(before.get("cchar")).isEqualTo(""); Assertions.assertThat(before.get("ctext")).isEqualTo(""); Assertions.assertThat(before.get("cjson")).isEqualTo(""); Assertions.assertThat(before.get("cxml")).isEqualTo(""); Assertions.assertThat(before.get("cuuid")).isEqualTo(""); Assertions.assertThat(before.get("cvarbit")).isEqualTo(new byte[0]); Assertions.assertThat(before.get("cinet")).isEqualTo(""); Assertions.assertThat(before.get("ccidr")).isEqualTo(""); Assertions.assertThat(before.get("cmacaddr")).isEqualTo(""); } } private Struct testProcessNotNullColumns(TemporalPrecisionMode temporalMode) throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(PostgresConnectorConfig.SCHEMA_BLACKLIST, "postgis") .with(PostgresConnectorConfig.TIME_PRECISION_MODE, temporalMode)); consumer.expects(1); executeAndWait("INSERT INTO not_null_table VALUES (default, 30, '2019-02-10 11:34:58', '2019-02-10 11:35:00', " + "'10:20:11', '10:20:12', '2019-02-01', '$20', B'101', 32766, 2147483646, 9223372036854775806, 3.14, " + "true, 3.14768, 1234.56, 'Test', '(0,0),(1,1)', '<(0,0),1>', '01:02:03', '{0,1,2}', '((0,0),(1,1))', " + "'((0,0),(0,1),(0,2))', '(1,1)', '((0,0),(0,1),(1,1))', 'a', 'hello world', '{\"key\": 123}', " + "'<doc><item>abc</item></doc>', 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11', B'101', '192.168.1.100', " + "'192.168.1', '08:00:2b:01:02:03');"); consumer.remove(); consumer.expects(1); executeAndWait("UPDATE not_null_table SET val=40"); final SourceRecord record = consumer.remove(); VerifyRecord.isValidUpdate(record, "pk", 1); VerifyRecord.isValid(record); return ((Struct) record.value()).getStruct("before"); } @Test(timeout = 30000) public void shouldReceiveChangesForInsertsWithPostgisTypes() throws Exception { TestHelper.executeDDL("postgis_create_tables.ddl"); startConnector(); consumer = testConsumer(1, "public"); // spatial_ref_sys produces a ton of records in the postgis schema consumer.setIgnoreExtraRecords(true); // need to wait for all the spatial_ref_sys to flow through and be ignored. // this exceeds the normal 2s timeout. TestHelper.execute("INSERT INTO public.dummy_table DEFAULT VALUES;"); consumer.await(TestHelper.waitTimeForRecords() * 10, TimeUnit.SECONDS); while (true) { if (!consumer.isEmpty()) { SourceRecord record = consumer.remove(); if (record.topic().endsWith(".public.dummy_table")) { break; } } } // now do it for actual testing // postgis types consumer.expects(1); assertInsert(INSERT_POSTGIS_TYPES_STMT, 1, schemaAndValuesForPostgisTypes()); } @Test(timeout = 30000) public void shouldReceiveChangesForInsertsWithPostgisArrayTypes() throws Exception { TestHelper.executeDDL("postgis_create_tables.ddl"); startConnector(); consumer = testConsumer(1, "public"); // spatial_ref_sys produces a ton of records in the postgis schema consumer.setIgnoreExtraRecords(true); // need to wait for all the spatial_ref_sys to flow through and be ignored. // this exceeds the normal 2s timeout. TestHelper.execute("INSERT INTO public.dummy_table DEFAULT VALUES;"); consumer.await(TestHelper.waitTimeForRecords() * 10, TimeUnit.SECONDS); while (true) { if (!consumer.isEmpty()) { SourceRecord record = consumer.remove(); if (record.topic().endsWith(".public.dummy_table")) { break; } } } // now do it for actual testing // postgis types consumer.expects(1); assertInsert(INSERT_POSTGIS_ARRAY_TYPES_STMT, 1, schemaAndValuesForPostgisArrayTypes()); } @Test @ShouldFailWhen(DecoderDifferences.AreQuotedIdentifiersUnsupported.class) // TODO DBZ-493 public void shouldReceiveChangesForInsertsWithQuotedNames() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(); // Quoted column name assertInsert(INSERT_QUOTED_TYPES_STMT, 1, schemasAndValuesForQuotedTypes()); } @Test public void shouldReceiveChangesForInsertsWithArrayTypes() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(); assertInsert(INSERT_ARRAY_TYPES_STMT, 1, schemasAndValuesForArrayTypes()); } @Test @FixFor("DBZ-1029") public void shouldReceiveChangesForInsertsIndependentOfReplicaIdentity() throws Exception { // insert statement should not be affected by replica identity settings in any way startConnector(); TestHelper.execute("ALTER TABLE test_table REPLICA IDENTITY DEFAULT;"); String statement = "INSERT INTO test_table (text) VALUES ('pk_and_default');"; assertInsert(statement, 2, Collections.singletonList(new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "pk_and_default"))); consumer.expects(1); TestHelper.execute("ALTER TABLE test_table REPLICA IDENTITY FULL;"); statement = "INSERT INTO test_table (text) VALUES ('pk_and_full');"; assertInsert(statement, 3, Collections.singletonList(new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "pk_and_full"))); consumer.expects(1); TestHelper.execute("ALTER TABLE test_table DROP CONSTRAINT test_table_pkey CASCADE;"); statement = "INSERT INTO test_table (pk, text) VALUES (4, 'no_pk_and_full');"; assertInsert(statement, 4, Collections.singletonList(new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "no_pk_and_full"))); consumer.expects(1); TestHelper.execute("ALTER TABLE test_table REPLICA IDENTITY DEFAULT;"); statement = "INSERT INTO test_table (pk, text) VALUES (5, 'no_pk_and_default');"; assertInsert(statement, 5, Collections.singletonList(new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "no_pk_and_default"))); } @Test @FixFor("DBZ-478") public void shouldReceiveChangesForNullInsertsWithArrayTypes() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(); assertInsert(INSERT_ARRAY_TYPES_WITH_NULL_VALUES_STMT, 1, schemasAndValuesForArrayTypesWithNullValues()); } @Test public void shouldReceiveChangesForNewTable() throws Exception { String statement = "CREATE SCHEMA s1;" + "CREATE TABLE s1.a (pk SERIAL, aa integer, PRIMARY KEY(pk));" + "INSERT INTO s1.a (aa) VALUES (11);"; startConnector(); executeAndWait(statement); assertRecordInserted("s1.a", PK_FIELD, 1); } @Test public void shouldReceiveChangesForRenamedTable() throws Exception { String statement = "DROP TABLE IF EXISTS renamed_test_table;" + "ALTER TABLE test_table RENAME TO renamed_test_table;" + "INSERT INTO renamed_test_table (text) VALUES ('new');"; startConnector(); executeAndWait(statement); assertRecordInserted("public.renamed_test_table", PK_FIELD, 2); } @Test @SkipWhenDecoderPluginNameIs(value = PGOUTPUT, reason = "An update on a table with no primary key and default replica throws PSQLException as tables must have a PK") public void shouldReceiveChangesForUpdates() throws Exception { startConnector(); executeAndWait("UPDATE test_table set text='update' WHERE pk=1"); // the update record should be the last record SourceRecord updatedRecord = consumer.remove(); String topicName = topicName("public.test_table"); assertEquals(topicName, updatedRecord.topic()); VerifyRecord.isValidUpdate(updatedRecord, PK_FIELD, 1); // default replica identity only fires previous values for PK changes List<SchemaAndValueField> expectedAfter = Collections.singletonList( new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "update")); assertRecordSchemaAndValues(expectedAfter, updatedRecord, Envelope.FieldName.AFTER); // alter the table and set its replica identity to full the issue another update consumer.expects(1); TestHelper.execute("ALTER TABLE test_table REPLICA IDENTITY FULL"); executeAndWait("UPDATE test_table set text='update2' WHERE pk=1"); updatedRecord = consumer.remove(); assertEquals(topicName, updatedRecord.topic()); VerifyRecord.isValidUpdate(updatedRecord, PK_FIELD, 1); // now we should get both old and new values List<SchemaAndValueField> expectedBefore = Collections.singletonList(new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "update")); assertRecordSchemaAndValues(expectedBefore, updatedRecord, Envelope.FieldName.BEFORE); expectedAfter = Collections.singletonList(new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "update2")); assertRecordSchemaAndValues(expectedAfter, updatedRecord, Envelope.FieldName.AFTER); // without PK and with REPLICA IDENTITY FULL we still getting all fields 'before' and all fields 'after' TestHelper.execute("ALTER TABLE test_table DROP CONSTRAINT test_table_pkey CASCADE;"); consumer.expects(1); executeAndWait("UPDATE test_table SET text = 'update3' WHERE pk = 1;"); updatedRecord = consumer.remove(); assertEquals(topicName, updatedRecord.topic()); expectedBefore = Collections.singletonList(new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "update2")); assertRecordSchemaAndValues(expectedBefore, updatedRecord, Envelope.FieldName.BEFORE); expectedAfter = Collections.singletonList(new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "update3")); assertRecordSchemaAndValues(expectedAfter, updatedRecord, Envelope.FieldName.AFTER); // without PK and with REPLICA IDENTITY DEFAULT we will get nothing TestHelper.execute("ALTER TABLE test_table REPLICA IDENTITY DEFAULT;"); consumer.expects(0); executeAndWaitForNoRecords("UPDATE test_table SET text = 'no_pk_and_default' WHERE pk = 1;"); assertThat(consumer.isEmpty()).isTrue(); } @Test public void shouldReceiveChangesForUpdatesWithColumnChanges() throws Exception { // add a new column String statements = "ALTER TABLE test_table ADD COLUMN uvc VARCHAR(2);" + "ALTER TABLE test_table REPLICA IDENTITY FULL;" + "UPDATE test_table SET uvc ='aa' WHERE pk = 1;"; startConnector(); consumer = testConsumer(1); executeAndWait(statements); // the update should be the last record SourceRecord updatedRecord = consumer.remove(); String topicName = topicName("public.test_table"); assertEquals(topicName, updatedRecord.topic()); VerifyRecord.isValidUpdate(updatedRecord, PK_FIELD, 1); // now check we got the updated value (the old value should be null, the new one whatever we set) List<SchemaAndValueField> expectedBefore = Collections.singletonList(new SchemaAndValueField("uvc", null, null)); assertRecordSchemaAndValues(expectedBefore, updatedRecord, Envelope.FieldName.BEFORE); List<SchemaAndValueField> expectedAfter = Collections.singletonList(new SchemaAndValueField("uvc", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "aa")); assertRecordSchemaAndValues(expectedAfter, updatedRecord, Envelope.FieldName.AFTER); // rename a column statements = "ALTER TABLE test_table RENAME COLUMN uvc to xvc;" + "UPDATE test_table SET xvc ='bb' WHERE pk = 1;"; consumer.expects(1); executeAndWait(statements); updatedRecord = consumer.remove(); VerifyRecord.isValidUpdate(updatedRecord, PK_FIELD, 1); // now check we got the updated value (the old value should be null, the new one whatever we set) expectedBefore = Collections.singletonList(new SchemaAndValueField("xvc", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "aa")); assertRecordSchemaAndValues(expectedBefore, updatedRecord, Envelope.FieldName.BEFORE); expectedAfter = Collections.singletonList(new SchemaAndValueField("xvc", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "bb")); assertRecordSchemaAndValues(expectedAfter, updatedRecord, Envelope.FieldName.AFTER); // drop a column statements = "ALTER TABLE test_table DROP COLUMN xvc;" + "UPDATE test_table SET text ='update' WHERE pk = 1;"; consumer.expects(1); executeAndWait(statements); updatedRecord = consumer.remove(); VerifyRecord.isValidUpdate(updatedRecord, PK_FIELD, 1); // change a column type statements = "ALTER TABLE test_table ADD COLUMN modtype INTEGER;" + "INSERT INTO test_table (pk,modtype) VALUES (2,1);"; consumer.expects(1); executeAndWait(statements); updatedRecord = consumer.remove(); VerifyRecord.isValidInsert(updatedRecord, PK_FIELD, 2); assertRecordSchemaAndValues( Collections.singletonList(new SchemaAndValueField("modtype", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 1)), updatedRecord, Envelope.FieldName.AFTER); statements = "ALTER TABLE test_table ALTER COLUMN modtype TYPE SMALLINT;" + "UPDATE test_table SET modtype = 2 WHERE pk = 2;"; consumer.expects(1); executeAndWait(statements); updatedRecord = consumer.remove(); VerifyRecord.isValidUpdate(updatedRecord, PK_FIELD, 2); assertRecordSchemaAndValues( Collections.singletonList(new SchemaAndValueField("modtype", SchemaBuilder.OPTIONAL_INT16_SCHEMA, (short) 1)), updatedRecord, Envelope.FieldName.BEFORE); assertRecordSchemaAndValues( Collections.singletonList(new SchemaAndValueField("modtype", SchemaBuilder.OPTIONAL_INT16_SCHEMA, (short) 2)), updatedRecord, Envelope.FieldName.AFTER); } @Test public void shouldReceiveChangesForUpdatesWithPKChanges() throws Exception { startConnector(); consumer = testConsumer(3); executeAndWait("UPDATE test_table SET text = 'update', pk = 2"); String topicName = topicName("public.test_table"); // first should be a delete of the old pk SourceRecord deleteRecord = consumer.remove(); assertEquals(topicName, deleteRecord.topic()); VerifyRecord.isValidDelete(deleteRecord, PK_FIELD, 1); // followed by a tombstone of the old pk SourceRecord tombstoneRecord = consumer.remove(); assertEquals(topicName, tombstoneRecord.topic()); VerifyRecord.isValidTombstone(tombstoneRecord, PK_FIELD, 1); // and finally insert of the new value SourceRecord insertRecord = consumer.remove(); assertEquals(topicName, insertRecord.topic()); VerifyRecord.isValidInsert(insertRecord, PK_FIELD, 2); } @Test @FixFor("DBZ-582") public void shouldReceiveChangesForUpdatesWithPKChangesWithoutTombstone() throws Exception { startConnector(config -> config .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(CommonConnectorConfig.TOMBSTONES_ON_DELETE, false)); consumer = testConsumer(2); executeAndWait("UPDATE test_table SET text = 'update', pk = 2"); String topicName = topicName("public.test_table"); // first should be a delete of the old pk SourceRecord deleteRecord = consumer.remove(); assertEquals(topicName, deleteRecord.topic()); VerifyRecord.isValidDelete(deleteRecord, PK_FIELD, 1); // followed by insert of the new value SourceRecord insertRecord = consumer.remove(); assertEquals(topicName, insertRecord.topic()); VerifyRecord.isValidInsert(insertRecord, PK_FIELD, 2); } @Test public void shouldReceiveChangesForDefaultValues() throws Exception { String statements = "ALTER TABLE test_table REPLICA IDENTITY FULL;" + "ALTER TABLE test_table ADD COLUMN default_column TEXT DEFAULT 'default';" + "INSERT INTO test_table (text) VALUES ('update');"; startConnector(); consumer = testConsumer(1); executeAndWait(statements); SourceRecord insertRecord = consumer.remove(); assertEquals(topicName("public.test_table"), insertRecord.topic()); VerifyRecord.isValidInsert(insertRecord, PK_FIELD, 2); List<SchemaAndValueField> expectedSchemaAndValues = Arrays.asList( new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "update"), new SchemaAndValueField("default_column", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "default")); assertRecordSchemaAndValues(expectedSchemaAndValues, insertRecord, Envelope.FieldName.AFTER); } @Test public void shouldReceiveChangesForTypeConstraints() throws Exception { // add a new column String statements = "ALTER TABLE test_table ADD COLUMN num_val NUMERIC(5,2);" + "ALTER TABLE test_table REPLICA IDENTITY FULL;" + "UPDATE test_table SET num_val = 123.45 WHERE pk = 1;"; startConnector(); consumer = testConsumer(1); executeAndWait(statements); // the update should be the last record SourceRecord updatedRecord = consumer.remove(); String topicName = topicName("public.test_table"); assertEquals(topicName, updatedRecord.topic()); VerifyRecord.isValidUpdate(updatedRecord, PK_FIELD, 1); // now check we got the updated value (the old value should be null, the new one whatever we set) List<SchemaAndValueField> expectedBefore = Collections.singletonList(new SchemaAndValueField("num_val", null, null)); assertRecordSchemaAndValues(expectedBefore, updatedRecord, Envelope.FieldName.BEFORE); List<SchemaAndValueField> expectedAfter = Collections.singletonList( new SchemaAndValueField("num_val", Decimal.builder(2).parameter(TestHelper.PRECISION_PARAMETER_KEY, "5").optional().build(), new BigDecimal("123.45"))); assertRecordSchemaAndValues(expectedAfter, updatedRecord, Envelope.FieldName.AFTER); // change a constraint statements = "ALTER TABLE test_table ALTER COLUMN num_val TYPE NUMERIC(6,1);" + "INSERT INTO test_table (pk,num_val) VALUES (2,123.41);"; consumer.expects(1); executeAndWait(statements); updatedRecord = consumer.remove(); VerifyRecord.isValidInsert(updatedRecord, PK_FIELD, 2); assertRecordSchemaAndValues( Collections.singletonList(new SchemaAndValueField("num_val", Decimal.builder(1).parameter(TestHelper.PRECISION_PARAMETER_KEY, "6").optional().build(), new BigDecimal("123.4"))), updatedRecord, Envelope.FieldName.AFTER); statements = "ALTER TABLE test_table ALTER COLUMN num_val TYPE NUMERIC;" + "INSERT INTO test_table (pk,num_val) VALUES (3,123.4567);"; consumer.expects(1); executeAndWait(statements); updatedRecord = consumer.remove(); final Struct dvs = new Struct(VariableScaleDecimal.schema()); dvs.put("scale", 4).put("value", new BigDecimal("123.4567").unscaledValue().toByteArray()); VerifyRecord.isValidInsert(updatedRecord, PK_FIELD, 3); assertRecordSchemaAndValues( Collections.singletonList(new SchemaAndValueField("num_val", VariableScaleDecimal.builder().optional().build(), dvs)), updatedRecord, Envelope.FieldName.AFTER); statements = "ALTER TABLE test_table ALTER COLUMN num_val TYPE DECIMAL(12,4);" + "INSERT INTO test_table (pk,num_val) VALUES (4,2.48);"; consumer.expects(1); executeAndWait(statements); updatedRecord = consumer.remove(); VerifyRecord.isValidInsert(updatedRecord, PK_FIELD, 4); assertRecordSchemaAndValues( Collections.singletonList(new SchemaAndValueField("num_val", Decimal.builder(4).parameter(TestHelper.PRECISION_PARAMETER_KEY, "12").optional().build(), new BigDecimal("2.4800"))), updatedRecord, Envelope.FieldName.AFTER); statements = "ALTER TABLE test_table ALTER COLUMN num_val TYPE DECIMAL(12);" + "INSERT INTO test_table (pk,num_val) VALUES (5,1238);"; consumer.expects(1); executeAndWait(statements); updatedRecord = consumer.remove(); VerifyRecord.isValidInsert(updatedRecord, PK_FIELD, 5); assertRecordSchemaAndValues( Collections.singletonList(new SchemaAndValueField("num_val", Decimal.builder(0).parameter(TestHelper.PRECISION_PARAMETER_KEY, "12").optional().build(), new BigDecimal("1238"))), updatedRecord, Envelope.FieldName.AFTER); statements = "ALTER TABLE test_table ALTER COLUMN num_val TYPE DECIMAL;" + "INSERT INTO test_table (pk,num_val) VALUES (6,1225.1);"; consumer.expects(1); executeAndWait(statements); updatedRecord = consumer.remove(); final Struct dvs2 = new Struct(VariableScaleDecimal.schema()); dvs2.put("scale", 1).put("value", new BigDecimal("1225.1").unscaledValue().toByteArray()); VerifyRecord.isValidInsert(updatedRecord, PK_FIELD, 6); assertRecordSchemaAndValues( Collections.singletonList(new SchemaAndValueField("num_val", VariableScaleDecimal.builder().optional().build(), dvs2)), updatedRecord, Envelope.FieldName.AFTER); statements = "ALTER TABLE test_table ALTER COLUMN num_val SET NOT NULL;" + "INSERT INTO test_table (pk,num_val) VALUES (7,1976);"; consumer.expects(1); executeAndWait(statements); updatedRecord = consumer.remove(); dvs2.put("scale", 0).put("value", new BigDecimal("1976").unscaledValue().toByteArray()); VerifyRecord.isValidInsert(updatedRecord, PK_FIELD, 7); assertRecordSchemaAndValues( Collections.singletonList(new SchemaAndValueField("num_val", VariableScaleDecimal.builder().build(), dvs2)), updatedRecord, Envelope.FieldName.AFTER); } @Test public void shouldReceiveChangesForDeletes() throws Exception { // add a new entry and remove both String statements = "INSERT INTO test_table (text) VALUES ('insert2');" + "DELETE FROM test_table WHERE pk > 0;"; startConnector(); consumer = testConsumer(5); executeAndWait(statements); String topicPrefix = "public.test_table"; String topicName = topicName(topicPrefix); assertRecordInserted(topicPrefix, PK_FIELD, 2); // first entry removed SourceRecord record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidDelete(record, PK_FIELD, 1); // followed by a tombstone record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidTombstone(record, PK_FIELD, 1); // second entry removed record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidDelete(record, PK_FIELD, 2); // followed by a tombstone record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidTombstone(record, PK_FIELD, 2); } @Test @FixFor("DBZ-582") public void shouldReceiveChangesForDeletesWithoutTombstone() throws Exception { // add a new entry and remove both String statements = "INSERT INTO test_table (text) VALUES ('insert2');" + "DELETE FROM test_table WHERE pk > 0;"; startConnector(config -> config .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(CommonConnectorConfig.TOMBSTONES_ON_DELETE, false)); consumer = testConsumer(3); executeAndWait(statements); String topicPrefix = "public.test_table"; String topicName = topicName(topicPrefix); assertRecordInserted(topicPrefix, PK_FIELD, 2); // first entry removed SourceRecord record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidDelete(record, PK_FIELD, 1); // second entry removed record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidDelete(record, PK_FIELD, 2); } @Test @SkipWhenDecoderPluginNameIs(value = PGOUTPUT, reason = "A delete on a table with no primary key and default replica throws PSQLException as tables must have a PK") public void shouldReceiveChangesForDeletesDependingOnReplicaIdentity() throws Exception { String topicName = topicName("public.test_table"); // With PK we should get delete event with default level of replica identity String statement = "ALTER TABLE test_table REPLICA IDENTITY DEFAULT;" + "DELETE FROM test_table WHERE pk = 1;"; startConnector(config -> config .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(CommonConnectorConfig.TOMBSTONES_ON_DELETE, false)); consumer = testConsumer(1); executeAndWait(statement); SourceRecord record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidDelete(record, PK_FIELD, 1); // Without PK we should get delete event with REPLICA IDENTITY FULL statement = "ALTER TABLE test_table REPLICA IDENTITY FULL;" + "ALTER TABLE test_table DROP CONSTRAINT test_table_pkey CASCADE;" + "INSERT INTO test_table (pk, text) VALUES (2, 'insert2');" + "DELETE FROM test_table WHERE pk = 2;"; consumer.expects(2); executeAndWait(statement); assertRecordInserted("public.test_table", PK_FIELD, 2); record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidDelete(record, PK_FIELD, 2); // Without PK and without REPLICA IDENTITY FULL we will not get delete event statement = "ALTER TABLE test_table REPLICA IDENTITY DEFAULT;" + "INSERT INTO test_table (pk, text) VALUES (3, 'insert3');" + "DELETE FROM test_table WHERE pk = 3;"; consumer.expects(1); executeAndWait(statement); assertRecordInserted("public.test_table", PK_FIELD, 3); assertThat(consumer.isEmpty()).isTrue(); } @Test public void shouldReceiveNumericTypeAsDouble() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config.with(PostgresConnectorConfig.DECIMAL_HANDLING_MODE, DecimalHandlingMode.DOUBLE)); assertInsert(INSERT_NUMERIC_DECIMAL_TYPES_STMT, 1, schemasAndValuesForDoubleEncodedNumericTypes()); } @Test @FixFor("DBZ-611") public void shouldReceiveNumericTypeAsString() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config.with(PostgresConnectorConfig.DECIMAL_HANDLING_MODE, DecimalHandlingMode.STRING)); assertInsert(INSERT_NUMERIC_DECIMAL_TYPES_STMT, 1, schemasAndValuesForStringEncodedNumericTypes()); } @Test @FixFor("DBZ-898") public void shouldReceiveHStoreTypeWithSingleValueAsMap() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config.with(PostgresConnectorConfig.HSTORE_HANDLING_MODE, PostgresConnectorConfig.HStoreHandlingMode.MAP)); assertInsert(INSERT_HSTORE_TYPE_STMT, 1, schemaAndValueFieldForMapEncodedHStoreType()); } @Test @FixFor("DBZ-898") public void shouldReceiveHStoreTypeWithMultipleValuesAsMap() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config.with(PostgresConnectorConfig.HSTORE_HANDLING_MODE, PostgresConnectorConfig.HStoreHandlingMode.MAP)); assertInsert(INSERT_HSTORE_TYPE_WITH_MULTIPLE_VALUES_STMT, 1, schemaAndValueFieldForMapEncodedHStoreTypeWithMultipleValues()); } @Test @FixFor("DBZ-898") public void shouldReceiveHStoreTypeWithNullValuesAsMap() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config.with(PostgresConnectorConfig.HSTORE_HANDLING_MODE, PostgresConnectorConfig.HStoreHandlingMode.MAP)); assertInsert(INSERT_HSTORE_TYPE_WITH_NULL_VALUES_STMT, 1, schemaAndValueFieldForMapEncodedHStoreTypeWithNullValues()); } @Test @FixFor("DBZ-898") public void shouldReceiveHStoreTypeWithSpecialCharactersInValuesAsMap() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config.with(PostgresConnectorConfig.HSTORE_HANDLING_MODE, PostgresConnectorConfig.HStoreHandlingMode.MAP)); assertInsert(INSERT_HSTORE_TYPE_WITH_SPECIAL_CHAR_STMT, 1, schemaAndValueFieldForMapEncodedHStoreTypeWithSpecialCharacters()); } @Test @FixFor("DBZ-898") public void shouldReceiveHStoreTypeAsJsonString() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); consumer = testConsumer(1); startConnector(config -> config.with(PostgresConnectorConfig.HSTORE_HANDLING_MODE, PostgresConnectorConfig.HStoreHandlingMode.JSON)); assertInsert(INSERT_HSTORE_TYPE_STMT, 1, schemaAndValueFieldForJsonEncodedHStoreType()); } @Test @FixFor("DBZ-898") public void shouldReceiveHStoreTypeWithMultipleValuesAsJsonString() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config.with(PostgresConnectorConfig.HSTORE_HANDLING_MODE, PostgresConnectorConfig.HStoreHandlingMode.JSON)); assertInsert(INSERT_HSTORE_TYPE_WITH_MULTIPLE_VALUES_STMT, 1, schemaAndValueFieldForJsonEncodedHStoreTypeWithMultipleValues()); } @Test @FixFor("DBZ-898") public void shouldReceiveHStoreTypeWithSpecialValuesInJsonString() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config.with(PostgresConnectorConfig.HSTORE_HANDLING_MODE, PostgresConnectorConfig.HStoreHandlingMode.JSON)); assertInsert(INSERT_HSTORE_TYPE_WITH_SPECIAL_CHAR_STMT, 1, schemaAndValueFieldForJsonEncodedHStoreTypeWithSpcialCharacters()); } @Test @FixFor("DBZ-898") public void shouldReceiveHStoreTypeWithNullValuesAsJsonString() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config.with(PostgresConnectorConfig.HSTORE_HANDLING_MODE, PostgresConnectorConfig.HStoreHandlingMode.JSON)); assertInsert(INSERT_HSTORE_TYPE_WITH_NULL_VALUES_STMT, 1, schemaAndValueFieldForJsonEncodedHStoreTypeWithNullValues()); } @Test @FixFor("DBZ-259") public void shouldProcessIntervalDelete() throws Exception { final String statements = "INSERT INTO table_with_interval VALUES (default, 'Foo', default);" + "INSERT INTO table_with_interval VALUES (default, 'Bar', default);" + "DELETE FROM table_with_interval WHERE id = 1;"; startConnector(); consumer.expects(4); executeAndWait(statements); final String topicPrefix = "public.table_with_interval"; final String topicName = topicName(topicPrefix); final String pk = "id"; assertRecordInserted(topicPrefix, pk, 1); assertRecordInserted(topicPrefix, pk, 2); // first entry removed SourceRecord record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidDelete(record, pk, 1); // followed by a tombstone record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidTombstone(record, pk, 1); } @Test @FixFor("DBZ-644") public void shouldPropagateSourceColumnTypeToSchemaParameter() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config.with("column.propagate.source.type", ".*vc.*")); assertInsert(INSERT_STRING_TYPES_STMT, 1, schemasAndValuesForStringTypesWithSourceColumnTypeInfo()); } @Test @FixFor("DBZ-1073") public void shouldPropagateSourceColumnTypeScaleToSchemaParameter() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config .with("column.propagate.source.type", ".*(d|dzs)") .with(PostgresConnectorConfig.DECIMAL_HANDLING_MODE, PostgresConnectorConfig.DecimalHandlingMode.DOUBLE)); assertInsert(INSERT_NUMERIC_DECIMAL_TYPES_STMT, 1, schemasAndValuesForNumericTypesWithSourceColumnTypeInfo()); } @Test @FixFor("DBZ-800") public void shouldReceiveHeartbeatAlsoWhenChangingNonWhitelistedTable() throws Exception { // the high heartbeat interval should make sure that a heartbeat message is emitted only // after insert statement which allows to check that lsn is not flushed by itself // but only when heartbeat message is produced startConnector(config -> config .with(Heartbeat.HEARTBEAT_INTERVAL, "100") .with(PostgresConnectorConfig.POLL_INTERVAL_MS, "50") .with(PostgresConnectorConfig.TABLE_WHITELIST, "s1\\.b") .with(PostgresConnectorConfig.SNAPSHOT_MODE, SnapshotMode.NEVER), false); waitForStreamingToStart(); String statement = "CREATE SCHEMA s1;" + "CREATE TABLE s1.a (pk SERIAL, aa integer, PRIMARY KEY(pk));" + "CREATE TABLE s1.b (pk SERIAL, bb integer, PRIMARY KEY(pk));" + "INSERT INTO s1.b (bb) VALUES (22);"; TestHelper.execute(statement); final AtomicInteger heartbeatCount = new AtomicInteger(); Awaitility.await().atMost(TestHelper.waitTimeForRecords(), TimeUnit.SECONDS).until(() -> { final SourceRecord record = consumeRecord(); System.out.println(record); if (record != null) { if (record.topic().endsWith("s1.b")) { assertRecordInserted(record, "s1.b", PK_FIELD, 1); return true; } else { assertHeartBeatRecord(record); heartbeatCount.incrementAndGet(); } } return false; }); Assertions.assertThat(heartbeatCount.get()).isGreaterThan(0); final Set<Long> lsn = new HashSet<>(); TestHelper.execute("INSERT INTO s1.a (aa) VALUES (11);"); Awaitility.await().atMost(TestHelper.waitTimeForRecords(), TimeUnit.SECONDS).until(() -> { final SourceRecord record = consumeRecord(); if (record != null) { lsn.add((Long) record.sourceOffset().get("lsn")); return lsn.size() >= 2; } return false; }); Assertions.assertThat(lsn.size()).isGreaterThanOrEqualTo(2); } @Test @FixFor("DBZ-1565") public void shouldWarnOnMissingHeartbeatForFilteredEvents() throws Exception { final LogInterceptor logInterceptor = new LogInterceptor(); startConnector(config -> config .with(PostgresConnectorConfig.POLL_INTERVAL_MS, "50") .with(PostgresConnectorConfig.TABLE_WHITELIST, "s1\\.b") .with(PostgresConnectorConfig.SNAPSHOT_MODE, SnapshotMode.NEVER), false); waitForStreamingToStart(); String statement = "CREATE SCHEMA s1;" + "CREATE TABLE s1.a (pk SERIAL, aa integer, PRIMARY KEY(pk));" + "CREATE TABLE s1.b (pk SERIAL, bb integer, PRIMARY KEY(pk));" + "INSERT INTO s1.a (aa) VALUES (11);" + "INSERT INTO s1.b (bb) VALUES (22);"; consumer = testConsumer(1); executeAndWait(statement); final int filteredCount = 10_100; TestHelper.execute( IntStream.range(0, filteredCount) .mapToObj(x -> "INSERT INTO s1.a (pk) VALUES (default);") .collect(Collectors.joining())); Awaitility.await().alias("WAL growing log message").pollInterval(Duration.ONE_SECOND).atMost(Duration.TEN_SECONDS).until(() -> logInterceptor.containsWarnMessage( "Received 10001 events which were all filtered out, so no offset could be committed. This prevents the replication slot from acknowledging the processed WAL offsets, causing a growing backlog of non-removeable WAL segments on the database server. Consider to either adjust your filter configuration or enable heartbeat events (via the heartbeat.interval.ms option) to avoid this situation.")); } @Test @FixFor("DBZ-911") @SkipWhenDecoderPluginNameIs(value = PGOUTPUT, reason = "Decoder synchronizes all schema columns when processing relation messages") public void shouldNotRefreshSchemaOnUnchangedToastedData() throws Exception { startConnector(config -> config .with(PostgresConnectorConfig.SCHEMA_REFRESH_MODE, PostgresConnectorConfig.SchemaRefreshMode.COLUMNS_DIFF_EXCLUDE_UNCHANGED_TOAST)); String toastedValue = RandomStringUtils.randomAlphanumeric(10000); // inserting a toasted value should /always/ produce a correct record String statement = "ALTER TABLE test_table ADD COLUMN not_toast integer; INSERT INTO test_table (not_toast, text) values (10, '" + toastedValue + "')"; consumer = testConsumer(1); executeAndWait(statement); SourceRecord record = consumer.remove(); // after record should contain the toasted value List<SchemaAndValueField> expectedAfter = Arrays.asList( new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 10), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, toastedValue)); assertRecordSchemaAndValues(expectedAfter, record, Envelope.FieldName.AFTER); // now we remove the toast column and update the not_toast column to see that our unchanged toast data // does not trigger a table schema refresh. the after schema should look the same as before. statement = "ALTER TABLE test_table DROP COLUMN text; update test_table set not_toast = 5 where not_toast = 10"; consumer.expects(1); executeAndWait(statement); assertWithTask(task -> { Table tbl = ((PostgresConnectorTask) task).getTaskContext().schema().tableFor(TableId.parse("public.test_table")); assertEquals(Arrays.asList("pk", "text", "not_toast"), tbl.retrieveColumnNames()); }); TestHelper.noTransactionActive(); } @Test @FixFor("DBZ-911") @SkipWhenDecoderPluginNameIsNot(value = SkipWhenDecoderPluginNameIsNot.DecoderPluginName.PGOUTPUT, reason = "Decoder synchronizes all schema columns when processing relation messages") public void shouldRefreshSchemaOnUnchangedToastedDataWhenSchemaChanged() throws Exception { startConnector(config -> config .with(PostgresConnectorConfig.SCHEMA_REFRESH_MODE, PostgresConnectorConfig.SchemaRefreshMode.COLUMNS_DIFF_EXCLUDE_UNCHANGED_TOAST)); String toastedValue = RandomStringUtils.randomAlphanumeric(10000); // inserting a toasted value should /always/ produce a correct record String statement = "ALTER TABLE test_table ADD COLUMN not_toast integer; INSERT INTO test_table (not_toast, text) values (10, '" + toastedValue + "')"; consumer = testConsumer(1); executeAndWait(statement); SourceRecord record = consumer.remove(); // after record should contain the toasted value List<SchemaAndValueField> expectedAfter = Arrays.asList( new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 10), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, toastedValue)); assertRecordSchemaAndValues(expectedAfter, record, Envelope.FieldName.AFTER); // now we remove the toast column and update the not_toast column to see that our unchanged toast data // does trigger a table schema refresh. the after schema should be reflect the changes statement = "ALTER TABLE test_table DROP COLUMN text; update test_table set not_toast = 5 where not_toast = 10"; consumer.expects(1); executeAndWait(statement); assertWithTask(task -> { Table tbl = ((PostgresConnectorTask) task).getTaskContext().schema().tableFor(TableId.parse("public.test_table")); assertEquals(Arrays.asList("pk", "not_toast"), tbl.retrieveColumnNames()); }); } @Test @FixFor("DBZ-842") public void shouldNotPropagateUnchangedToastedData() throws Exception { startConnector(config -> config .with(PostgresConnectorConfig.SCHEMA_REFRESH_MODE, PostgresConnectorConfig.SchemaRefreshMode.COLUMNS_DIFF_EXCLUDE_UNCHANGED_TOAST)); final String toastedValue1 = RandomStringUtils.randomAlphanumeric(10000); final String toastedValue2 = RandomStringUtils.randomAlphanumeric(10000); final String toastedValue3 = RandomStringUtils.randomAlphanumeric(10000); // inserting a toasted value should /always/ produce a correct record String statement = "ALTER TABLE test_table ADD COLUMN not_toast integer;" + "ALTER TABLE test_table ADD COLUMN mandatory_text TEXT NOT NULL DEFAULT '';" + "ALTER TABLE test_table ALTER COLUMN mandatory_text SET STORAGE EXTENDED;" + "ALTER TABLE test_table ALTER COLUMN mandatory_text SET DEFAULT '" + toastedValue3 + "';" + "INSERT INTO test_table (not_toast, text, mandatory_text) values (10, '" + toastedValue1 + "', '" + toastedValue1 + "');" + "INSERT INTO test_table (not_toast, text, mandatory_text) values (10, '" + toastedValue2 + "', '" + toastedValue2 + "');"; consumer = testConsumer(2); executeAndWait(statement); // after record should contain the toasted value assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 10), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, toastedValue1), new SchemaAndValueField("mandatory_text", SchemaBuilder.STRING_SCHEMA, toastedValue1)), consumer.remove(), Envelope.FieldName.AFTER); assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 10), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, toastedValue2), new SchemaAndValueField("mandatory_text", SchemaBuilder.STRING_SCHEMA, toastedValue2)), consumer.remove(), Envelope.FieldName.AFTER); statement = "UPDATE test_table SET not_toast = 2;" + "UPDATE test_table SET not_toast = 3;"; consumer.expects(6); executeAndWait(statement); consumer.process(record -> { assertWithTask(task -> { Table tbl = ((PostgresConnectorTask) task).getTaskContext().schema().tableFor(TableId.parse("public.test_table")); assertEquals(Arrays.asList("pk", "text", "not_toast", "mandatory_text"), tbl.retrieveColumnNames()); }); }); assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 2), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "insert"), new SchemaAndValueField("mandatory_text", SchemaBuilder.STRING_SCHEMA, "")), consumer.remove(), Envelope.FieldName.AFTER); assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 2), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, DecoderDifferences.optionalToastedValuePlaceholder()), new SchemaAndValueField("mandatory_text", SchemaBuilder.STRING_SCHEMA, DecoderDifferences.mandatoryToastedValuePlaceholder())), consumer.remove(), Envelope.FieldName.AFTER); assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 2), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, DecoderDifferences.optionalToastedValuePlaceholder()), new SchemaAndValueField("mandatory_text", SchemaBuilder.STRING_SCHEMA, DecoderDifferences.mandatoryToastedValuePlaceholder())), consumer.remove(), Envelope.FieldName.AFTER); assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 3), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "insert"), new SchemaAndValueField("mandatory_text", SchemaBuilder.STRING_SCHEMA, "")), consumer.remove(), Envelope.FieldName.AFTER); assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 3), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, DecoderDifferences.optionalToastedValuePlaceholder()), new SchemaAndValueField("mandatory_text", SchemaBuilder.STRING_SCHEMA, DecoderDifferences.mandatoryToastedValuePlaceholder())), consumer.remove(), Envelope.FieldName.AFTER); assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 3), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, DecoderDifferences.optionalToastedValuePlaceholder()), new SchemaAndValueField("mandatory_text", SchemaBuilder.STRING_SCHEMA, DecoderDifferences.mandatoryToastedValuePlaceholder())), consumer.remove(), Envelope.FieldName.AFTER); } @Test @FixFor("DBZ-1029") public void shouldReceiveChangesForTableWithoutPrimaryKey() throws Exception { TestHelper.execute( "DROP TABLE IF EXISTS test_table;", "CREATE TABLE test_table (id SERIAL, text TEXT);", "ALTER TABLE test_table REPLICA IDENTITY FULL"); startConnector(Function.identity(), false); consumer = testConsumer(1); // INSERT String statement = "INSERT INTO test_table (text) VALUES ('a');"; assertInsert( statement, Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 1), // SERIAL is NOT NULL implicitly new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "a"))); // UPDATE consumer.expects(1); executeAndWait("UPDATE test_table set text='b' WHERE id=1"); SourceRecord updatedRecord = consumer.remove(); VerifyRecord.isValidUpdate(updatedRecord); List<SchemaAndValueField> expectedBefore = Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "a")); assertRecordSchemaAndValues(expectedBefore, updatedRecord, Envelope.FieldName.BEFORE); List<SchemaAndValueField> expectedAfter = Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "b")); assertRecordSchemaAndValues(expectedAfter, updatedRecord, Envelope.FieldName.AFTER); // DELETE consumer.expects(2); executeAndWait("DELETE FROM test_table WHERE id=1"); SourceRecord deletedRecord = consumer.remove(); VerifyRecord.isValidDelete(deletedRecord); expectedBefore = Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "b")); assertRecordSchemaAndValues(expectedBefore, deletedRecord, Envelope.FieldName.BEFORE); expectedAfter = null; assertRecordSchemaAndValues(expectedAfter, deletedRecord, Envelope.FieldName.AFTER); } @Test() @FixFor("DBZ-1130") @SkipWhenDecoderPluginNameIsNot(value = WAL2JSON, reason = "WAL2JSON specific: Pass 'add-tables' stream parameter and verify it acts as a whitelist") public void testPassingStreamParams() throws Exception { // Verify that passing stream parameters works by using the WAL2JSON add-tables parameter which acts as a // whitelist. startConnector(config -> config .with(PostgresConnectorConfig.STREAM_PARAMS, "add-tables=s1.should_stream")); String statement = "CREATE SCHEMA s1;" + "CREATE TABLE s1.should_stream (pk SERIAL, aa integer, PRIMARY KEY(pk));" + "CREATE TABLE s1.should_not_stream (pk SERIAL, aa integer, PRIMARY KEY(pk));" + "INSERT INTO s1.should_not_stream (aa) VALUES (456);" + "INSERT INTO s1.should_stream (aa) VALUES (123);"; // Verify only one record made it consumer = testConsumer(1); executeAndWait(statement); // Verify the record that made it was from the whitelisted table assertRecordInserted("s1.should_stream", PK_FIELD, 1); assertThat(consumer.isEmpty()).isTrue(); } @Test() @FixFor("DBZ-1130") @SkipWhenDecoderPluginNameIsNot(value = WAL2JSON, reason = "WAL2JSON specific: Pass multiple stream parameters and values verifying they work") public void testPassingStreamMultipleParams() throws Exception { // Verify that passing multiple stream parameters and multiple parameter values works. startConnector(config -> config .with(PostgresConnectorConfig.STREAM_PARAMS, "add-tables=s1.should_stream,s2.*;filter-tables=s2.should_not_stream")); String statement = "CREATE SCHEMA s1;" + "CREATE SCHEMA s2;" + "CREATE TABLE s1.should_stream (pk SERIAL, aa integer, PRIMARY KEY(pk));" + "CREATE TABLE s2.should_stream (pk SERIAL, aa integer, PRIMARY KEY(pk));" + "CREATE TABLE s1.should_not_stream (pk SERIAL, aa integer, PRIMARY KEY(pk));" + "CREATE TABLE s2.should_not_stream (pk SERIAL, aa integer, PRIMARY KEY(pk));" + "INSERT INTO s1.should_not_stream (aa) VALUES (456);" + "INSERT INTO s2.should_not_stream (aa) VALUES (111);" + "INSERT INTO s1.should_stream (aa) VALUES (123);" + "INSERT INTO s2.should_stream (aa) VALUES (999);"; // Verify only the whitelisted record from s1 and s2 made it. consumer = testConsumer(2); executeAndWait(statement); // Verify the record that made it was from the whitelisted table assertRecordInserted("s1.should_stream", PK_FIELD, 1); assertRecordInserted("s2.should_stream", PK_FIELD, 1); assertThat(consumer.isEmpty()).isTrue(); } @Test @FixFor("DBZ-1146") public void shouldReceiveChangesForReplicaIdentityFullTableWithToastedValueTableFromSnapshot() throws Exception { testReceiveChangesForReplicaIdentityFullTableWithToastedValue(SchemaRefreshMode.COLUMNS_DIFF_EXCLUDE_UNCHANGED_TOAST, true); } @Test @FixFor("DBZ-1146") public void shouldReceiveChangesForReplicaIdentityFullTableWithToastedValueTableFromStreaming() throws Exception { testReceiveChangesForReplicaIdentityFullTableWithToastedValue(SchemaRefreshMode.COLUMNS_DIFF_EXCLUDE_UNCHANGED_TOAST, false); } @Test @FixFor("DBZ-1146") public void shouldReceiveChangesForReplicaIdentityFullTableWithToastedValueTableFromSnapshotFullDiff() throws Exception { testReceiveChangesForReplicaIdentityFullTableWithToastedValue(SchemaRefreshMode.COLUMNS_DIFF, true); } @Test @FixFor("DBZ-1146") public void shouldReceiveChangesForReplicaIdentityFullTableWithToastedValueTableFromStreamingFullDiff() throws Exception { testReceiveChangesForReplicaIdentityFullTableWithToastedValue(SchemaRefreshMode.COLUMNS_DIFF, false); } @Test() @FixFor("DBZ-1181") public void testEmptyChangesProducesHeartbeat() throws Exception { // the low heartbeat interval should make sure that a heartbeat message is emitted after each change record // received from Postgres startConnector(config -> config.with(Heartbeat.HEARTBEAT_INTERVAL, "100")); TestHelper.execute( "DROP TABLE IF EXISTS test_table;" + "CREATE TABLE test_table (id SERIAL, text TEXT);" + "INSERT INTO test_table (text) VALUES ('mydata');"); // Expecting 1 data change Awaitility.await().atMost(TestHelper.waitTimeForRecords(), TimeUnit.SECONDS).until(() -> { final SourceRecord record = consumeRecord(); return record != null && Envelope.isEnvelopeSchema(record.valueSchema()); }); // Expecting one empty DDL change String statement = "CREATE SCHEMA s1;"; TestHelper.execute(statement); // Expecting changes for the empty DDL change final Set<Long> lsns = new HashSet<>(); Awaitility.await().atMost(TestHelper.waitTimeForRecords(), TimeUnit.SECONDS).until(() -> { final SourceRecord record = consumeRecord(); Assertions.assertThat(record.valueSchema().name()).endsWith(".Heartbeat"); lsns.add((Long) record.sourceOffset().get("lsn")); // CREATE SCHEMA should change LSN return lsns.size() > 1; }); assertThat(consumer.isEmpty()).isTrue(); } @Test @FixFor("DBZ-1082") public void shouldHaveNoXminWhenNotEnabled() throws Exception { startConnector(config -> config.with(PostgresConnectorConfig.XMIN_FETCH_INTERVAL, "0")); TestHelper.execute("ALTER TABLE test_table REPLICA IDENTITY DEFAULT;"); String statement = "INSERT INTO test_table (text) VALUES ('no_xmin');"; executeAndWait(statement); // Verify the record that made it does not have an xmin SourceRecord rec = assertRecordInserted("public.test_table", PK_FIELD, 2); assertSourceInfo(rec, "postgres", "public", "test_table"); Struct source = ((Struct) rec.value()).getStruct("source"); assertThat(source.getInt64("xmin")).isNull(); assertThat(consumer.isEmpty()).isTrue(); } @Test @FixFor("DBZ-1082") public void shouldHaveXminWhenEnabled() throws Exception { startConnector(config -> config.with(PostgresConnectorConfig.XMIN_FETCH_INTERVAL, "10")); TestHelper.execute("ALTER TABLE test_table REPLICA IDENTITY DEFAULT;"); String statement = "INSERT INTO test_table (text) VALUES ('with_xmin');"; executeAndWait(statement); // Verify the record that made it does not have an xmin SourceRecord rec = assertRecordInserted("public.test_table", PK_FIELD, 2); assertSourceInfo(rec, "postgres", "public", "test_table"); Struct source = ((Struct) rec.value()).getStruct("source"); assertThat(source.getInt64("xmin")).isGreaterThan(0L); assertThat(consumer.isEmpty()).isTrue(); } @Test public void shouldProcessLargerTx() throws Exception { Testing.Print.disable(); final int numberOfEvents = 1000; startConnector(); waitForStreamingToStart(); final String topicPrefix = "public.test_table"; final String topicName = topicName(topicPrefix); final Stopwatch stopwatch = Stopwatch.reusable(); consumer = testConsumer(numberOfEvents); // This is not accurate as we measure also including the data but // it is sufficient to confirm there is no large difference // in runtime between the cases stopwatch.start(); executeAndWait(IntStream.rangeClosed(2, numberOfEvents + 1) .boxed() .map(x -> "INSERT INTO test_table (text) VALUES ('insert" + x + "')") .collect(Collectors.joining(";"))); stopwatch.stop(); final long firstRun = stopwatch.durations().statistics().getTotal().toMillis(); logger.info("Single tx duration = {} ms", firstRun); for (int i = 0; i < numberOfEvents; i++) { SourceRecord record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidInsert(record, PK_FIELD, i + 2); } consumer.expects(numberOfEvents); IntStream.rangeClosed(2, numberOfEvents + 1).forEach(x -> TestHelper.execute("INSERT INTO test_table (text) VALUES ('insert" + x + "')")); stopwatch.start(); // There should be no significant difference between many TX runtime and single large TX // We still add generous limits as the runtime is in seconds and we cannot provide // a stable scheduling environment consumer.await(3 * firstRun, TimeUnit.MILLISECONDS); stopwatch.stop(); for (int i = 0; i < numberOfEvents; i++) { SourceRecord record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidInsert(record, PK_FIELD, i + 1002); } logger.info("Many tx duration = {} ms", stopwatch.durations().statistics().getTotal().toMillis()); } @Test @SkipWhenDecoderPluginNameIsNot(value = SkipWhenDecoderPluginNameIsNot.DecoderPluginName.PGOUTPUT, reason = "Tests specifically that pgoutput gracefully skips these messages") public void shouldGracefullySkipTruncateMessages() throws Exception { startConnector(); waitForStreamingToStart(); consumer = testConsumer(1); executeAndWait("INSERT INTO test_table (text) values ('TRUNCATE TEST');"); SourceRecord record = consumer.remove(); assertEquals(TestHelper.topicName("public.test_table"), record.topic()); VerifyRecord.isValidInsert(record, PK_FIELD, 2); consumer.expects(0); TestHelper.execute("TRUNCATE TABLE public.test_table;"); consumer.await(TestHelper.waitTimeForRecords(), TimeUnit.SECONDS); assertTrue(consumer.isEmpty()); } @Test @FixFor("DBZ-1413") public void shouldStreamChangesForDataTypeAlias() throws Exception { TestHelper.execute("CREATE DOMAIN money2 AS money DEFAULT 0.0;"); TestHelper.execute("CREATE TABLE alias_table (pk SERIAL, data VARCHAR(50), salary money, salary2 money2, PRIMARY KEY(pk));"); startConnector(config -> config .with(PostgresConnectorConfig.DECIMAL_HANDLING_MODE, DecimalHandlingMode.DOUBLE) .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(PostgresConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL) .with(PostgresConnectorConfig.TABLE_WHITELIST, "public.alias_table"), false); waitForStreamingToStart(); consumer = testConsumer(1); executeAndWait("INSERT INTO alias_table (data, salary, salary2) values ('hello', 7.25, 8.25);"); SourceRecord rec = assertRecordInserted("public.alias_table", PK_FIELD, 1); assertSourceInfo(rec, "postgres", "public", "alias_table"); List<SchemaAndValueField> expected = Arrays.asList( new SchemaAndValueField("pk", SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("data", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "hello"), new SchemaAndValueField("salary", Decimal.builder(2).optional().build(), new BigDecimal(7.25)), new SchemaAndValueField("salary2", Decimal.builder(2).optional().build(), new BigDecimal(8.25))); assertRecordSchemaAndValues(expected, rec, Envelope.FieldName.AFTER); assertThat(consumer.isEmpty()).isTrue(); } @Test @FixFor("DBZ-1413") public void shouldStreamChangesForDomainAliasAlterTable() throws Exception { TestHelper.execute("CREATE TABLE alias_table (pk SERIAL, data VARCHAR(50), salary money, PRIMARY KEY(pk));"); startConnector(config -> config .with(PostgresConnectorConfig.DECIMAL_HANDLING_MODE, DecimalHandlingMode.DOUBLE) .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(PostgresConnectorConfig.SNAPSHOT_MODE, SnapshotMode.NEVER) .with(PostgresConnectorConfig.TABLE_WHITELIST, "public.alias_table") .with("column.propagate.source.type", "public.alias_table.salary3"), false); waitForStreamingToStart(); // Now that streaming has started, alter the table schema TestHelper.execute("CREATE DOMAIN money2 AS money DEFAULT 0.0;"); TestHelper.execute("CREATE DOMAIN money3 AS numeric(8,3) DEFAULT 0.0;"); TestHelper.execute("ALTER TABLE alias_table ADD COLUMN salary2 money2 NOT NULL;"); TestHelper.execute("ALTER TABLE alias_table ADD COLUMN salary3 money3 NOT NULL;"); consumer = testConsumer(1); executeAndWait("INSERT INTO alias_table (data, salary, salary2, salary3) values ('hello', 7.25, 8.25, 123.456);"); SourceRecord rec = assertRecordInserted("public.alias_table", PK_FIELD, 1); assertSourceInfo(rec, "postgres", "public", "alias_table"); List<SchemaAndValueField> expected = Arrays.asList( new SchemaAndValueField("pk", SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("data", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "hello"), new SchemaAndValueField("salary", Decimal.builder(2).optional().build(), new BigDecimal(7.25)), new SchemaAndValueField("salary2", Decimal.builder(2).build(), new BigDecimal(8.25)), new SchemaAndValueField("salary3", SchemaBuilder.float64() .parameter(TestHelper.TYPE_NAME_PARAMETER_KEY, "MONEY3") .parameter(TestHelper.TYPE_LENGTH_PARAMETER_KEY, "8") .parameter(TestHelper.TYPE_SCALE_PARAMETER_KEY, "3") .build(), 123.456)); assertRecordSchemaAndValues(expected, rec, Envelope.FieldName.AFTER); assertThat(consumer.isEmpty()).isTrue(); } @Test @FixFor("DBZ-1413") public void shouldStreamDomainAliasWithProperModifiers() throws Exception { TestHelper.execute("CREATE TABLE alias_table (pk SERIAL, PRIMARY KEY(pk));"); startConnector(config -> config .with(PostgresConnectorConfig.DECIMAL_HANDLING_MODE, DecimalHandlingMode.DOUBLE) .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(PostgresConnectorConfig.SNAPSHOT_MODE, SnapshotMode.NEVER) .with(PostgresConnectorConfig.TABLE_WHITELIST, "public.alias_table"), false); waitForStreamingToStart(); TestHelper.execute("CREATE DOMAIN varbit2 AS varbit(3);"); TestHelper.execute("ALTER TABLE public.alias_table ADD COLUMN value varbit2 NOT NULL;"); consumer = testConsumer(1); executeAndWait("INSERT INTO public.alias_table (value) VALUES (B'101');"); SourceRecord rec = assertRecordInserted("public.alias_table", PK_FIELD, 1); assertSourceInfo(rec, "postgres", "public", "alias_table"); List<SchemaAndValueField> expected = Arrays.asList( new SchemaAndValueField(PK_FIELD, SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("value", Bits.builder(3).build(), new byte[]{ 5, 0 })); assertRecordSchemaAndValues(expected, rec, Envelope.FieldName.AFTER); assertThat(consumer.isEmpty()).isTrue(); } @Test @FixFor("DBZ-1413") public void shouldStreamValuesForDomainTypeOfDomainType() throws Exception { TestHelper.execute("CREATE DOMAIN numeric82 as numeric(8,2);"); TestHelper.execute("CREATE DOMAIN numericex as numeric82;"); TestHelper.execute("CREATE TABLE alias_table (pk SERIAL, value numericex, PRIMARY KEY (pk));"); startConnector(config -> config .with(PostgresConnectorConfig.DECIMAL_HANDLING_MODE, DecimalHandlingMode.DOUBLE) .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(PostgresConnectorConfig.SNAPSHOT_MODE, SnapshotMode.NEVER) .with(PostgresConnectorConfig.TABLE_WHITELIST, "public.alias_table") .with("column.propagate.source.type", "public.alias_table.value"), false); waitForStreamingToStart(); consumer = testConsumer(1); executeAndWait("INSERT INTO alias_table (value) values (123.45);"); SourceRecord rec = assertRecordInserted("public.alias_table", PK_FIELD, 1); assertSourceInfo(rec, "postgres", "public", "alias_table"); List<SchemaAndValueField> expected = Arrays.asList( new SchemaAndValueField(PK_FIELD, SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("value", SpecialValueDecimal.builder(DecimalMode.DOUBLE, 8, 2) .optional() .parameter(TestHelper.TYPE_NAME_PARAMETER_KEY, "NUMERICEX") .parameter(TestHelper.TYPE_LENGTH_PARAMETER_KEY, "8") .parameter(TestHelper.TYPE_SCALE_PARAMETER_KEY, "2") .build(), 123.45)); assertRecordSchemaAndValues(expected, rec, Envelope.FieldName.AFTER); assertThat(consumer.isEmpty()).isTrue(); } @Test @FixFor("DBZ-1413") public void shouldStreamValuesForAliasLikeBaseTypes() throws Exception { TestHelper.execute("CREATE TABLE alias_table (pk SERIAL, PRIMARY KEY (pk));"); startConnector(config -> config .with(PostgresConnectorConfig.DECIMAL_HANDLING_MODE, DecimalHandlingMode.DOUBLE) .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(PostgresConnectorConfig.SNAPSHOT_MODE, SnapshotMode.NEVER) .with(PostgresConnectorConfig.TABLE_WHITELIST, "public.alias_table"), false); waitForStreamingToStart(); // note: skipped macaddr8 as that is only supported on PG10+ but was manually tested TestHelper.execute("CREATE DOMAIN bit2 AS BIT(3);"); TestHelper.execute("CREATE DOMAIN smallint2 AS smallint;"); TestHelper.execute("CREATE DOMAIN integer2 as integer;"); TestHelper.execute("CREATE DOMAIN bigint2 as bigint;"); TestHelper.execute("CREATE DOMAIN real2 as real;"); TestHelper.execute("CREATE DOMAIN bool2 AS BOOL DEFAULT false;"); TestHelper.execute("CREATE DOMAIN float82 as float8;"); TestHelper.execute("CREATE DOMAIN numeric2 as numeric(6,2);"); TestHelper.execute("CREATE DOMAIN string2 AS varchar(25) DEFAULT NULL;"); TestHelper.execute("CREATE DOMAIN date2 AS date;"); TestHelper.execute("CREATE DOMAIN time2 as time;"); TestHelper.execute("CREATE DOMAIN timetz2 as timetz;"); TestHelper.execute("CREATE DOMAIN timestamp2 as timestamp;"); TestHelper.execute("CREATE DOMAIN timestamptz2 AS timestamptz;"); TestHelper.execute("CREATE DOMAIN timewotz2 as time without time zone;"); TestHelper.execute("CREATE DOMAIN box2 as box;"); TestHelper.execute("CREATE DOMAIN circle2 as circle;"); TestHelper.execute("CREATE DOMAIN interval2 as interval;"); TestHelper.execute("CREATE DOMAIN line2 as line;"); TestHelper.execute("CREATE DOMAIN lseg2 as lseg;"); TestHelper.execute("CREATE DOMAIN path2 as path;"); TestHelper.execute("CREATE DOMAIN point2 as point;"); TestHelper.execute("CREATE DOMAIN polygon2 as polygon;"); TestHelper.execute("CREATE DOMAIN char2 as char;"); TestHelper.execute("CREATE DOMAIN text2 as text;"); TestHelper.execute("CREATE DOMAIN json2 as json;"); TestHelper.execute("CREATE DOMAIN xml2 as xml;"); TestHelper.execute("CREATE DOMAIN uuid2 as uuid;"); TestHelper.execute("CREATE DOMAIN varbit2 as varbit(3);"); TestHelper.execute("CREATE DOMAIN inet2 as inet;"); TestHelper.execute("CREATE DOMAIN cidr2 as cidr;"); TestHelper.execute("CREATE DOMAIN macaddr2 as macaddr;"); TestHelper.execute("ALTER TABLE alias_table " + "ADD COLUMN bit_base bit(3) NOT NULL, ADD COLUMN bit_alias bit2 NOT NULL, " + "ADD COLUMN smallint_base smallint NOT NULL, ADD COLUMN smallint_alias smallint2 NOT NULL, " + "ADD COLUMN integer_base integer NOT NULL, ADD COLUMN integer_alias integer2 NOT NULL, " + "ADD COLUMN bigint_base bigint NOT NULL, ADD COLUMN bigint_alias bigint2 NOT NULL, " + "ADD COLUMN real_base real NOT NULL, ADD COLUMN real_alias real2 NOT NULL, " + "ADD COLUMN float8_base float8 NOT NULL, ADD COLUMN float8_alias float82 NOT NULL, " + "ADD COLUMN numeric_base numeric(6,2) NOT NULL, ADD COLUMN numeric_alias numeric2 NOT NULL, " + "ADD COLUMN bool_base bool NOT NULL, ADD COLUMN bool_alias bool2 NOT NULL, " + "ADD COLUMN string_base varchar(25) NOT NULL, ADD COLUMN string_alias string2 NOT NULL, " + "ADD COLUMN date_base date NOT NULL, ADD COLUMN date_alias date2 NOT NULL, " + "ADD COLUMN time_base time NOT NULL, ADD COLUMN time_alias time2 NOT NULL, " + "ADD COLUMN timetz_base timetz NOT NULL, ADD COLUMN timetz_alias timetz2 NOT NULL, " + "ADD COLUMN timestamp_base timestamp NOT NULL, ADD COLUMN timestamp_alias timestamp2 NOT NULL, " + "ADD COLUMN timestamptz_base timestamptz NOT NULL, ADD COLUMN timestamptz_alias timestamptz2 NOT NULL, " + "ADD COLUMN timewottz_base time without time zone NOT NULL, ADD COLUMN timewottz_alias timewotz2 NOT NULL, " + "ADD COLUMN box_base box NOT NULL, ADD COLUMN box_alias box2 NOT NULL, " + "ADD COLUMN circle_base circle NOT NULL, ADD COLUMN circle_alias circle2 NOT NULL, " + "ADD COLUMN interval_base interval NOT NULL, ADD COLUMN interval_alias interval2 NOT NULL, " + "ADD COLUMN line_base line NOT NULL, ADD COLUMN line_alias line2 NOT NULL, " + "ADD COLUMN lseg_base lseg NOT NULL, ADD COLUMN lseg_alias lseg2 NOT NULL, " + "ADD COLUMN path_base path NOT NULL, ADD COLUMN path_alias path2 NOT NULL, " + "ADD COLUMN point_base point NOT NULL, ADD COLUMN point_alias point2 NOT NULL, " + "ADD COLUMN polygon_base polygon NOT NULL, ADD COLUMN polygon_alias polygon2 NOT NULL, " + "ADD COLUMN char_base char NOT NULL, ADD COLUMN char_alias char2 NOT NULL, " + "ADD COLUMN text_base text NOT NULL, ADD COLUMN text_alias text2 NOT NULL, " + "ADD COLUMN json_base json NOT NULL, ADD COLUMN json_alias json2 NOT NULL, " + "ADD COLUMN xml_base xml NOT NULL, ADD COLUMN xml_alias xml2 NOT NULL, " + "ADD COLUMN uuid_base UUID NOT NULL, ADD COLUMN uuid_alias uuid2 NOT NULL, " + "ADD COLUMN varbit_base varbit(3) NOT NULL, ADD COLUMN varbit_alias varbit2 NOT NULL," + "ADD COLUMN inet_base inet NOT NULL, ADD COLUMN inet_alias inet2 NOT NULL, " + "ADD COLUMN cidr_base cidr NOT NULL, ADD COLUMN cidr_alias cidr2 NOT NULL, " + "ADD COLUMN macaddr_base macaddr NOT NULL, ADD COLUMN macaddr_alias macaddr2 NOT NULL"); consumer = testConsumer(1); executeAndWait("INSERT INTO alias_table (" + "bit_base, bit_alias, " + "smallint_base, smallint_alias, " + "integer_base, integer_alias, " + "bigint_base, bigint_alias, " + "real_base, real_alias, " + "float8_base, float8_alias, " + "numeric_base, numeric_alias, " + "bool_base, bool_alias, " + "string_base, string_alias, " + "date_base, date_alias, " + "time_base, time_alias, " + "timetz_base, timetz_alias, " + "timestamp_base, timestamp_alias, " + "timestamptz_base, timestamptz_alias, " + "timewottz_base, timewottz_alias, " + "box_base, box_alias, " + "circle_base, circle_alias, " + "interval_base, interval_alias, " + "line_base, line_alias, " + "lseg_base, lseg_alias, " + "path_base, path_alias, " + "point_base, point_alias, " + "polygon_base, polygon_alias, " + "char_base, char_alias, " + "text_base, text_alias, " + "json_base, json_alias, " + "xml_base, xml_alias, " + "uuid_base, uuid_alias, " + "varbit_base, varbit_alias, " + "inet_base, inet_alias, " + "cidr_base, cidr_alias, " + "macaddr_base, macaddr_alias " + ") VALUES (" + "B'101', B'101', " + "1, 1, " + "1, 1, " + "1000, 1000, " + "3.14, 3.14, " + "3.14, 3.14, " + "1234.12, 1234.12, " + "true, true, " + "'hello', 'hello', " + "'2019-10-02', '2019-10-02', " + "'01:02:03', '01:02:03', " + "'01:02:03.123789Z', '01:02:03.123789Z', " + "'2019-10-02T01:02:03.123456', '2019-10-02T01:02:03.123456', " + "'2019-10-02T13:51:30.123456+02:00'::TIMESTAMPTZ, '2019-10-02T13:51:30.123456+02:00'::TIMESTAMPTZ, " + "'01:02:03', '01:02:03', " + "'(0,0),(1,1)', '(0,0),(1,1)', " + "'10,4,10', '10,4,10', " + "'1 year 2 months 3 days 4 hours 5 minutes 6 seconds', '1 year 2 months 3 days 4 hours 5 minutes 6 seconds', " + "'(0,0),(0,1)', '(0,0),(0,1)', " + "'((0,0),(0,1))', '((0,0),(0,1))', " + "'((0,0),(0,1),(0,2))', '((0,0),(0,1),(0,2))', " + "'(1,1)', '(1,1)', " + "'((0,0),(0,1),(1,0),(0,0))', '((0,0),(0,1),(1,0),(0,0))', " + "'a', 'a', " + "'Hello World', 'Hello World', " + "'{\"key\": \"value\"}', '{\"key\": \"value\"}', " + "XML('<foo>Hello</foo>'), XML('<foo>Hello</foo>'), " + "'40e6215d-b5c6-4896-987c-f30f3678f608', '40e6215d-b5c6-4896-987c-f30f3678f608', " + "B'101', B'101', " + "'192.168.0.1', '192.168.0.1', " + "'192.168/24', '192.168/24', " + "'08:00:2b:01:02:03', '08:00:2b:01:02:03' " + ");"); SourceRecord rec = assertRecordInserted("public.alias_table", PK_FIELD, 1); assertSourceInfo(rec, "postgres", "public", "alias_table"); assertRecordSchemaAndValues(schemasAndValuesForDomainAliasTypes(true), rec, Envelope.FieldName.AFTER); assertThat(consumer.isEmpty()).isTrue(); } @Test @FixFor("DBZ-920") public void shouldStreamEnumAsKnownType() throws Exception { // Specifically enable `column.propagate.source.type` here to validate later that the actual // type, length, and scale values are resolved correctly when paired with Enum types. TestHelper.execute("CREATE TABLE enum_table (pk SERIAL, PRIMARY KEY (pk));"); startConnector(config -> config .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(PostgresConnectorConfig.SNAPSHOT_MODE, SnapshotMode.NEVER) .with("column.propagate.source.type", "public.enum_table.value") .with(PostgresConnectorConfig.TABLE_WHITELIST, "public.enum_table"), false); waitForStreamingToStart(); // We create the enum type after streaming started to simulate some future schema change TestHelper.execute("CREATE TYPE test_type AS ENUM ('V1','V2');"); TestHelper.execute("ALTER TABLE enum_table ADD COLUMN value test_type NOT NULL"); consumer = testConsumer(1); executeAndWait("INSERT INTO enum_table (value) VALUES ('V1');"); SourceRecord rec = assertRecordInserted("public.enum_table", PK_FIELD, 1); assertSourceInfo(rec, "postgres", "public", "enum_table"); List<SchemaAndValueField> expected = Arrays.asList( new SchemaAndValueField(PK_FIELD, Schema.INT32_SCHEMA, 1), new SchemaAndValueField("value", Enum.builder("V1,V2") .parameter(TestHelper.TYPE_NAME_PARAMETER_KEY, "TEST_TYPE") .parameter(TestHelper.TYPE_LENGTH_PARAMETER_KEY, String.valueOf(Integer.MAX_VALUE)) .parameter(TestHelper.TYPE_SCALE_PARAMETER_KEY, "0") .build(), "V1")); assertRecordSchemaAndValues(expected, rec, Envelope.FieldName.AFTER); assertThat(consumer.isEmpty()).isTrue(); } @Test @FixFor("DBZ-1680") public void shouldStreamEnumsWhenIncludeUnknownDataTypesDisabled() throws Exception { // Specifically enable `column.propagate.source.type` here to validate later that the actual // type, length, and scale values are resolved correctly when paired with Enum types. TestHelper.execute("CREATE TYPE test_type AS ENUM ('V1','V2');"); TestHelper.execute("CREATE TABLE enum_table (pk SERIAL, data varchar(25) NOT NULL, value test_type NOT NULL DEFAULT 'V1', PRIMARY KEY (pk));"); startConnector(config -> config .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, false) .with(PostgresConnectorConfig.SNAPSHOT_MODE, SnapshotMode.NEVER) .with("column.propagate.source.type", "public.enum_table.value") .with(PostgresConnectorConfig.TABLE_WHITELIST, "public.enum_table"), false); waitForStreamingToStart(); consumer = testConsumer(1); executeAndWait("INSERT INTO enum_table (data) VALUES ('hello');"); SourceRecord rec = assertRecordInserted("public.enum_table", PK_FIELD, 1); assertSourceInfo(rec, "postgres", "public", "enum_table"); List<SchemaAndValueField> expected = Arrays.asList( new SchemaAndValueField(PK_FIELD, Schema.INT32_SCHEMA, 1), new SchemaAndValueField("data", Schema.STRING_SCHEMA, "hello"), new SchemaAndValueField("value", Enum.builder("V1,V2") .parameter(TestHelper.TYPE_NAME_PARAMETER_KEY, "TEST_TYPE") .parameter(TestHelper.TYPE_LENGTH_PARAMETER_KEY, String.valueOf(Integer.MAX_VALUE)) .parameter(TestHelper.TYPE_SCALE_PARAMETER_KEY, "0") .build(), "V1")); assertRecordSchemaAndValues(expected, rec, Envelope.FieldName.AFTER); assertThat(consumer.isEmpty()).isTrue(); } private long asEpochMicros(String timestamp) { Instant instant = LocalDateTime.parse(timestamp).atOffset(ZoneOffset.UTC).toInstant(); return instant.getEpochSecond() * 1_000_000 + instant.getNano() / 1_000; } private void testReceiveChangesForReplicaIdentityFullTableWithToastedValue(PostgresConnectorConfig.SchemaRefreshMode mode, boolean tablesBeforeStart) throws Exception { if (tablesBeforeStart) { TestHelper.execute( "DROP TABLE IF EXISTS test_table;", "CREATE TABLE test_table (id SERIAL, not_toast int, text TEXT);", "ALTER TABLE test_table REPLICA IDENTITY FULL"); } startConnector(config -> config.with(PostgresConnectorConfig.SCHEMA_REFRESH_MODE, mode), false); consumer = testConsumer(1); final String toastedValue = RandomStringUtils.randomAlphanumeric(10000); if (!tablesBeforeStart) { TestHelper.execute( "DROP TABLE IF EXISTS test_table;", "CREATE TABLE test_table (id SERIAL, not_toast int, text TEXT);", "ALTER TABLE test_table REPLICA IDENTITY FULL"); } // INSERT String statement = "INSERT INTO test_table (not_toast, text) VALUES (10,'" + toastedValue + "');"; assertInsert( statement, Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 1), // SERIAL is NOT NULL implicitly new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 10), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, toastedValue))); // UPDATE consumer.expects(1); executeAndWait("UPDATE test_table set not_toast = 20"); SourceRecord updatedRecord = consumer.remove(); if (DecoderDifferences.areToastedValuesPresentInSchema() || mode == SchemaRefreshMode.COLUMNS_DIFF_EXCLUDE_UNCHANGED_TOAST) { assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 10), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, toastedValue)), updatedRecord, Envelope.FieldName.BEFORE); assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 20), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, toastedValue)), updatedRecord, Envelope.FieldName.AFTER); } else { assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 10)), updatedRecord, Envelope.FieldName.BEFORE); assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 20)), updatedRecord, Envelope.FieldName.AFTER); } // DELETE consumer.expects(2); executeAndWait("DELETE FROM test_table"); SourceRecord deletedRecord = consumer.remove(); SourceRecord tombstoneRecord = consumer.remove(); assertThat(tombstoneRecord.value()).isNull(); assertThat(tombstoneRecord.valueSchema()).isNull(); if (DecoderDifferences.areToastedValuesPresentInSchema() || mode == SchemaRefreshMode.COLUMNS_DIFF_EXCLUDE_UNCHANGED_TOAST) { assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 20), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, toastedValue)), deletedRecord, Envelope.FieldName.BEFORE); } else { assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 20)), deletedRecord, Envelope.FieldName.BEFORE); } // INSERT null consumer.expects(1); statement = "INSERT INTO test_table (not_toast, text) VALUES (100, null);"; assertInsert( statement, Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 2), // SERIAL is NOT NULL implicitly new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 100), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, null))); // UPDATE null consumer.expects(1); executeAndWait("UPDATE test_table set not_toast = 200 WHERE id=2"); updatedRecord = consumer.remove(); if (DecoderDifferences.areToastedValuesPresentInSchema() || mode == SchemaRefreshMode.COLUMNS_DIFF_EXCLUDE_UNCHANGED_TOAST) { assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 2), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 100), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, null)), updatedRecord, Envelope.FieldName.BEFORE); assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 2), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 200), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, null)), updatedRecord, Envelope.FieldName.AFTER); } else { assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 2), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 100)), updatedRecord, Envelope.FieldName.BEFORE); assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 2), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 200)), updatedRecord, Envelope.FieldName.AFTER); } // DELETE null consumer.expects(2); executeAndWait("DELETE FROM test_table WHERE id=2"); deletedRecord = consumer.remove(); tombstoneRecord = consumer.remove(); assertThat(tombstoneRecord.value()).isNull(); assertThat(tombstoneRecord.valueSchema()).isNull(); if (DecoderDifferences.areToastedValuesPresentInSchema() || mode == SchemaRefreshMode.COLUMNS_DIFF_EXCLUDE_UNCHANGED_TOAST) { assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 2), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 200), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, null)), deletedRecord, Envelope.FieldName.BEFORE); } else { assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 2), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 200)), deletedRecord, Envelope.FieldName.BEFORE); } } private void assertHeartBeatRecordInserted() { assertFalse("records not generated", consumer.isEmpty()); assertHeartBeatRecord(consumer.remove()); } private void assertHeartBeatRecord(SourceRecord heartbeat) { assertEquals("__debezium-heartbeat." + TestHelper.TEST_SERVER, heartbeat.topic()); Struct key = (Struct) heartbeat.key(); assertThat(key.get("serverName")).isEqualTo(TestHelper.TEST_SERVER); Struct value = (Struct) heartbeat.value(); assertThat(value.getInt64("ts_ms")).isLessThanOrEqualTo(Instant.now().toEpochMilli()); } private Optional<SourceRecord> isHeartBeatRecordInserted() { assertFalse("records not generated", consumer.isEmpty()); final String heartbeatTopicName = "__debezium-heartbeat." + TestHelper.TEST_SERVER; SourceRecord record = consumer.remove(); if (!heartbeatTopicName.equals(record.topic())) { return Optional.of(record); } assertEquals(heartbeatTopicName, record.topic()); Struct key = (Struct) record.key(); assertThat(key.get("serverName")).isEqualTo(TestHelper.TEST_SERVER); Struct value = (Struct) record.value(); assertThat(value.getInt64("ts_ms")).isLessThanOrEqualTo(Instant.now().toEpochMilli()); return Optional.empty(); } private void assertInsert(String statement, List<SchemaAndValueField> expectedSchemaAndValuesByColumn) { assertInsert(statement, null, expectedSchemaAndValuesByColumn); } private void assertInsert(String statement, Integer pk, List<SchemaAndValueField> expectedSchemaAndValuesByColumn) { TableId table = tableIdFromInsertStmt(statement); String expectedTopicName = table.schema() + "." + table.table(); expectedTopicName = expectedTopicName.replaceAll("[ \"]", "_"); try { executeAndWait(statement); SourceRecord record = assertRecordInserted(expectedTopicName, pk != null ? PK_FIELD : null, pk); assertRecordOffsetAndSnapshotSource(record, false, false); assertSourceInfo(record, "postgres", table.schema(), table.table()); assertRecordSchemaAndValues(expectedSchemaAndValuesByColumn, record, Envelope.FieldName.AFTER); } catch (Exception e) { throw new RuntimeException(e); } } private SourceRecord assertRecordInserted(SourceRecord insertedRecord, String expectedTopicName, String pkColumn, Integer pk) throws InterruptedException { assertEquals(topicName(expectedTopicName), insertedRecord.topic()); if (pk != null) { VerifyRecord.isValidInsert(insertedRecord, pkColumn, pk); } else { VerifyRecord.isValidInsert(insertedRecord); } return insertedRecord; } private SourceRecord assertRecordInserted(String expectedTopicName, String pkColumn, Integer pk) throws InterruptedException { assertFalse("records not generated", consumer.isEmpty()); SourceRecord insertedRecord = consumer.remove(); return assertRecordInserted(insertedRecord, expectedTopicName, pkColumn, pk); } private void executeAndWait(String statements) throws Exception { TestHelper.execute(statements); consumer.await(TestHelper.waitTimeForRecords() * 30, TimeUnit.SECONDS); } private void executeAndWaitForNoRecords(String statements) throws Exception { TestHelper.execute(statements); consumer.await(5, TimeUnit.SECONDS); } }
debezium-connector-postgres/src/test/java/io/debezium/connector/postgresql/RecordsStreamProducerIT.java
/* * Copyright Debezium Authors. * * Licensed under the Apache Software License version 2.0, available at http://www.apache.org/licenses/LICENSE-2.0 */ package io.debezium.connector.postgresql; import static io.debezium.connector.postgresql.TestHelper.PK_FIELD; import static io.debezium.connector.postgresql.TestHelper.topicName; import static io.debezium.connector.postgresql.junit.SkipWhenDecoderPluginNameIs.DecoderPluginName.PGOUTPUT; import static io.debezium.connector.postgresql.junit.SkipWhenDecoderPluginNameIsNot.DecoderPluginName.WAL2JSON; import static junit.framework.TestCase.assertEquals; import static junit.framework.TestCase.assertTrue; import static org.fest.assertions.Assertions.assertThat; import static org.junit.Assert.assertFalse; import java.math.BigDecimal; import java.time.Instant; import java.time.LocalDateTime; import java.time.ZoneOffset; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.function.Function; import java.util.stream.Collectors; import java.util.stream.IntStream; import org.apache.commons.lang3.RandomStringUtils; import org.apache.kafka.connect.data.Decimal; import org.apache.kafka.connect.data.Schema; import org.apache.kafka.connect.data.SchemaBuilder; import org.apache.kafka.connect.data.Struct; import org.apache.kafka.connect.source.SourceRecord; import org.awaitility.Awaitility; import org.awaitility.Duration; import org.fest.assertions.Assertions; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestRule; import io.debezium.config.CommonConnectorConfig; import io.debezium.config.Configuration; import io.debezium.connector.postgresql.PostgresConnectorConfig.IntervalHandlingMode; import io.debezium.connector.postgresql.PostgresConnectorConfig.SchemaRefreshMode; import io.debezium.connector.postgresql.PostgresConnectorConfig.SnapshotMode; import io.debezium.connector.postgresql.junit.SkipTestDependingOnDecoderPluginNameRule; import io.debezium.connector.postgresql.junit.SkipWhenDecoderPluginNameIs; import io.debezium.connector.postgresql.junit.SkipWhenDecoderPluginNameIsNot; import io.debezium.data.Bits; import io.debezium.data.Enum; import io.debezium.data.Envelope; import io.debezium.data.SpecialValueDecimal; import io.debezium.data.VariableScaleDecimal; import io.debezium.data.VerifyRecord; import io.debezium.data.geometry.Point; import io.debezium.doc.FixFor; import io.debezium.heartbeat.Heartbeat; import io.debezium.jdbc.JdbcValueConverters.DecimalMode; import io.debezium.jdbc.TemporalPrecisionMode; import io.debezium.junit.ConditionalFail; import io.debezium.junit.ShouldFailWhen; import io.debezium.junit.logging.LogInterceptor; import io.debezium.relational.RelationalDatabaseConnectorConfig.DecimalHandlingMode; import io.debezium.relational.Table; import io.debezium.relational.TableId; import io.debezium.util.Stopwatch; import io.debezium.util.Testing; /** * Integration test for the {@link RecordsStreamProducer} class. This also tests indirectly the PG plugin functionality for * different use cases. * * @author Horia Chiorean ([email protected]) */ public class RecordsStreamProducerIT extends AbstractRecordsProducerTest { private TestConsumer consumer; @Rule public final TestRule skip = new SkipTestDependingOnDecoderPluginNameRule(); @Rule public TestRule conditionalFail = new ConditionalFail(); @Before public void before() throws Exception { // ensure the slot is deleted for each test TestHelper.dropAllSchemas(); TestHelper.executeDDL("init_postgis.ddl"); String statements = "CREATE SCHEMA IF NOT EXISTS public;" + "DROP TABLE IF EXISTS test_table;" + "CREATE TABLE test_table (pk SERIAL, text TEXT, PRIMARY KEY(pk));" + "CREATE TABLE table_with_interval (id SERIAL PRIMARY KEY, title VARCHAR(512) NOT NULL, time_limit INTERVAL DEFAULT '60 days'::INTERVAL NOT NULL);" + "INSERT INTO test_table(text) VALUES ('insert');"; TestHelper.execute(statements); Configuration.Builder configBuilder = TestHelper.defaultConfig() .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, false) .with(PostgresConnectorConfig.SCHEMA_BLACKLIST, "postgis"); // todo DBZ-766 are these really needed? if (TestHelper.decoderPlugin() == PostgresConnectorConfig.LogicalDecoder.PGOUTPUT) { configBuilder = configBuilder.with("database.replication", "database") .with("database.preferQueryMode", "simple") .with("assumeMinServerVersion.set", "9.4"); } Testing.Print.enable(); } private void startConnector(Function<Configuration.Builder, Configuration.Builder> customConfig, boolean waitForSnapshot) throws InterruptedException { start(PostgresConnector.class, new PostgresConnectorConfig(customConfig.apply(TestHelper.defaultConfig() .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, false) .with(PostgresConnectorConfig.SCHEMA_BLACKLIST, "postgis") .with(PostgresConnectorConfig.SNAPSHOT_MODE, waitForSnapshot ? SnapshotMode.INITIAL : SnapshotMode.NEVER)) .build()).getConfig()); assertConnectorIsRunning(); waitForStreamingToStart(); if (waitForSnapshot) { // Wait for snapshot to be in progress consumer = testConsumer(1); consumer.await(TestHelper.waitTimeForRecords(), TimeUnit.SECONDS); consumer.remove(); } } private void startConnector(Function<Configuration.Builder, Configuration.Builder> customConfig) throws InterruptedException { startConnector(customConfig, true); } private void startConnector() throws InterruptedException { startConnector(Function.identity(), true); } @Test public void shouldReceiveChangesForInsertsWithDifferentDataTypes() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(); consumer = testConsumer(1); // numerical types consumer.expects(1); assertInsert(INSERT_NUMERIC_TYPES_STMT, 1, schemasAndValuesForNumericType()); // numerical decimal types consumer.expects(1); assertInsert(INSERT_NUMERIC_DECIMAL_TYPES_STMT_NO_NAN, 1, schemasAndValuesForBigDecimalEncodedNumericTypes()); // string types consumer.expects(1); assertInsert(INSERT_STRING_TYPES_STMT, 1, schemasAndValuesForStringTypes()); // monetary types consumer.expects(1); assertInsert(INSERT_CASH_TYPES_STMT, 1, schemaAndValuesForMoneyTypes()); // bits and bytes consumer.expects(1); assertInsert(INSERT_BIN_TYPES_STMT, 1, schemaAndValuesForBinTypes()); // date and time consumer.expects(1); assertInsert(INSERT_DATE_TIME_TYPES_STMT, 1, schemaAndValuesForDateTimeTypes()); // text consumer.expects(1); assertInsert(INSERT_TEXT_TYPES_STMT, 1, schemasAndValuesForTextTypes()); // geom types consumer.expects(1); assertInsert(INSERT_GEOM_TYPES_STMT, 1, schemaAndValuesForGeomTypes()); // range types consumer.expects(1); assertInsert(INSERT_RANGE_TYPES_STMT, 1, schemaAndValuesForRangeTypes()); } @Test @FixFor("DBZ-1498") public void shouldReceiveChangesForIntervalAsString() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config .with(PostgresConnectorConfig.INTERVAL_HANDLING_MODE, IntervalHandlingMode.STRING)); consumer = testConsumer(1); // date and time consumer.expects(1); assertInsert(INSERT_DATE_TIME_TYPES_STMT, 1, schemaAndValuesForIntervalAsString()); } @Test @FixFor("DBZ-766") public void shouldReceiveChangesAfterConnectionRestart() throws Exception { TestHelper.dropDefaultReplicationSlot(); TestHelper.dropPublication(); startConnector(config -> config .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(PostgresConnectorConfig.SCHEMA_BLACKLIST, "postgis")); TestHelper.execute("CREATE TABLE t0 (pk SERIAL, d INTEGER, PRIMARY KEY(pk));"); consumer = testConsumer(1); waitForStreamingToStart(); // Insert new row and verify inserted executeAndWait("INSERT INTO t0 (pk,d) VALUES(1,1);"); assertRecordInserted("public.t0", PK_FIELD, 1); // simulate the connector is stopped stopConnector(); // Alter schema offline TestHelper.execute("ALTER TABLE t0 ADD COLUMN d2 INTEGER;"); TestHelper.execute("ALTER TABLE t0 ALTER COLUMN d SET NOT NULL;"); // Start the producer and wait; the wait is to guarantee the stream thread is polling // This appears to be a potential race condition problem startConnector(config -> config .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(PostgresConnectorConfig.SCHEMA_BLACKLIST, "postgis"), false); consumer = testConsumer(1); waitForStreamingToStart(); // Insert new row and verify inserted executeAndWait("INSERT INTO t0 (pk,d,d2) VALUES (2,1,3);"); assertRecordInserted("public.t0", PK_FIELD, 2); } @Test @FixFor("DBZ-1698") public void shouldReceiveUpdateSchemaAfterConnectionRestart() throws Exception { TestHelper.dropDefaultReplicationSlot(); TestHelper.dropPublication(); startConnector(config -> config .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(PostgresConnectorConfig.SCHEMA_BLACKLIST, "postgis") .with(PostgresConnectorConfig.DROP_SLOT_ON_STOP, false) .with(PostgresConnectorConfig.SCHEMA_REFRESH_MODE, SchemaRefreshMode.COLUMNS_DIFF_EXCLUDE_UNCHANGED_TOAST)); TestHelper.execute("CREATE TABLE t0 (pk SERIAL, d INTEGER, PRIMARY KEY(pk));"); consumer = testConsumer(1); waitForStreamingToStart(); // Insert new row and verify inserted executeAndWait("INSERT INTO t0 (pk,d) VALUES(1,1);"); assertRecordInserted("public.t0", PK_FIELD, 1); // simulate the connector is stopped stopConnector(); Thread.sleep(3000); // Add record offline TestHelper.execute("INSERT INTO t0 (pk,d) VALUES(2,2);"); // Alter schema offline TestHelper.execute("ALTER TABLE t0 ADD COLUMN d2 NUMERIC(10,6) DEFAULT 0 NOT NULL;"); TestHelper.execute("ALTER TABLE t0 ALTER COLUMN d SET NOT NULL;"); // Start the producer and wait; the wait is to guarantee the stream thread is polling // This appears to be a potential race condition problem startConnector(config -> config .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(PostgresConnectorConfig.SCHEMA_BLACKLIST, "postgis") .with(PostgresConnectorConfig.DROP_SLOT_ON_STOP, false) .with(PostgresConnectorConfig.SCHEMA_REFRESH_MODE, SchemaRefreshMode.COLUMNS_DIFF_EXCLUDE_UNCHANGED_TOAST), false); consumer = testConsumer(2); waitForStreamingToStart(); // Insert new row and verify inserted executeAndWait("INSERT INTO t0 (pk,d,d2) VALUES (3,1,3);"); assertRecordInserted("public.t0", PK_FIELD, 2); assertRecordInserted("public.t0", PK_FIELD, 3); stopConnector(); TestHelper.dropDefaultReplicationSlot(); TestHelper.dropPublication(); } @Test public void shouldReceiveChangesForInsertsCustomTypes() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config.with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true)); // custom types + null value assertInsert(INSERT_CUSTOM_TYPES_STMT, 1, schemasAndValuesForCustomTypes()); } @Test @FixFor("DBZ-1141") public void shouldProcessNotNullColumnsConnectDateTypes() throws Exception { final Struct before = testProcessNotNullColumns(TemporalPrecisionMode.CONNECT); if (before != null) { Assertions.assertThat(before.get("created_at")).isEqualTo(new java.util.Date(0)); Assertions.assertThat(before.get("created_at_tz")).isEqualTo("1970-01-01T00:00:00Z"); Assertions.assertThat(before.get("ctime")).isEqualTo(new java.util.Date(0)); Assertions.assertThat(before.get("ctime_tz")).isEqualTo("00:00:00Z"); Assertions.assertThat(before.get("cdate")).isEqualTo(new java.util.Date(0)); Assertions.assertThat(before.get("cmoney")).isEqualTo(new BigDecimal("0.00")); Assertions.assertThat(before.get("cbits")).isEqualTo(new byte[0]); } } @Test @FixFor("DBZ-1141") public void shouldProcessNotNullColumnsAdaptiveDateTypes() throws Exception { final Struct before = testProcessNotNullColumns(TemporalPrecisionMode.ADAPTIVE); if (before != null) { Assertions.assertThat(before.get("created_at")).isEqualTo(0L); Assertions.assertThat(before.get("created_at_tz")).isEqualTo("1970-01-01T00:00:00Z"); Assertions.assertThat(before.get("ctime")).isEqualTo(0L); Assertions.assertThat(before.get("ctime_tz")).isEqualTo("00:00:00Z"); Assertions.assertThat(before.get("cdate")).isEqualTo(0); Assertions.assertThat(before.get("cmoney")).isEqualTo(new BigDecimal("0.00")); Assertions.assertThat(before.get("cbits")).isEqualTo(new byte[0]); } } @Test @FixFor("DBZ-1141") public void shouldProcessNotNullColumnsAdaptiveMsDateTypes() throws Exception { final Struct before = testProcessNotNullColumns(TemporalPrecisionMode.ADAPTIVE_TIME_MICROSECONDS); if (before != null) { Assertions.assertThat(before.get("created_at")).isEqualTo(0L); Assertions.assertThat(before.get("created_at_tz")).isEqualTo("1970-01-01T00:00:00Z"); Assertions.assertThat(before.get("ctime")).isEqualTo(0L); Assertions.assertThat(before.get("ctime_tz")).isEqualTo("00:00:00Z"); Assertions.assertThat(before.get("cdate")).isEqualTo(0); Assertions.assertThat(before.get("cmoney")).isEqualTo(new BigDecimal("0.00")); Assertions.assertThat(before.get("cbits")).isEqualTo(new byte[0]); } } @Test @FixFor("DBZ-1158") public void shouldProcessNotNullColumnsFallbacksReplicaIdentity() throws Exception { // Use adaptive here as its the connector default final Struct before = testProcessNotNullColumns(TemporalPrecisionMode.ADAPTIVE); if (before != null) { Assertions.assertThat(before.get("csmallint")).isEqualTo((short) 0); Assertions.assertThat(before.get("cinteger")).isEqualTo(0); Assertions.assertThat(before.get("cbigint")).isEqualTo(0L); Assertions.assertThat(before.get("creal")).isEqualTo(0.f); Assertions.assertThat(before.get("cbool")).isEqualTo(false); Assertions.assertThat(before.get("cfloat8")).isEqualTo(0.0); Assertions.assertThat(before.get("cnumeric")).isEqualTo(new BigDecimal("0.00")); Assertions.assertThat(before.get("cvarchar")).isEqualTo(""); Assertions.assertThat(before.get("cbox")).isEqualTo(new byte[0]); Assertions.assertThat(before.get("ccircle")).isEqualTo(new byte[0]); Assertions.assertThat(before.get("cinterval")).isEqualTo(0L); Assertions.assertThat(before.get("cline")).isEqualTo(new byte[0]); Assertions.assertThat(before.get("clseg")).isEqualTo(new byte[0]); Assertions.assertThat(before.get("cpath")).isEqualTo(new byte[0]); Assertions.assertThat(before.get("cpoint")).isEqualTo(Point.createValue(Point.builder().build(), 0, 0)); Assertions.assertThat(before.get("cpolygon")).isEqualTo(new byte[0]); Assertions.assertThat(before.get("cchar")).isEqualTo(""); Assertions.assertThat(before.get("ctext")).isEqualTo(""); Assertions.assertThat(before.get("cjson")).isEqualTo(""); Assertions.assertThat(before.get("cxml")).isEqualTo(""); Assertions.assertThat(before.get("cuuid")).isEqualTo(""); Assertions.assertThat(before.get("cvarbit")).isEqualTo(new byte[0]); Assertions.assertThat(before.get("cinet")).isEqualTo(""); Assertions.assertThat(before.get("ccidr")).isEqualTo(""); Assertions.assertThat(before.get("cmacaddr")).isEqualTo(""); } } private Struct testProcessNotNullColumns(TemporalPrecisionMode temporalMode) throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(PostgresConnectorConfig.SCHEMA_BLACKLIST, "postgis") .with(PostgresConnectorConfig.TIME_PRECISION_MODE, temporalMode)); consumer.expects(1); executeAndWait("INSERT INTO not_null_table VALUES (default, 30, '2019-02-10 11:34:58', '2019-02-10 11:35:00', " + "'10:20:11', '10:20:12', '2019-02-01', '$20', B'101', 32766, 2147483646, 9223372036854775806, 3.14, " + "true, 3.14768, 1234.56, 'Test', '(0,0),(1,1)', '<(0,0),1>', '01:02:03', '{0,1,2}', '((0,0),(1,1))', " + "'((0,0),(0,1),(0,2))', '(1,1)', '((0,0),(0,1),(1,1))', 'a', 'hello world', '{\"key\": 123}', " + "'<doc><item>abc</item></doc>', 'a0eebc99-9c0b-4ef8-bb6d-6bb9bd380a11', B'101', '192.168.1.100', " + "'192.168.1', '08:00:2b:01:02:03');"); consumer.remove(); consumer.expects(1); executeAndWait("UPDATE not_null_table SET val=40"); final SourceRecord record = consumer.remove(); VerifyRecord.isValidUpdate(record, "pk", 1); VerifyRecord.isValid(record); return ((Struct) record.value()).getStruct("before"); } @Test(timeout = 30000) public void shouldReceiveChangesForInsertsWithPostgisTypes() throws Exception { TestHelper.executeDDL("postgis_create_tables.ddl"); startConnector(); consumer = testConsumer(1, "public"); // spatial_ref_sys produces a ton of records in the postgis schema consumer.setIgnoreExtraRecords(true); // need to wait for all the spatial_ref_sys to flow through and be ignored. // this exceeds the normal 2s timeout. TestHelper.execute("INSERT INTO public.dummy_table DEFAULT VALUES;"); consumer.await(TestHelper.waitTimeForRecords() * 10, TimeUnit.SECONDS); while (true) { if (!consumer.isEmpty()) { SourceRecord record = consumer.remove(); if (record.topic().endsWith(".public.dummy_table")) { break; } } } // now do it for actual testing // postgis types consumer.expects(1); assertInsert(INSERT_POSTGIS_TYPES_STMT, 1, schemaAndValuesForPostgisTypes()); } @Test(timeout = 30000) public void shouldReceiveChangesForInsertsWithPostgisArrayTypes() throws Exception { TestHelper.executeDDL("postgis_create_tables.ddl"); startConnector(); consumer = testConsumer(1, "public"); // spatial_ref_sys produces a ton of records in the postgis schema consumer.setIgnoreExtraRecords(true); // need to wait for all the spatial_ref_sys to flow through and be ignored. // this exceeds the normal 2s timeout. TestHelper.execute("INSERT INTO public.dummy_table DEFAULT VALUES;"); consumer.await(TestHelper.waitTimeForRecords() * 10, TimeUnit.SECONDS); while (true) { if (!consumer.isEmpty()) { SourceRecord record = consumer.remove(); if (record.topic().endsWith(".public.dummy_table")) { break; } } } // now do it for actual testing // postgis types consumer.expects(1); assertInsert(INSERT_POSTGIS_ARRAY_TYPES_STMT, 1, schemaAndValuesForPostgisArrayTypes()); } @Test @ShouldFailWhen(DecoderDifferences.AreQuotedIdentifiersUnsupported.class) // TODO DBZ-493 public void shouldReceiveChangesForInsertsWithQuotedNames() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(); // Quoted column name assertInsert(INSERT_QUOTED_TYPES_STMT, 1, schemasAndValuesForQuotedTypes()); } @Test public void shouldReceiveChangesForInsertsWithArrayTypes() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(); assertInsert(INSERT_ARRAY_TYPES_STMT, 1, schemasAndValuesForArrayTypes()); } @Test @FixFor("DBZ-1029") public void shouldReceiveChangesForInsertsIndependentOfReplicaIdentity() throws Exception { // insert statement should not be affected by replica identity settings in any way startConnector(); TestHelper.execute("ALTER TABLE test_table REPLICA IDENTITY DEFAULT;"); String statement = "INSERT INTO test_table (text) VALUES ('pk_and_default');"; assertInsert(statement, 2, Collections.singletonList(new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "pk_and_default"))); consumer.expects(1); TestHelper.execute("ALTER TABLE test_table REPLICA IDENTITY FULL;"); statement = "INSERT INTO test_table (text) VALUES ('pk_and_full');"; assertInsert(statement, 3, Collections.singletonList(new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "pk_and_full"))); consumer.expects(1); TestHelper.execute("ALTER TABLE test_table DROP CONSTRAINT test_table_pkey CASCADE;"); statement = "INSERT INTO test_table (pk, text) VALUES (4, 'no_pk_and_full');"; assertInsert(statement, 4, Collections.singletonList(new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "no_pk_and_full"))); consumer.expects(1); TestHelper.execute("ALTER TABLE test_table REPLICA IDENTITY DEFAULT;"); statement = "INSERT INTO test_table (pk, text) VALUES (5, 'no_pk_and_default');"; assertInsert(statement, 5, Collections.singletonList(new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "no_pk_and_default"))); } @Test @FixFor("DBZ-478") public void shouldReceiveChangesForNullInsertsWithArrayTypes() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(); assertInsert(INSERT_ARRAY_TYPES_WITH_NULL_VALUES_STMT, 1, schemasAndValuesForArrayTypesWithNullValues()); } @Test public void shouldReceiveChangesForNewTable() throws Exception { String statement = "CREATE SCHEMA s1;" + "CREATE TABLE s1.a (pk SERIAL, aa integer, PRIMARY KEY(pk));" + "INSERT INTO s1.a (aa) VALUES (11);"; startConnector(); executeAndWait(statement); assertRecordInserted("s1.a", PK_FIELD, 1); } @Test public void shouldReceiveChangesForRenamedTable() throws Exception { String statement = "DROP TABLE IF EXISTS renamed_test_table;" + "ALTER TABLE test_table RENAME TO renamed_test_table;" + "INSERT INTO renamed_test_table (text) VALUES ('new');"; startConnector(); executeAndWait(statement); assertRecordInserted("public.renamed_test_table", PK_FIELD, 2); } @Test @SkipWhenDecoderPluginNameIs(value = PGOUTPUT, reason = "An update on a table with no primary key and default replica throws PSQLException as tables must have a PK") public void shouldReceiveChangesForUpdates() throws Exception { startConnector(); executeAndWait("UPDATE test_table set text='update' WHERE pk=1"); // the update record should be the last record SourceRecord updatedRecord = consumer.remove(); String topicName = topicName("public.test_table"); assertEquals(topicName, updatedRecord.topic()); VerifyRecord.isValidUpdate(updatedRecord, PK_FIELD, 1); // default replica identity only fires previous values for PK changes List<SchemaAndValueField> expectedAfter = Collections.singletonList( new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "update")); assertRecordSchemaAndValues(expectedAfter, updatedRecord, Envelope.FieldName.AFTER); // alter the table and set its replica identity to full the issue another update consumer.expects(1); TestHelper.execute("ALTER TABLE test_table REPLICA IDENTITY FULL"); executeAndWait("UPDATE test_table set text='update2' WHERE pk=1"); updatedRecord = consumer.remove(); assertEquals(topicName, updatedRecord.topic()); VerifyRecord.isValidUpdate(updatedRecord, PK_FIELD, 1); // now we should get both old and new values List<SchemaAndValueField> expectedBefore = Collections.singletonList(new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "update")); assertRecordSchemaAndValues(expectedBefore, updatedRecord, Envelope.FieldName.BEFORE); expectedAfter = Collections.singletonList(new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "update2")); assertRecordSchemaAndValues(expectedAfter, updatedRecord, Envelope.FieldName.AFTER); // without PK and with REPLICA IDENTITY FULL we still getting all fields 'before' and all fields 'after' TestHelper.execute("ALTER TABLE test_table DROP CONSTRAINT test_table_pkey CASCADE;"); consumer.expects(1); executeAndWait("UPDATE test_table SET text = 'update3' WHERE pk = 1;"); updatedRecord = consumer.remove(); assertEquals(topicName, updatedRecord.topic()); expectedBefore = Collections.singletonList(new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "update2")); assertRecordSchemaAndValues(expectedBefore, updatedRecord, Envelope.FieldName.BEFORE); expectedAfter = Collections.singletonList(new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "update3")); assertRecordSchemaAndValues(expectedAfter, updatedRecord, Envelope.FieldName.AFTER); // without PK and with REPLICA IDENTITY DEFAULT we will get nothing TestHelper.execute("ALTER TABLE test_table REPLICA IDENTITY DEFAULT;"); consumer.expects(0); executeAndWaitForNoRecords("UPDATE test_table SET text = 'no_pk_and_default' WHERE pk = 1;"); assertThat(consumer.isEmpty()).isTrue(); } @Test public void shouldReceiveChangesForUpdatesWithColumnChanges() throws Exception { // add a new column String statements = "ALTER TABLE test_table ADD COLUMN uvc VARCHAR(2);" + "ALTER TABLE test_table REPLICA IDENTITY FULL;" + "UPDATE test_table SET uvc ='aa' WHERE pk = 1;"; startConnector(); consumer = testConsumer(1); executeAndWait(statements); // the update should be the last record SourceRecord updatedRecord = consumer.remove(); String topicName = topicName("public.test_table"); assertEquals(topicName, updatedRecord.topic()); VerifyRecord.isValidUpdate(updatedRecord, PK_FIELD, 1); // now check we got the updated value (the old value should be null, the new one whatever we set) List<SchemaAndValueField> expectedBefore = Collections.singletonList(new SchemaAndValueField("uvc", null, null)); assertRecordSchemaAndValues(expectedBefore, updatedRecord, Envelope.FieldName.BEFORE); List<SchemaAndValueField> expectedAfter = Collections.singletonList(new SchemaAndValueField("uvc", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "aa")); assertRecordSchemaAndValues(expectedAfter, updatedRecord, Envelope.FieldName.AFTER); // rename a column statements = "ALTER TABLE test_table RENAME COLUMN uvc to xvc;" + "UPDATE test_table SET xvc ='bb' WHERE pk = 1;"; consumer.expects(1); executeAndWait(statements); updatedRecord = consumer.remove(); VerifyRecord.isValidUpdate(updatedRecord, PK_FIELD, 1); // now check we got the updated value (the old value should be null, the new one whatever we set) expectedBefore = Collections.singletonList(new SchemaAndValueField("xvc", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "aa")); assertRecordSchemaAndValues(expectedBefore, updatedRecord, Envelope.FieldName.BEFORE); expectedAfter = Collections.singletonList(new SchemaAndValueField("xvc", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "bb")); assertRecordSchemaAndValues(expectedAfter, updatedRecord, Envelope.FieldName.AFTER); // drop a column statements = "ALTER TABLE test_table DROP COLUMN xvc;" + "UPDATE test_table SET text ='update' WHERE pk = 1;"; consumer.expects(1); executeAndWait(statements); updatedRecord = consumer.remove(); VerifyRecord.isValidUpdate(updatedRecord, PK_FIELD, 1); // change a column type statements = "ALTER TABLE test_table ADD COLUMN modtype INTEGER;" + "INSERT INTO test_table (pk,modtype) VALUES (2,1);"; consumer.expects(1); executeAndWait(statements); updatedRecord = consumer.remove(); VerifyRecord.isValidInsert(updatedRecord, PK_FIELD, 2); assertRecordSchemaAndValues( Collections.singletonList(new SchemaAndValueField("modtype", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 1)), updatedRecord, Envelope.FieldName.AFTER); statements = "ALTER TABLE test_table ALTER COLUMN modtype TYPE SMALLINT;" + "UPDATE test_table SET modtype = 2 WHERE pk = 2;"; consumer.expects(1); executeAndWait(statements); updatedRecord = consumer.remove(); VerifyRecord.isValidUpdate(updatedRecord, PK_FIELD, 2); assertRecordSchemaAndValues( Collections.singletonList(new SchemaAndValueField("modtype", SchemaBuilder.OPTIONAL_INT16_SCHEMA, (short) 1)), updatedRecord, Envelope.FieldName.BEFORE); assertRecordSchemaAndValues( Collections.singletonList(new SchemaAndValueField("modtype", SchemaBuilder.OPTIONAL_INT16_SCHEMA, (short) 2)), updatedRecord, Envelope.FieldName.AFTER); } @Test public void shouldReceiveChangesForUpdatesWithPKChanges() throws Exception { startConnector(); consumer = testConsumer(3); executeAndWait("UPDATE test_table SET text = 'update', pk = 2"); String topicName = topicName("public.test_table"); // first should be a delete of the old pk SourceRecord deleteRecord = consumer.remove(); assertEquals(topicName, deleteRecord.topic()); VerifyRecord.isValidDelete(deleteRecord, PK_FIELD, 1); // followed by a tombstone of the old pk SourceRecord tombstoneRecord = consumer.remove(); assertEquals(topicName, tombstoneRecord.topic()); VerifyRecord.isValidTombstone(tombstoneRecord, PK_FIELD, 1); // and finally insert of the new value SourceRecord insertRecord = consumer.remove(); assertEquals(topicName, insertRecord.topic()); VerifyRecord.isValidInsert(insertRecord, PK_FIELD, 2); } @Test @FixFor("DBZ-582") public void shouldReceiveChangesForUpdatesWithPKChangesWithoutTombstone() throws Exception { startConnector(config -> config .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(CommonConnectorConfig.TOMBSTONES_ON_DELETE, false)); consumer = testConsumer(2); executeAndWait("UPDATE test_table SET text = 'update', pk = 2"); String topicName = topicName("public.test_table"); // first should be a delete of the old pk SourceRecord deleteRecord = consumer.remove(); assertEquals(topicName, deleteRecord.topic()); VerifyRecord.isValidDelete(deleteRecord, PK_FIELD, 1); // followed by insert of the new value SourceRecord insertRecord = consumer.remove(); assertEquals(topicName, insertRecord.topic()); VerifyRecord.isValidInsert(insertRecord, PK_FIELD, 2); } @Test public void shouldReceiveChangesForDefaultValues() throws Exception { String statements = "ALTER TABLE test_table REPLICA IDENTITY FULL;" + "ALTER TABLE test_table ADD COLUMN default_column TEXT DEFAULT 'default';" + "INSERT INTO test_table (text) VALUES ('update');"; startConnector(); consumer = testConsumer(1); executeAndWait(statements); SourceRecord insertRecord = consumer.remove(); assertEquals(topicName("public.test_table"), insertRecord.topic()); VerifyRecord.isValidInsert(insertRecord, PK_FIELD, 2); List<SchemaAndValueField> expectedSchemaAndValues = Arrays.asList( new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "update"), new SchemaAndValueField("default_column", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "default")); assertRecordSchemaAndValues(expectedSchemaAndValues, insertRecord, Envelope.FieldName.AFTER); } @Test public void shouldReceiveChangesForTypeConstraints() throws Exception { // add a new column String statements = "ALTER TABLE test_table ADD COLUMN num_val NUMERIC(5,2);" + "ALTER TABLE test_table REPLICA IDENTITY FULL;" + "UPDATE test_table SET num_val = 123.45 WHERE pk = 1;"; startConnector(); consumer = testConsumer(1); executeAndWait(statements); // the update should be the last record SourceRecord updatedRecord = consumer.remove(); String topicName = topicName("public.test_table"); assertEquals(topicName, updatedRecord.topic()); VerifyRecord.isValidUpdate(updatedRecord, PK_FIELD, 1); // now check we got the updated value (the old value should be null, the new one whatever we set) List<SchemaAndValueField> expectedBefore = Collections.singletonList(new SchemaAndValueField("num_val", null, null)); assertRecordSchemaAndValues(expectedBefore, updatedRecord, Envelope.FieldName.BEFORE); List<SchemaAndValueField> expectedAfter = Collections.singletonList( new SchemaAndValueField("num_val", Decimal.builder(2).parameter(TestHelper.PRECISION_PARAMETER_KEY, "5").optional().build(), new BigDecimal("123.45"))); assertRecordSchemaAndValues(expectedAfter, updatedRecord, Envelope.FieldName.AFTER); // change a constraint statements = "ALTER TABLE test_table ALTER COLUMN num_val TYPE NUMERIC(6,1);" + "INSERT INTO test_table (pk,num_val) VALUES (2,123.41);"; consumer.expects(1); executeAndWait(statements); updatedRecord = consumer.remove(); VerifyRecord.isValidInsert(updatedRecord, PK_FIELD, 2); assertRecordSchemaAndValues( Collections.singletonList(new SchemaAndValueField("num_val", Decimal.builder(1).parameter(TestHelper.PRECISION_PARAMETER_KEY, "6").optional().build(), new BigDecimal("123.4"))), updatedRecord, Envelope.FieldName.AFTER); statements = "ALTER TABLE test_table ALTER COLUMN num_val TYPE NUMERIC;" + "INSERT INTO test_table (pk,num_val) VALUES (3,123.4567);"; consumer.expects(1); executeAndWait(statements); updatedRecord = consumer.remove(); final Struct dvs = new Struct(VariableScaleDecimal.schema()); dvs.put("scale", 4).put("value", new BigDecimal("123.4567").unscaledValue().toByteArray()); VerifyRecord.isValidInsert(updatedRecord, PK_FIELD, 3); assertRecordSchemaAndValues( Collections.singletonList(new SchemaAndValueField("num_val", VariableScaleDecimal.builder().optional().build(), dvs)), updatedRecord, Envelope.FieldName.AFTER); statements = "ALTER TABLE test_table ALTER COLUMN num_val TYPE DECIMAL(12,4);" + "INSERT INTO test_table (pk,num_val) VALUES (4,2.48);"; consumer.expects(1); executeAndWait(statements); updatedRecord = consumer.remove(); VerifyRecord.isValidInsert(updatedRecord, PK_FIELD, 4); assertRecordSchemaAndValues( Collections.singletonList(new SchemaAndValueField("num_val", Decimal.builder(4).parameter(TestHelper.PRECISION_PARAMETER_KEY, "12").optional().build(), new BigDecimal("2.4800"))), updatedRecord, Envelope.FieldName.AFTER); statements = "ALTER TABLE test_table ALTER COLUMN num_val TYPE DECIMAL(12);" + "INSERT INTO test_table (pk,num_val) VALUES (5,1238);"; consumer.expects(1); executeAndWait(statements); updatedRecord = consumer.remove(); VerifyRecord.isValidInsert(updatedRecord, PK_FIELD, 5); assertRecordSchemaAndValues( Collections.singletonList(new SchemaAndValueField("num_val", Decimal.builder(0).parameter(TestHelper.PRECISION_PARAMETER_KEY, "12").optional().build(), new BigDecimal("1238"))), updatedRecord, Envelope.FieldName.AFTER); statements = "ALTER TABLE test_table ALTER COLUMN num_val TYPE DECIMAL;" + "INSERT INTO test_table (pk,num_val) VALUES (6,1225.1);"; consumer.expects(1); executeAndWait(statements); updatedRecord = consumer.remove(); final Struct dvs2 = new Struct(VariableScaleDecimal.schema()); dvs2.put("scale", 1).put("value", new BigDecimal("1225.1").unscaledValue().toByteArray()); VerifyRecord.isValidInsert(updatedRecord, PK_FIELD, 6); assertRecordSchemaAndValues( Collections.singletonList(new SchemaAndValueField("num_val", VariableScaleDecimal.builder().optional().build(), dvs2)), updatedRecord, Envelope.FieldName.AFTER); statements = "ALTER TABLE test_table ALTER COLUMN num_val SET NOT NULL;" + "INSERT INTO test_table (pk,num_val) VALUES (7,1976);"; consumer.expects(1); executeAndWait(statements); updatedRecord = consumer.remove(); dvs2.put("scale", 0).put("value", new BigDecimal("1976").unscaledValue().toByteArray()); VerifyRecord.isValidInsert(updatedRecord, PK_FIELD, 7); assertRecordSchemaAndValues( Collections.singletonList(new SchemaAndValueField("num_val", VariableScaleDecimal.builder().build(), dvs2)), updatedRecord, Envelope.FieldName.AFTER); } @Test public void shouldReceiveChangesForDeletes() throws Exception { // add a new entry and remove both String statements = "INSERT INTO test_table (text) VALUES ('insert2');" + "DELETE FROM test_table WHERE pk > 0;"; startConnector(); consumer = testConsumer(5); executeAndWait(statements); String topicPrefix = "public.test_table"; String topicName = topicName(topicPrefix); assertRecordInserted(topicPrefix, PK_FIELD, 2); // first entry removed SourceRecord record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidDelete(record, PK_FIELD, 1); // followed by a tombstone record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidTombstone(record, PK_FIELD, 1); // second entry removed record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidDelete(record, PK_FIELD, 2); // followed by a tombstone record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidTombstone(record, PK_FIELD, 2); } @Test @FixFor("DBZ-582") public void shouldReceiveChangesForDeletesWithoutTombstone() throws Exception { // add a new entry and remove both String statements = "INSERT INTO test_table (text) VALUES ('insert2');" + "DELETE FROM test_table WHERE pk > 0;"; startConnector(config -> config .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(CommonConnectorConfig.TOMBSTONES_ON_DELETE, false)); consumer = testConsumer(3); executeAndWait(statements); String topicPrefix = "public.test_table"; String topicName = topicName(topicPrefix); assertRecordInserted(topicPrefix, PK_FIELD, 2); // first entry removed SourceRecord record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidDelete(record, PK_FIELD, 1); // second entry removed record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidDelete(record, PK_FIELD, 2); } @Test @SkipWhenDecoderPluginNameIs(value = PGOUTPUT, reason = "A delete on a table with no primary key and default replica throws PSQLException as tables must have a PK") public void shouldReceiveChangesForDeletesDependingOnReplicaIdentity() throws Exception { String topicName = topicName("public.test_table"); // With PK we should get delete event with default level of replica identity String statement = "ALTER TABLE test_table REPLICA IDENTITY DEFAULT;" + "DELETE FROM test_table WHERE pk = 1;"; startConnector(config -> config .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(CommonConnectorConfig.TOMBSTONES_ON_DELETE, false)); consumer = testConsumer(1); executeAndWait(statement); SourceRecord record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidDelete(record, PK_FIELD, 1); // Without PK we should get delete event with REPLICA IDENTITY FULL statement = "ALTER TABLE test_table REPLICA IDENTITY FULL;" + "ALTER TABLE test_table DROP CONSTRAINT test_table_pkey CASCADE;" + "INSERT INTO test_table (pk, text) VALUES (2, 'insert2');" + "DELETE FROM test_table WHERE pk = 2;"; consumer.expects(2); executeAndWait(statement); assertRecordInserted("public.test_table", PK_FIELD, 2); record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidDelete(record, PK_FIELD, 2); // Without PK and without REPLICA IDENTITY FULL we will not get delete event statement = "ALTER TABLE test_table REPLICA IDENTITY DEFAULT;" + "INSERT INTO test_table (pk, text) VALUES (3, 'insert3');" + "DELETE FROM test_table WHERE pk = 3;"; consumer.expects(1); executeAndWait(statement); assertRecordInserted("public.test_table", PK_FIELD, 3); assertThat(consumer.isEmpty()).isTrue(); } @Test public void shouldReceiveNumericTypeAsDouble() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config.with(PostgresConnectorConfig.DECIMAL_HANDLING_MODE, DecimalHandlingMode.DOUBLE)); assertInsert(INSERT_NUMERIC_DECIMAL_TYPES_STMT, 1, schemasAndValuesForDoubleEncodedNumericTypes()); } @Test @FixFor("DBZ-611") public void shouldReceiveNumericTypeAsString() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config.with(PostgresConnectorConfig.DECIMAL_HANDLING_MODE, DecimalHandlingMode.STRING)); assertInsert(INSERT_NUMERIC_DECIMAL_TYPES_STMT, 1, schemasAndValuesForStringEncodedNumericTypes()); } @Test @FixFor("DBZ-898") public void shouldReceiveHStoreTypeWithSingleValueAsMap() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config.with(PostgresConnectorConfig.HSTORE_HANDLING_MODE, PostgresConnectorConfig.HStoreHandlingMode.MAP)); assertInsert(INSERT_HSTORE_TYPE_STMT, 1, schemaAndValueFieldForMapEncodedHStoreType()); } @Test @FixFor("DBZ-898") public void shouldReceiveHStoreTypeWithMultipleValuesAsMap() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config.with(PostgresConnectorConfig.HSTORE_HANDLING_MODE, PostgresConnectorConfig.HStoreHandlingMode.MAP)); assertInsert(INSERT_HSTORE_TYPE_WITH_MULTIPLE_VALUES_STMT, 1, schemaAndValueFieldForMapEncodedHStoreTypeWithMultipleValues()); } @Test @FixFor("DBZ-898") public void shouldReceiveHStoreTypeWithNullValuesAsMap() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config.with(PostgresConnectorConfig.HSTORE_HANDLING_MODE, PostgresConnectorConfig.HStoreHandlingMode.MAP)); assertInsert(INSERT_HSTORE_TYPE_WITH_NULL_VALUES_STMT, 1, schemaAndValueFieldForMapEncodedHStoreTypeWithNullValues()); } @Test @FixFor("DBZ-898") public void shouldReceiveHStoreTypeWithSpecialCharactersInValuesAsMap() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config.with(PostgresConnectorConfig.HSTORE_HANDLING_MODE, PostgresConnectorConfig.HStoreHandlingMode.MAP)); assertInsert(INSERT_HSTORE_TYPE_WITH_SPECIAL_CHAR_STMT, 1, schemaAndValueFieldForMapEncodedHStoreTypeWithSpecialCharacters()); } @Test @FixFor("DBZ-898") public void shouldReceiveHStoreTypeAsJsonString() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); consumer = testConsumer(1); startConnector(config -> config.with(PostgresConnectorConfig.HSTORE_HANDLING_MODE, PostgresConnectorConfig.HStoreHandlingMode.JSON)); assertInsert(INSERT_HSTORE_TYPE_STMT, 1, schemaAndValueFieldForJsonEncodedHStoreType()); } @Test @FixFor("DBZ-898") public void shouldReceiveHStoreTypeWithMultipleValuesAsJsonString() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config.with(PostgresConnectorConfig.HSTORE_HANDLING_MODE, PostgresConnectorConfig.HStoreHandlingMode.JSON)); assertInsert(INSERT_HSTORE_TYPE_WITH_MULTIPLE_VALUES_STMT, 1, schemaAndValueFieldForJsonEncodedHStoreTypeWithMultipleValues()); } @Test @FixFor("DBZ-898") public void shouldReceiveHStoreTypeWithSpecialValuesInJsonString() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config.with(PostgresConnectorConfig.HSTORE_HANDLING_MODE, PostgresConnectorConfig.HStoreHandlingMode.JSON)); assertInsert(INSERT_HSTORE_TYPE_WITH_SPECIAL_CHAR_STMT, 1, schemaAndValueFieldForJsonEncodedHStoreTypeWithSpcialCharacters()); } @Test @FixFor("DBZ-898") public void shouldReceiveHStoreTypeWithNullValuesAsJsonString() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config.with(PostgresConnectorConfig.HSTORE_HANDLING_MODE, PostgresConnectorConfig.HStoreHandlingMode.JSON)); assertInsert(INSERT_HSTORE_TYPE_WITH_NULL_VALUES_STMT, 1, schemaAndValueFieldForJsonEncodedHStoreTypeWithNullValues()); } @Test @FixFor("DBZ-259") public void shouldProcessIntervalDelete() throws Exception { final String statements = "INSERT INTO table_with_interval VALUES (default, 'Foo', default);" + "INSERT INTO table_with_interval VALUES (default, 'Bar', default);" + "DELETE FROM table_with_interval WHERE id = 1;"; startConnector(); consumer.expects(4); executeAndWait(statements); final String topicPrefix = "public.table_with_interval"; final String topicName = topicName(topicPrefix); final String pk = "id"; assertRecordInserted(topicPrefix, pk, 1); assertRecordInserted(topicPrefix, pk, 2); // first entry removed SourceRecord record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidDelete(record, pk, 1); // followed by a tombstone record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidTombstone(record, pk, 1); } @Test @FixFor("DBZ-644") public void shouldPropagateSourceColumnTypeToSchemaParameter() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config.with("column.propagate.source.type", ".*vc.*")); assertInsert(INSERT_STRING_TYPES_STMT, 1, schemasAndValuesForStringTypesWithSourceColumnTypeInfo()); } @Test @FixFor("DBZ-1073") public void shouldPropagateSourceColumnTypeScaleToSchemaParameter() throws Exception { TestHelper.executeDDL("postgres_create_tables.ddl"); startConnector(config -> config .with("column.propagate.source.type", ".*(d|dzs)") .with(PostgresConnectorConfig.DECIMAL_HANDLING_MODE, PostgresConnectorConfig.DecimalHandlingMode.DOUBLE)); assertInsert(INSERT_NUMERIC_DECIMAL_TYPES_STMT, 1, schemasAndValuesForNumericTypesWithSourceColumnTypeInfo()); } @Test @FixFor("DBZ-800") public void shouldReceiveHeartbeatAlsoWhenChangingNonWhitelistedTable() throws Exception { // the low heartbeat interval should make sure that a heartbeat message is emitted after each change record // received from Postgres startConnector(config -> config .with(Heartbeat.HEARTBEAT_INTERVAL, "100") .with(PostgresConnectorConfig.POLL_INTERVAL_MS, "50") .with(PostgresConnectorConfig.TABLE_WHITELIST, "s1\\.b") .with(PostgresConnectorConfig.SNAPSHOT_MODE, SnapshotMode.NEVER), false); waitForStreamingToStart(); String statement = "CREATE SCHEMA s1;" + "CREATE TABLE s1.a (pk SERIAL, aa integer, PRIMARY KEY(pk));" + "CREATE TABLE s1.b (pk SERIAL, bb integer, PRIMARY KEY(pk));" + "INSERT INTO s1.b (bb) VALUES (22);"; TestHelper.execute(statement); final AtomicInteger heartbeatCount = new AtomicInteger(); Awaitility.await().atMost(TestHelper.waitTimeForRecords(), TimeUnit.SECONDS).until(() -> { final SourceRecord record = consumeRecord(); System.out.println(record); if (record != null) { if (record.topic().endsWith("s1.b")) { assertRecordInserted(record, "s1.b", PK_FIELD, 1); return true; } else { assertHeartBeatRecord(record); heartbeatCount.incrementAndGet(); } } return false; }); Assertions.assertThat(heartbeatCount.get()).isGreaterThan(0); final Set<Long> lsn = new HashSet<>(); TestHelper.execute("INSERT INTO s1.a (aa) VALUES (11);"); Awaitility.await().atMost(TestHelper.waitTimeForRecords(), TimeUnit.SECONDS).until(() -> { final SourceRecord record = consumeRecord(); if (record != null) { lsn.add((Long) record.sourceOffset().get("lsn")); return lsn.size() >= 2; } return false; }); Assertions.assertThat(lsn.size()).isGreaterThanOrEqualTo(2); } @Test @FixFor("DBZ-1565") public void shouldWarnOnMissingHeartbeatForFilteredEvents() throws Exception { final LogInterceptor logInterceptor = new LogInterceptor(); startConnector(config -> config .with(PostgresConnectorConfig.POLL_INTERVAL_MS, "50") .with(PostgresConnectorConfig.TABLE_WHITELIST, "s1\\.b") .with(PostgresConnectorConfig.SNAPSHOT_MODE, SnapshotMode.NEVER), false); waitForStreamingToStart(); String statement = "CREATE SCHEMA s1;" + "CREATE TABLE s1.a (pk SERIAL, aa integer, PRIMARY KEY(pk));" + "CREATE TABLE s1.b (pk SERIAL, bb integer, PRIMARY KEY(pk));" + "INSERT INTO s1.a (aa) VALUES (11);" + "INSERT INTO s1.b (bb) VALUES (22);"; consumer = testConsumer(1); executeAndWait(statement); final int filteredCount = 10_100; TestHelper.execute( IntStream.range(0, filteredCount) .mapToObj(x -> "INSERT INTO s1.a (pk) VALUES (default);") .collect(Collectors.joining())); Awaitility.await().alias("WAL growing log message").pollInterval(Duration.ONE_SECOND).atMost(Duration.TEN_SECONDS).until(() -> logInterceptor.containsWarnMessage( "Received 10001 events which were all filtered out, so no offset could be committed. This prevents the replication slot from acknowledging the processed WAL offsets, causing a growing backlog of non-removeable WAL segments on the database server. Consider to either adjust your filter configuration or enable heartbeat events (via the heartbeat.interval.ms option) to avoid this situation.")); } @Test @FixFor("DBZ-911") @SkipWhenDecoderPluginNameIs(value = PGOUTPUT, reason = "Decoder synchronizes all schema columns when processing relation messages") public void shouldNotRefreshSchemaOnUnchangedToastedData() throws Exception { startConnector(config -> config .with(PostgresConnectorConfig.SCHEMA_REFRESH_MODE, PostgresConnectorConfig.SchemaRefreshMode.COLUMNS_DIFF_EXCLUDE_UNCHANGED_TOAST)); String toastedValue = RandomStringUtils.randomAlphanumeric(10000); // inserting a toasted value should /always/ produce a correct record String statement = "ALTER TABLE test_table ADD COLUMN not_toast integer; INSERT INTO test_table (not_toast, text) values (10, '" + toastedValue + "')"; consumer = testConsumer(1); executeAndWait(statement); SourceRecord record = consumer.remove(); // after record should contain the toasted value List<SchemaAndValueField> expectedAfter = Arrays.asList( new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 10), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, toastedValue)); assertRecordSchemaAndValues(expectedAfter, record, Envelope.FieldName.AFTER); // now we remove the toast column and update the not_toast column to see that our unchanged toast data // does not trigger a table schema refresh. the after schema should look the same as before. statement = "ALTER TABLE test_table DROP COLUMN text; update test_table set not_toast = 5 where not_toast = 10"; consumer.expects(1); executeAndWait(statement); assertWithTask(task -> { Table tbl = ((PostgresConnectorTask) task).getTaskContext().schema().tableFor(TableId.parse("public.test_table")); assertEquals(Arrays.asList("pk", "text", "not_toast"), tbl.retrieveColumnNames()); }); TestHelper.noTransactionActive(); } @Test @FixFor("DBZ-911") @SkipWhenDecoderPluginNameIsNot(value = SkipWhenDecoderPluginNameIsNot.DecoderPluginName.PGOUTPUT, reason = "Decoder synchronizes all schema columns when processing relation messages") public void shouldRefreshSchemaOnUnchangedToastedDataWhenSchemaChanged() throws Exception { startConnector(config -> config .with(PostgresConnectorConfig.SCHEMA_REFRESH_MODE, PostgresConnectorConfig.SchemaRefreshMode.COLUMNS_DIFF_EXCLUDE_UNCHANGED_TOAST)); String toastedValue = RandomStringUtils.randomAlphanumeric(10000); // inserting a toasted value should /always/ produce a correct record String statement = "ALTER TABLE test_table ADD COLUMN not_toast integer; INSERT INTO test_table (not_toast, text) values (10, '" + toastedValue + "')"; consumer = testConsumer(1); executeAndWait(statement); SourceRecord record = consumer.remove(); // after record should contain the toasted value List<SchemaAndValueField> expectedAfter = Arrays.asList( new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 10), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, toastedValue)); assertRecordSchemaAndValues(expectedAfter, record, Envelope.FieldName.AFTER); // now we remove the toast column and update the not_toast column to see that our unchanged toast data // does trigger a table schema refresh. the after schema should be reflect the changes statement = "ALTER TABLE test_table DROP COLUMN text; update test_table set not_toast = 5 where not_toast = 10"; consumer.expects(1); executeAndWait(statement); assertWithTask(task -> { Table tbl = ((PostgresConnectorTask) task).getTaskContext().schema().tableFor(TableId.parse("public.test_table")); assertEquals(Arrays.asList("pk", "not_toast"), tbl.retrieveColumnNames()); }); } @Test @FixFor("DBZ-842") public void shouldNotPropagateUnchangedToastedData() throws Exception { startConnector(config -> config .with(PostgresConnectorConfig.SCHEMA_REFRESH_MODE, PostgresConnectorConfig.SchemaRefreshMode.COLUMNS_DIFF_EXCLUDE_UNCHANGED_TOAST)); final String toastedValue1 = RandomStringUtils.randomAlphanumeric(10000); final String toastedValue2 = RandomStringUtils.randomAlphanumeric(10000); final String toastedValue3 = RandomStringUtils.randomAlphanumeric(10000); // inserting a toasted value should /always/ produce a correct record String statement = "ALTER TABLE test_table ADD COLUMN not_toast integer;" + "ALTER TABLE test_table ADD COLUMN mandatory_text TEXT NOT NULL DEFAULT '';" + "ALTER TABLE test_table ALTER COLUMN mandatory_text SET STORAGE EXTENDED;" + "ALTER TABLE test_table ALTER COLUMN mandatory_text SET DEFAULT '" + toastedValue3 + "';" + "INSERT INTO test_table (not_toast, text, mandatory_text) values (10, '" + toastedValue1 + "', '" + toastedValue1 + "');" + "INSERT INTO test_table (not_toast, text, mandatory_text) values (10, '" + toastedValue2 + "', '" + toastedValue2 + "');"; consumer = testConsumer(2); executeAndWait(statement); // after record should contain the toasted value assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 10), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, toastedValue1), new SchemaAndValueField("mandatory_text", SchemaBuilder.STRING_SCHEMA, toastedValue1)), consumer.remove(), Envelope.FieldName.AFTER); assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 10), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, toastedValue2), new SchemaAndValueField("mandatory_text", SchemaBuilder.STRING_SCHEMA, toastedValue2)), consumer.remove(), Envelope.FieldName.AFTER); statement = "UPDATE test_table SET not_toast = 2;" + "UPDATE test_table SET not_toast = 3;"; consumer.expects(6); executeAndWait(statement); consumer.process(record -> { assertWithTask(task -> { Table tbl = ((PostgresConnectorTask) task).getTaskContext().schema().tableFor(TableId.parse("public.test_table")); assertEquals(Arrays.asList("pk", "text", "not_toast", "mandatory_text"), tbl.retrieveColumnNames()); }); }); assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 2), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "insert"), new SchemaAndValueField("mandatory_text", SchemaBuilder.STRING_SCHEMA, "")), consumer.remove(), Envelope.FieldName.AFTER); assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 2), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, DecoderDifferences.optionalToastedValuePlaceholder()), new SchemaAndValueField("mandatory_text", SchemaBuilder.STRING_SCHEMA, DecoderDifferences.mandatoryToastedValuePlaceholder())), consumer.remove(), Envelope.FieldName.AFTER); assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 2), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, DecoderDifferences.optionalToastedValuePlaceholder()), new SchemaAndValueField("mandatory_text", SchemaBuilder.STRING_SCHEMA, DecoderDifferences.mandatoryToastedValuePlaceholder())), consumer.remove(), Envelope.FieldName.AFTER); assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 3), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "insert"), new SchemaAndValueField("mandatory_text", SchemaBuilder.STRING_SCHEMA, "")), consumer.remove(), Envelope.FieldName.AFTER); assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 3), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, DecoderDifferences.optionalToastedValuePlaceholder()), new SchemaAndValueField("mandatory_text", SchemaBuilder.STRING_SCHEMA, DecoderDifferences.mandatoryToastedValuePlaceholder())), consumer.remove(), Envelope.FieldName.AFTER); assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 3), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, DecoderDifferences.optionalToastedValuePlaceholder()), new SchemaAndValueField("mandatory_text", SchemaBuilder.STRING_SCHEMA, DecoderDifferences.mandatoryToastedValuePlaceholder())), consumer.remove(), Envelope.FieldName.AFTER); } @Test @FixFor("DBZ-1029") public void shouldReceiveChangesForTableWithoutPrimaryKey() throws Exception { TestHelper.execute( "DROP TABLE IF EXISTS test_table;", "CREATE TABLE test_table (id SERIAL, text TEXT);", "ALTER TABLE test_table REPLICA IDENTITY FULL"); startConnector(Function.identity(), false); consumer = testConsumer(1); // INSERT String statement = "INSERT INTO test_table (text) VALUES ('a');"; assertInsert( statement, Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 1), // SERIAL is NOT NULL implicitly new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "a"))); // UPDATE consumer.expects(1); executeAndWait("UPDATE test_table set text='b' WHERE id=1"); SourceRecord updatedRecord = consumer.remove(); VerifyRecord.isValidUpdate(updatedRecord); List<SchemaAndValueField> expectedBefore = Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "a")); assertRecordSchemaAndValues(expectedBefore, updatedRecord, Envelope.FieldName.BEFORE); List<SchemaAndValueField> expectedAfter = Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "b")); assertRecordSchemaAndValues(expectedAfter, updatedRecord, Envelope.FieldName.AFTER); // DELETE consumer.expects(2); executeAndWait("DELETE FROM test_table WHERE id=1"); SourceRecord deletedRecord = consumer.remove(); VerifyRecord.isValidDelete(deletedRecord); expectedBefore = Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "b")); assertRecordSchemaAndValues(expectedBefore, deletedRecord, Envelope.FieldName.BEFORE); expectedAfter = null; assertRecordSchemaAndValues(expectedAfter, deletedRecord, Envelope.FieldName.AFTER); } @Test() @FixFor("DBZ-1130") @SkipWhenDecoderPluginNameIsNot(value = WAL2JSON, reason = "WAL2JSON specific: Pass 'add-tables' stream parameter and verify it acts as a whitelist") public void testPassingStreamParams() throws Exception { // Verify that passing stream parameters works by using the WAL2JSON add-tables parameter which acts as a // whitelist. startConnector(config -> config .with(PostgresConnectorConfig.STREAM_PARAMS, "add-tables=s1.should_stream")); String statement = "CREATE SCHEMA s1;" + "CREATE TABLE s1.should_stream (pk SERIAL, aa integer, PRIMARY KEY(pk));" + "CREATE TABLE s1.should_not_stream (pk SERIAL, aa integer, PRIMARY KEY(pk));" + "INSERT INTO s1.should_not_stream (aa) VALUES (456);" + "INSERT INTO s1.should_stream (aa) VALUES (123);"; // Verify only one record made it consumer = testConsumer(1); executeAndWait(statement); // Verify the record that made it was from the whitelisted table assertRecordInserted("s1.should_stream", PK_FIELD, 1); assertThat(consumer.isEmpty()).isTrue(); } @Test() @FixFor("DBZ-1130") @SkipWhenDecoderPluginNameIsNot(value = WAL2JSON, reason = "WAL2JSON specific: Pass multiple stream parameters and values verifying they work") public void testPassingStreamMultipleParams() throws Exception { // Verify that passing multiple stream parameters and multiple parameter values works. startConnector(config -> config .with(PostgresConnectorConfig.STREAM_PARAMS, "add-tables=s1.should_stream,s2.*;filter-tables=s2.should_not_stream")); String statement = "CREATE SCHEMA s1;" + "CREATE SCHEMA s2;" + "CREATE TABLE s1.should_stream (pk SERIAL, aa integer, PRIMARY KEY(pk));" + "CREATE TABLE s2.should_stream (pk SERIAL, aa integer, PRIMARY KEY(pk));" + "CREATE TABLE s1.should_not_stream (pk SERIAL, aa integer, PRIMARY KEY(pk));" + "CREATE TABLE s2.should_not_stream (pk SERIAL, aa integer, PRIMARY KEY(pk));" + "INSERT INTO s1.should_not_stream (aa) VALUES (456);" + "INSERT INTO s2.should_not_stream (aa) VALUES (111);" + "INSERT INTO s1.should_stream (aa) VALUES (123);" + "INSERT INTO s2.should_stream (aa) VALUES (999);"; // Verify only the whitelisted record from s1 and s2 made it. consumer = testConsumer(2); executeAndWait(statement); // Verify the record that made it was from the whitelisted table assertRecordInserted("s1.should_stream", PK_FIELD, 1); assertRecordInserted("s2.should_stream", PK_FIELD, 1); assertThat(consumer.isEmpty()).isTrue(); } @Test @FixFor("DBZ-1146") public void shouldReceiveChangesForReplicaIdentityFullTableWithToastedValueTableFromSnapshot() throws Exception { testReceiveChangesForReplicaIdentityFullTableWithToastedValue(SchemaRefreshMode.COLUMNS_DIFF_EXCLUDE_UNCHANGED_TOAST, true); } @Test @FixFor("DBZ-1146") public void shouldReceiveChangesForReplicaIdentityFullTableWithToastedValueTableFromStreaming() throws Exception { testReceiveChangesForReplicaIdentityFullTableWithToastedValue(SchemaRefreshMode.COLUMNS_DIFF_EXCLUDE_UNCHANGED_TOAST, false); } @Test @FixFor("DBZ-1146") public void shouldReceiveChangesForReplicaIdentityFullTableWithToastedValueTableFromSnapshotFullDiff() throws Exception { testReceiveChangesForReplicaIdentityFullTableWithToastedValue(SchemaRefreshMode.COLUMNS_DIFF, true); } @Test @FixFor("DBZ-1146") public void shouldReceiveChangesForReplicaIdentityFullTableWithToastedValueTableFromStreamingFullDiff() throws Exception { testReceiveChangesForReplicaIdentityFullTableWithToastedValue(SchemaRefreshMode.COLUMNS_DIFF, false); } @Test() @FixFor("DBZ-1181") public void testEmptyChangesProducesHeartbeat() throws Exception { // the low heartbeat interval should make sure that a heartbeat message is emitted after each change record // received from Postgres startConnector(config -> config.with(Heartbeat.HEARTBEAT_INTERVAL, "100")); TestHelper.execute( "DROP TABLE IF EXISTS test_table;" + "CREATE TABLE test_table (id SERIAL, text TEXT);" + "INSERT INTO test_table (text) VALUES ('mydata');"); // Expecting 1 data change Awaitility.await().atMost(TestHelper.waitTimeForRecords(), TimeUnit.SECONDS).until(() -> { final SourceRecord record = consumeRecord(); return record != null && Envelope.isEnvelopeSchema(record.valueSchema()); }); // Expecting one empty DDL change String statement = "CREATE SCHEMA s1;"; TestHelper.execute(statement); // Expecting changes for the empty DDL change final Set<Long> lsns = new HashSet<>(); Awaitility.await().atMost(TestHelper.waitTimeForRecords(), TimeUnit.SECONDS).until(() -> { final SourceRecord record = consumeRecord(); Assertions.assertThat(record.valueSchema().name()).endsWith(".Heartbeat"); lsns.add((Long) record.sourceOffset().get("lsn")); // CREATE SCHEMA should change LSN return lsns.size() > 1; }); assertThat(consumer.isEmpty()).isTrue(); } @Test @FixFor("DBZ-1082") public void shouldHaveNoXminWhenNotEnabled() throws Exception { startConnector(config -> config.with(PostgresConnectorConfig.XMIN_FETCH_INTERVAL, "0")); TestHelper.execute("ALTER TABLE test_table REPLICA IDENTITY DEFAULT;"); String statement = "INSERT INTO test_table (text) VALUES ('no_xmin');"; executeAndWait(statement); // Verify the record that made it does not have an xmin SourceRecord rec = assertRecordInserted("public.test_table", PK_FIELD, 2); assertSourceInfo(rec, "postgres", "public", "test_table"); Struct source = ((Struct) rec.value()).getStruct("source"); assertThat(source.getInt64("xmin")).isNull(); assertThat(consumer.isEmpty()).isTrue(); } @Test @FixFor("DBZ-1082") public void shouldHaveXminWhenEnabled() throws Exception { startConnector(config -> config.with(PostgresConnectorConfig.XMIN_FETCH_INTERVAL, "10")); TestHelper.execute("ALTER TABLE test_table REPLICA IDENTITY DEFAULT;"); String statement = "INSERT INTO test_table (text) VALUES ('with_xmin');"; executeAndWait(statement); // Verify the record that made it does not have an xmin SourceRecord rec = assertRecordInserted("public.test_table", PK_FIELD, 2); assertSourceInfo(rec, "postgres", "public", "test_table"); Struct source = ((Struct) rec.value()).getStruct("source"); assertThat(source.getInt64("xmin")).isGreaterThan(0L); assertThat(consumer.isEmpty()).isTrue(); } @Test public void shouldProcessLargerTx() throws Exception { Testing.Print.disable(); final int numberOfEvents = 1000; startConnector(); waitForStreamingToStart(); final String topicPrefix = "public.test_table"; final String topicName = topicName(topicPrefix); final Stopwatch stopwatch = Stopwatch.reusable(); consumer = testConsumer(numberOfEvents); // This is not accurate as we measure also including the data but // it is sufficient to confirm there is no large difference // in runtime between the cases stopwatch.start(); executeAndWait(IntStream.rangeClosed(2, numberOfEvents + 1) .boxed() .map(x -> "INSERT INTO test_table (text) VALUES ('insert" + x + "')") .collect(Collectors.joining(";"))); stopwatch.stop(); final long firstRun = stopwatch.durations().statistics().getTotal().toMillis(); logger.info("Single tx duration = {} ms", firstRun); for (int i = 0; i < numberOfEvents; i++) { SourceRecord record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidInsert(record, PK_FIELD, i + 2); } consumer.expects(numberOfEvents); IntStream.rangeClosed(2, numberOfEvents + 1).forEach(x -> TestHelper.execute("INSERT INTO test_table (text) VALUES ('insert" + x + "')")); stopwatch.start(); // There should be no significant difference between many TX runtime and single large TX // We still add generous limits as the runtime is in seconds and we cannot provide // a stable scheduling environment consumer.await(3 * firstRun, TimeUnit.MILLISECONDS); stopwatch.stop(); for (int i = 0; i < numberOfEvents; i++) { SourceRecord record = consumer.remove(); assertEquals(topicName, record.topic()); VerifyRecord.isValidInsert(record, PK_FIELD, i + 1002); } logger.info("Many tx duration = {} ms", stopwatch.durations().statistics().getTotal().toMillis()); } @Test @SkipWhenDecoderPluginNameIsNot(value = SkipWhenDecoderPluginNameIsNot.DecoderPluginName.PGOUTPUT, reason = "Tests specifically that pgoutput gracefully skips these messages") public void shouldGracefullySkipTruncateMessages() throws Exception { startConnector(); waitForStreamingToStart(); consumer = testConsumer(1); executeAndWait("INSERT INTO test_table (text) values ('TRUNCATE TEST');"); SourceRecord record = consumer.remove(); assertEquals(TestHelper.topicName("public.test_table"), record.topic()); VerifyRecord.isValidInsert(record, PK_FIELD, 2); consumer.expects(0); TestHelper.execute("TRUNCATE TABLE public.test_table;"); consumer.await(TestHelper.waitTimeForRecords(), TimeUnit.SECONDS); assertTrue(consumer.isEmpty()); } @Test @FixFor("DBZ-1413") public void shouldStreamChangesForDataTypeAlias() throws Exception { TestHelper.execute("CREATE DOMAIN money2 AS money DEFAULT 0.0;"); TestHelper.execute("CREATE TABLE alias_table (pk SERIAL, data VARCHAR(50), salary money, salary2 money2, PRIMARY KEY(pk));"); startConnector(config -> config .with(PostgresConnectorConfig.DECIMAL_HANDLING_MODE, DecimalHandlingMode.DOUBLE) .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(PostgresConnectorConfig.SNAPSHOT_MODE, SnapshotMode.INITIAL) .with(PostgresConnectorConfig.TABLE_WHITELIST, "public.alias_table"), false); waitForStreamingToStart(); consumer = testConsumer(1); executeAndWait("INSERT INTO alias_table (data, salary, salary2) values ('hello', 7.25, 8.25);"); SourceRecord rec = assertRecordInserted("public.alias_table", PK_FIELD, 1); assertSourceInfo(rec, "postgres", "public", "alias_table"); List<SchemaAndValueField> expected = Arrays.asList( new SchemaAndValueField("pk", SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("data", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "hello"), new SchemaAndValueField("salary", Decimal.builder(2).optional().build(), new BigDecimal(7.25)), new SchemaAndValueField("salary2", Decimal.builder(2).optional().build(), new BigDecimal(8.25))); assertRecordSchemaAndValues(expected, rec, Envelope.FieldName.AFTER); assertThat(consumer.isEmpty()).isTrue(); } @Test @FixFor("DBZ-1413") public void shouldStreamChangesForDomainAliasAlterTable() throws Exception { TestHelper.execute("CREATE TABLE alias_table (pk SERIAL, data VARCHAR(50), salary money, PRIMARY KEY(pk));"); startConnector(config -> config .with(PostgresConnectorConfig.DECIMAL_HANDLING_MODE, DecimalHandlingMode.DOUBLE) .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(PostgresConnectorConfig.SNAPSHOT_MODE, SnapshotMode.NEVER) .with(PostgresConnectorConfig.TABLE_WHITELIST, "public.alias_table") .with("column.propagate.source.type", "public.alias_table.salary3"), false); waitForStreamingToStart(); // Now that streaming has started, alter the table schema TestHelper.execute("CREATE DOMAIN money2 AS money DEFAULT 0.0;"); TestHelper.execute("CREATE DOMAIN money3 AS numeric(8,3) DEFAULT 0.0;"); TestHelper.execute("ALTER TABLE alias_table ADD COLUMN salary2 money2 NOT NULL;"); TestHelper.execute("ALTER TABLE alias_table ADD COLUMN salary3 money3 NOT NULL;"); consumer = testConsumer(1); executeAndWait("INSERT INTO alias_table (data, salary, salary2, salary3) values ('hello', 7.25, 8.25, 123.456);"); SourceRecord rec = assertRecordInserted("public.alias_table", PK_FIELD, 1); assertSourceInfo(rec, "postgres", "public", "alias_table"); List<SchemaAndValueField> expected = Arrays.asList( new SchemaAndValueField("pk", SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("data", SchemaBuilder.OPTIONAL_STRING_SCHEMA, "hello"), new SchemaAndValueField("salary", Decimal.builder(2).optional().build(), new BigDecimal(7.25)), new SchemaAndValueField("salary2", Decimal.builder(2).build(), new BigDecimal(8.25)), new SchemaAndValueField("salary3", SchemaBuilder.float64() .parameter(TestHelper.TYPE_NAME_PARAMETER_KEY, "MONEY3") .parameter(TestHelper.TYPE_LENGTH_PARAMETER_KEY, "8") .parameter(TestHelper.TYPE_SCALE_PARAMETER_KEY, "3") .build(), 123.456)); assertRecordSchemaAndValues(expected, rec, Envelope.FieldName.AFTER); assertThat(consumer.isEmpty()).isTrue(); } @Test @FixFor("DBZ-1413") public void shouldStreamDomainAliasWithProperModifiers() throws Exception { TestHelper.execute("CREATE TABLE alias_table (pk SERIAL, PRIMARY KEY(pk));"); startConnector(config -> config .with(PostgresConnectorConfig.DECIMAL_HANDLING_MODE, DecimalHandlingMode.DOUBLE) .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(PostgresConnectorConfig.SNAPSHOT_MODE, SnapshotMode.NEVER) .with(PostgresConnectorConfig.TABLE_WHITELIST, "public.alias_table"), false); waitForStreamingToStart(); TestHelper.execute("CREATE DOMAIN varbit2 AS varbit(3);"); TestHelper.execute("ALTER TABLE public.alias_table ADD COLUMN value varbit2 NOT NULL;"); consumer = testConsumer(1); executeAndWait("INSERT INTO public.alias_table (value) VALUES (B'101');"); SourceRecord rec = assertRecordInserted("public.alias_table", PK_FIELD, 1); assertSourceInfo(rec, "postgres", "public", "alias_table"); List<SchemaAndValueField> expected = Arrays.asList( new SchemaAndValueField(PK_FIELD, SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("value", Bits.builder(3).build(), new byte[]{ 5, 0 })); assertRecordSchemaAndValues(expected, rec, Envelope.FieldName.AFTER); assertThat(consumer.isEmpty()).isTrue(); } @Test @FixFor("DBZ-1413") public void shouldStreamValuesForDomainTypeOfDomainType() throws Exception { TestHelper.execute("CREATE DOMAIN numeric82 as numeric(8,2);"); TestHelper.execute("CREATE DOMAIN numericex as numeric82;"); TestHelper.execute("CREATE TABLE alias_table (pk SERIAL, value numericex, PRIMARY KEY (pk));"); startConnector(config -> config .with(PostgresConnectorConfig.DECIMAL_HANDLING_MODE, DecimalHandlingMode.DOUBLE) .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(PostgresConnectorConfig.SNAPSHOT_MODE, SnapshotMode.NEVER) .with(PostgresConnectorConfig.TABLE_WHITELIST, "public.alias_table") .with("column.propagate.source.type", "public.alias_table.value"), false); waitForStreamingToStart(); consumer = testConsumer(1); executeAndWait("INSERT INTO alias_table (value) values (123.45);"); SourceRecord rec = assertRecordInserted("public.alias_table", PK_FIELD, 1); assertSourceInfo(rec, "postgres", "public", "alias_table"); List<SchemaAndValueField> expected = Arrays.asList( new SchemaAndValueField(PK_FIELD, SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("value", SpecialValueDecimal.builder(DecimalMode.DOUBLE, 8, 2) .optional() .parameter(TestHelper.TYPE_NAME_PARAMETER_KEY, "NUMERICEX") .parameter(TestHelper.TYPE_LENGTH_PARAMETER_KEY, "8") .parameter(TestHelper.TYPE_SCALE_PARAMETER_KEY, "2") .build(), 123.45)); assertRecordSchemaAndValues(expected, rec, Envelope.FieldName.AFTER); assertThat(consumer.isEmpty()).isTrue(); } @Test @FixFor("DBZ-1413") public void shouldStreamValuesForAliasLikeBaseTypes() throws Exception { TestHelper.execute("CREATE TABLE alias_table (pk SERIAL, PRIMARY KEY (pk));"); startConnector(config -> config .with(PostgresConnectorConfig.DECIMAL_HANDLING_MODE, DecimalHandlingMode.DOUBLE) .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(PostgresConnectorConfig.SNAPSHOT_MODE, SnapshotMode.NEVER) .with(PostgresConnectorConfig.TABLE_WHITELIST, "public.alias_table"), false); waitForStreamingToStart(); // note: skipped macaddr8 as that is only supported on PG10+ but was manually tested TestHelper.execute("CREATE DOMAIN bit2 AS BIT(3);"); TestHelper.execute("CREATE DOMAIN smallint2 AS smallint;"); TestHelper.execute("CREATE DOMAIN integer2 as integer;"); TestHelper.execute("CREATE DOMAIN bigint2 as bigint;"); TestHelper.execute("CREATE DOMAIN real2 as real;"); TestHelper.execute("CREATE DOMAIN bool2 AS BOOL DEFAULT false;"); TestHelper.execute("CREATE DOMAIN float82 as float8;"); TestHelper.execute("CREATE DOMAIN numeric2 as numeric(6,2);"); TestHelper.execute("CREATE DOMAIN string2 AS varchar(25) DEFAULT NULL;"); TestHelper.execute("CREATE DOMAIN date2 AS date;"); TestHelper.execute("CREATE DOMAIN time2 as time;"); TestHelper.execute("CREATE DOMAIN timetz2 as timetz;"); TestHelper.execute("CREATE DOMAIN timestamp2 as timestamp;"); TestHelper.execute("CREATE DOMAIN timestamptz2 AS timestamptz;"); TestHelper.execute("CREATE DOMAIN timewotz2 as time without time zone;"); TestHelper.execute("CREATE DOMAIN box2 as box;"); TestHelper.execute("CREATE DOMAIN circle2 as circle;"); TestHelper.execute("CREATE DOMAIN interval2 as interval;"); TestHelper.execute("CREATE DOMAIN line2 as line;"); TestHelper.execute("CREATE DOMAIN lseg2 as lseg;"); TestHelper.execute("CREATE DOMAIN path2 as path;"); TestHelper.execute("CREATE DOMAIN point2 as point;"); TestHelper.execute("CREATE DOMAIN polygon2 as polygon;"); TestHelper.execute("CREATE DOMAIN char2 as char;"); TestHelper.execute("CREATE DOMAIN text2 as text;"); TestHelper.execute("CREATE DOMAIN json2 as json;"); TestHelper.execute("CREATE DOMAIN xml2 as xml;"); TestHelper.execute("CREATE DOMAIN uuid2 as uuid;"); TestHelper.execute("CREATE DOMAIN varbit2 as varbit(3);"); TestHelper.execute("CREATE DOMAIN inet2 as inet;"); TestHelper.execute("CREATE DOMAIN cidr2 as cidr;"); TestHelper.execute("CREATE DOMAIN macaddr2 as macaddr;"); TestHelper.execute("ALTER TABLE alias_table " + "ADD COLUMN bit_base bit(3) NOT NULL, ADD COLUMN bit_alias bit2 NOT NULL, " + "ADD COLUMN smallint_base smallint NOT NULL, ADD COLUMN smallint_alias smallint2 NOT NULL, " + "ADD COLUMN integer_base integer NOT NULL, ADD COLUMN integer_alias integer2 NOT NULL, " + "ADD COLUMN bigint_base bigint NOT NULL, ADD COLUMN bigint_alias bigint2 NOT NULL, " + "ADD COLUMN real_base real NOT NULL, ADD COLUMN real_alias real2 NOT NULL, " + "ADD COLUMN float8_base float8 NOT NULL, ADD COLUMN float8_alias float82 NOT NULL, " + "ADD COLUMN numeric_base numeric(6,2) NOT NULL, ADD COLUMN numeric_alias numeric2 NOT NULL, " + "ADD COLUMN bool_base bool NOT NULL, ADD COLUMN bool_alias bool2 NOT NULL, " + "ADD COLUMN string_base varchar(25) NOT NULL, ADD COLUMN string_alias string2 NOT NULL, " + "ADD COLUMN date_base date NOT NULL, ADD COLUMN date_alias date2 NOT NULL, " + "ADD COLUMN time_base time NOT NULL, ADD COLUMN time_alias time2 NOT NULL, " + "ADD COLUMN timetz_base timetz NOT NULL, ADD COLUMN timetz_alias timetz2 NOT NULL, " + "ADD COLUMN timestamp_base timestamp NOT NULL, ADD COLUMN timestamp_alias timestamp2 NOT NULL, " + "ADD COLUMN timestamptz_base timestamptz NOT NULL, ADD COLUMN timestamptz_alias timestamptz2 NOT NULL, " + "ADD COLUMN timewottz_base time without time zone NOT NULL, ADD COLUMN timewottz_alias timewotz2 NOT NULL, " + "ADD COLUMN box_base box NOT NULL, ADD COLUMN box_alias box2 NOT NULL, " + "ADD COLUMN circle_base circle NOT NULL, ADD COLUMN circle_alias circle2 NOT NULL, " + "ADD COLUMN interval_base interval NOT NULL, ADD COLUMN interval_alias interval2 NOT NULL, " + "ADD COLUMN line_base line NOT NULL, ADD COLUMN line_alias line2 NOT NULL, " + "ADD COLUMN lseg_base lseg NOT NULL, ADD COLUMN lseg_alias lseg2 NOT NULL, " + "ADD COLUMN path_base path NOT NULL, ADD COLUMN path_alias path2 NOT NULL, " + "ADD COLUMN point_base point NOT NULL, ADD COLUMN point_alias point2 NOT NULL, " + "ADD COLUMN polygon_base polygon NOT NULL, ADD COLUMN polygon_alias polygon2 NOT NULL, " + "ADD COLUMN char_base char NOT NULL, ADD COLUMN char_alias char2 NOT NULL, " + "ADD COLUMN text_base text NOT NULL, ADD COLUMN text_alias text2 NOT NULL, " + "ADD COLUMN json_base json NOT NULL, ADD COLUMN json_alias json2 NOT NULL, " + "ADD COLUMN xml_base xml NOT NULL, ADD COLUMN xml_alias xml2 NOT NULL, " + "ADD COLUMN uuid_base UUID NOT NULL, ADD COLUMN uuid_alias uuid2 NOT NULL, " + "ADD COLUMN varbit_base varbit(3) NOT NULL, ADD COLUMN varbit_alias varbit2 NOT NULL," + "ADD COLUMN inet_base inet NOT NULL, ADD COLUMN inet_alias inet2 NOT NULL, " + "ADD COLUMN cidr_base cidr NOT NULL, ADD COLUMN cidr_alias cidr2 NOT NULL, " + "ADD COLUMN macaddr_base macaddr NOT NULL, ADD COLUMN macaddr_alias macaddr2 NOT NULL"); consumer = testConsumer(1); executeAndWait("INSERT INTO alias_table (" + "bit_base, bit_alias, " + "smallint_base, smallint_alias, " + "integer_base, integer_alias, " + "bigint_base, bigint_alias, " + "real_base, real_alias, " + "float8_base, float8_alias, " + "numeric_base, numeric_alias, " + "bool_base, bool_alias, " + "string_base, string_alias, " + "date_base, date_alias, " + "time_base, time_alias, " + "timetz_base, timetz_alias, " + "timestamp_base, timestamp_alias, " + "timestamptz_base, timestamptz_alias, " + "timewottz_base, timewottz_alias, " + "box_base, box_alias, " + "circle_base, circle_alias, " + "interval_base, interval_alias, " + "line_base, line_alias, " + "lseg_base, lseg_alias, " + "path_base, path_alias, " + "point_base, point_alias, " + "polygon_base, polygon_alias, " + "char_base, char_alias, " + "text_base, text_alias, " + "json_base, json_alias, " + "xml_base, xml_alias, " + "uuid_base, uuid_alias, " + "varbit_base, varbit_alias, " + "inet_base, inet_alias, " + "cidr_base, cidr_alias, " + "macaddr_base, macaddr_alias " + ") VALUES (" + "B'101', B'101', " + "1, 1, " + "1, 1, " + "1000, 1000, " + "3.14, 3.14, " + "3.14, 3.14, " + "1234.12, 1234.12, " + "true, true, " + "'hello', 'hello', " + "'2019-10-02', '2019-10-02', " + "'01:02:03', '01:02:03', " + "'01:02:03.123789Z', '01:02:03.123789Z', " + "'2019-10-02T01:02:03.123456', '2019-10-02T01:02:03.123456', " + "'2019-10-02T13:51:30.123456+02:00'::TIMESTAMPTZ, '2019-10-02T13:51:30.123456+02:00'::TIMESTAMPTZ, " + "'01:02:03', '01:02:03', " + "'(0,0),(1,1)', '(0,0),(1,1)', " + "'10,4,10', '10,4,10', " + "'1 year 2 months 3 days 4 hours 5 minutes 6 seconds', '1 year 2 months 3 days 4 hours 5 minutes 6 seconds', " + "'(0,0),(0,1)', '(0,0),(0,1)', " + "'((0,0),(0,1))', '((0,0),(0,1))', " + "'((0,0),(0,1),(0,2))', '((0,0),(0,1),(0,2))', " + "'(1,1)', '(1,1)', " + "'((0,0),(0,1),(1,0),(0,0))', '((0,0),(0,1),(1,0),(0,0))', " + "'a', 'a', " + "'Hello World', 'Hello World', " + "'{\"key\": \"value\"}', '{\"key\": \"value\"}', " + "XML('<foo>Hello</foo>'), XML('<foo>Hello</foo>'), " + "'40e6215d-b5c6-4896-987c-f30f3678f608', '40e6215d-b5c6-4896-987c-f30f3678f608', " + "B'101', B'101', " + "'192.168.0.1', '192.168.0.1', " + "'192.168/24', '192.168/24', " + "'08:00:2b:01:02:03', '08:00:2b:01:02:03' " + ");"); SourceRecord rec = assertRecordInserted("public.alias_table", PK_FIELD, 1); assertSourceInfo(rec, "postgres", "public", "alias_table"); assertRecordSchemaAndValues(schemasAndValuesForDomainAliasTypes(true), rec, Envelope.FieldName.AFTER); assertThat(consumer.isEmpty()).isTrue(); } @Test @FixFor("DBZ-920") public void shouldStreamEnumAsKnownType() throws Exception { // Specifically enable `column.propagate.source.type` here to validate later that the actual // type, length, and scale values are resolved correctly when paired with Enum types. TestHelper.execute("CREATE TABLE enum_table (pk SERIAL, PRIMARY KEY (pk));"); startConnector(config -> config .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, true) .with(PostgresConnectorConfig.SNAPSHOT_MODE, SnapshotMode.NEVER) .with("column.propagate.source.type", "public.enum_table.value") .with(PostgresConnectorConfig.TABLE_WHITELIST, "public.enum_table"), false); waitForStreamingToStart(); // We create the enum type after streaming started to simulate some future schema change TestHelper.execute("CREATE TYPE test_type AS ENUM ('V1','V2');"); TestHelper.execute("ALTER TABLE enum_table ADD COLUMN value test_type NOT NULL"); consumer = testConsumer(1); executeAndWait("INSERT INTO enum_table (value) VALUES ('V1');"); SourceRecord rec = assertRecordInserted("public.enum_table", PK_FIELD, 1); assertSourceInfo(rec, "postgres", "public", "enum_table"); List<SchemaAndValueField> expected = Arrays.asList( new SchemaAndValueField(PK_FIELD, Schema.INT32_SCHEMA, 1), new SchemaAndValueField("value", Enum.builder("V1,V2") .parameter(TestHelper.TYPE_NAME_PARAMETER_KEY, "TEST_TYPE") .parameter(TestHelper.TYPE_LENGTH_PARAMETER_KEY, String.valueOf(Integer.MAX_VALUE)) .parameter(TestHelper.TYPE_SCALE_PARAMETER_KEY, "0") .build(), "V1")); assertRecordSchemaAndValues(expected, rec, Envelope.FieldName.AFTER); assertThat(consumer.isEmpty()).isTrue(); } @Test @FixFor("DBZ-1680") public void shouldStreamEnumsWhenIncludeUnknownDataTypesDisabled() throws Exception { // Specifically enable `column.propagate.source.type` here to validate later that the actual // type, length, and scale values are resolved correctly when paired with Enum types. TestHelper.execute("CREATE TYPE test_type AS ENUM ('V1','V2');"); TestHelper.execute("CREATE TABLE enum_table (pk SERIAL, data varchar(25) NOT NULL, value test_type NOT NULL DEFAULT 'V1', PRIMARY KEY (pk));"); startConnector(config -> config .with(PostgresConnectorConfig.INCLUDE_UNKNOWN_DATATYPES, false) .with(PostgresConnectorConfig.SNAPSHOT_MODE, SnapshotMode.NEVER) .with("column.propagate.source.type", "public.enum_table.value") .with(PostgresConnectorConfig.TABLE_WHITELIST, "public.enum_table"), false); waitForStreamingToStart(); consumer = testConsumer(1); executeAndWait("INSERT INTO enum_table (data) VALUES ('hello');"); SourceRecord rec = assertRecordInserted("public.enum_table", PK_FIELD, 1); assertSourceInfo(rec, "postgres", "public", "enum_table"); List<SchemaAndValueField> expected = Arrays.asList( new SchemaAndValueField(PK_FIELD, Schema.INT32_SCHEMA, 1), new SchemaAndValueField("data", Schema.STRING_SCHEMA, "hello"), new SchemaAndValueField("value", Enum.builder("V1,V2") .parameter(TestHelper.TYPE_NAME_PARAMETER_KEY, "TEST_TYPE") .parameter(TestHelper.TYPE_LENGTH_PARAMETER_KEY, String.valueOf(Integer.MAX_VALUE)) .parameter(TestHelper.TYPE_SCALE_PARAMETER_KEY, "0") .build(), "V1")); assertRecordSchemaAndValues(expected, rec, Envelope.FieldName.AFTER); assertThat(consumer.isEmpty()).isTrue(); } private long asEpochMicros(String timestamp) { Instant instant = LocalDateTime.parse(timestamp).atOffset(ZoneOffset.UTC).toInstant(); return instant.getEpochSecond() * 1_000_000 + instant.getNano() / 1_000; } private void testReceiveChangesForReplicaIdentityFullTableWithToastedValue(PostgresConnectorConfig.SchemaRefreshMode mode, boolean tablesBeforeStart) throws Exception { if (tablesBeforeStart) { TestHelper.execute( "DROP TABLE IF EXISTS test_table;", "CREATE TABLE test_table (id SERIAL, not_toast int, text TEXT);", "ALTER TABLE test_table REPLICA IDENTITY FULL"); } startConnector(config -> config.with(PostgresConnectorConfig.SCHEMA_REFRESH_MODE, mode), false); consumer = testConsumer(1); final String toastedValue = RandomStringUtils.randomAlphanumeric(10000); if (!tablesBeforeStart) { TestHelper.execute( "DROP TABLE IF EXISTS test_table;", "CREATE TABLE test_table (id SERIAL, not_toast int, text TEXT);", "ALTER TABLE test_table REPLICA IDENTITY FULL"); } // INSERT String statement = "INSERT INTO test_table (not_toast, text) VALUES (10,'" + toastedValue + "');"; assertInsert( statement, Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 1), // SERIAL is NOT NULL implicitly new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 10), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, toastedValue))); // UPDATE consumer.expects(1); executeAndWait("UPDATE test_table set not_toast = 20"); SourceRecord updatedRecord = consumer.remove(); if (DecoderDifferences.areToastedValuesPresentInSchema() || mode == SchemaRefreshMode.COLUMNS_DIFF_EXCLUDE_UNCHANGED_TOAST) { assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 10), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, toastedValue)), updatedRecord, Envelope.FieldName.BEFORE); assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 20), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, toastedValue)), updatedRecord, Envelope.FieldName.AFTER); } else { assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 10)), updatedRecord, Envelope.FieldName.BEFORE); assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 20)), updatedRecord, Envelope.FieldName.AFTER); } // DELETE consumer.expects(2); executeAndWait("DELETE FROM test_table"); SourceRecord deletedRecord = consumer.remove(); SourceRecord tombstoneRecord = consumer.remove(); assertThat(tombstoneRecord.value()).isNull(); assertThat(tombstoneRecord.valueSchema()).isNull(); if (DecoderDifferences.areToastedValuesPresentInSchema() || mode == SchemaRefreshMode.COLUMNS_DIFF_EXCLUDE_UNCHANGED_TOAST) { assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 20), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, toastedValue)), deletedRecord, Envelope.FieldName.BEFORE); } else { assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 1), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 20)), deletedRecord, Envelope.FieldName.BEFORE); } // INSERT null consumer.expects(1); statement = "INSERT INTO test_table (not_toast, text) VALUES (100, null);"; assertInsert( statement, Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 2), // SERIAL is NOT NULL implicitly new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 100), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, null))); // UPDATE null consumer.expects(1); executeAndWait("UPDATE test_table set not_toast = 200 WHERE id=2"); updatedRecord = consumer.remove(); if (DecoderDifferences.areToastedValuesPresentInSchema() || mode == SchemaRefreshMode.COLUMNS_DIFF_EXCLUDE_UNCHANGED_TOAST) { assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 2), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 100), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, null)), updatedRecord, Envelope.FieldName.BEFORE); assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 2), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 200), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, null)), updatedRecord, Envelope.FieldName.AFTER); } else { assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 2), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 100)), updatedRecord, Envelope.FieldName.BEFORE); assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 2), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 200)), updatedRecord, Envelope.FieldName.AFTER); } // DELETE null consumer.expects(2); executeAndWait("DELETE FROM test_table WHERE id=2"); deletedRecord = consumer.remove(); tombstoneRecord = consumer.remove(); assertThat(tombstoneRecord.value()).isNull(); assertThat(tombstoneRecord.valueSchema()).isNull(); if (DecoderDifferences.areToastedValuesPresentInSchema() || mode == SchemaRefreshMode.COLUMNS_DIFF_EXCLUDE_UNCHANGED_TOAST) { assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 2), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 200), new SchemaAndValueField("text", SchemaBuilder.OPTIONAL_STRING_SCHEMA, null)), deletedRecord, Envelope.FieldName.BEFORE); } else { assertRecordSchemaAndValues(Arrays.asList( new SchemaAndValueField("id", SchemaBuilder.INT32_SCHEMA, 2), new SchemaAndValueField("not_toast", SchemaBuilder.OPTIONAL_INT32_SCHEMA, 200)), deletedRecord, Envelope.FieldName.BEFORE); } } private void assertHeartBeatRecordInserted() { assertFalse("records not generated", consumer.isEmpty()); assertHeartBeatRecord(consumer.remove()); } private void assertHeartBeatRecord(SourceRecord heartbeat) { assertEquals("__debezium-heartbeat." + TestHelper.TEST_SERVER, heartbeat.topic()); Struct key = (Struct) heartbeat.key(); assertThat(key.get("serverName")).isEqualTo(TestHelper.TEST_SERVER); Struct value = (Struct) heartbeat.value(); assertThat(value.getInt64("ts_ms")).isLessThanOrEqualTo(Instant.now().toEpochMilli()); } private Optional<SourceRecord> isHeartBeatRecordInserted() { assertFalse("records not generated", consumer.isEmpty()); final String heartbeatTopicName = "__debezium-heartbeat." + TestHelper.TEST_SERVER; SourceRecord record = consumer.remove(); if (!heartbeatTopicName.equals(record.topic())) { return Optional.of(record); } assertEquals(heartbeatTopicName, record.topic()); Struct key = (Struct) record.key(); assertThat(key.get("serverName")).isEqualTo(TestHelper.TEST_SERVER); Struct value = (Struct) record.value(); assertThat(value.getInt64("ts_ms")).isLessThanOrEqualTo(Instant.now().toEpochMilli()); return Optional.empty(); } private void assertInsert(String statement, List<SchemaAndValueField> expectedSchemaAndValuesByColumn) { assertInsert(statement, null, expectedSchemaAndValuesByColumn); } private void assertInsert(String statement, Integer pk, List<SchemaAndValueField> expectedSchemaAndValuesByColumn) { TableId table = tableIdFromInsertStmt(statement); String expectedTopicName = table.schema() + "." + table.table(); expectedTopicName = expectedTopicName.replaceAll("[ \"]", "_"); try { executeAndWait(statement); SourceRecord record = assertRecordInserted(expectedTopicName, pk != null ? PK_FIELD : null, pk); assertRecordOffsetAndSnapshotSource(record, false, false); assertSourceInfo(record, "postgres", table.schema(), table.table()); assertRecordSchemaAndValues(expectedSchemaAndValuesByColumn, record, Envelope.FieldName.AFTER); } catch (Exception e) { throw new RuntimeException(e); } } private SourceRecord assertRecordInserted(SourceRecord insertedRecord, String expectedTopicName, String pkColumn, Integer pk) throws InterruptedException { assertEquals(topicName(expectedTopicName), insertedRecord.topic()); if (pk != null) { VerifyRecord.isValidInsert(insertedRecord, pkColumn, pk); } else { VerifyRecord.isValidInsert(insertedRecord); } return insertedRecord; } private SourceRecord assertRecordInserted(String expectedTopicName, String pkColumn, Integer pk) throws InterruptedException { assertFalse("records not generated", consumer.isEmpty()); SourceRecord insertedRecord = consumer.remove(); return assertRecordInserted(insertedRecord, expectedTopicName, pkColumn, pk); } private void executeAndWait(String statements) throws Exception { TestHelper.execute(statements); consumer.await(TestHelper.waitTimeForRecords() * 30, TimeUnit.SECONDS); } private void executeAndWaitForNoRecords(String statements) throws Exception { TestHelper.execute(statements); consumer.await(5, TimeUnit.SECONDS); } }
DBZ-1727 Update heartbeat on changing non whitelisted table test
debezium-connector-postgres/src/test/java/io/debezium/connector/postgresql/RecordsStreamProducerIT.java
DBZ-1727 Update heartbeat on changing non whitelisted table test
Java
apache-2.0
dc6b3e9348b2db6d771f93656b36c13bb27d85c1
0
IHTSDO/snomed-drools
package org.ihtsdo.drools.rulestestrig; import org.ihtsdo.drools.RuleExecutor; import org.ihtsdo.drools.RuleExecutorFactory; import org.ihtsdo.drools.domain.Concept; import org.ihtsdo.drools.domain.Constants; import org.ihtsdo.drools.domain.OntologyAxiom; import org.ihtsdo.drools.response.InvalidContent; import org.ihtsdo.drools.rulestestrig.domain.*; import org.ihtsdo.drools.rulestestrig.service.TestConceptService; import org.ihtsdo.drools.rulestestrig.service.TestDescriptionService; import org.ihtsdo.drools.rulestestrig.service.TestRelationshipService; import org.ihtsdo.drools.service.TestResourceProvider; import org.ihtsdo.otf.resourcemanager.ManualResourceConfiguration; import org.ihtsdo.otf.resourcemanager.ResourceConfiguration; import org.ihtsdo.otf.resourcemanager.ResourceManager; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; import java.io.File; import java.io.FileNotFoundException; import java.util.*; @RunWith(Parameterized.class) public class RulesTestManual { private static final String GIVEN_CONCEPTS = "givenConcepts"; private static final String ASSERT_CONCEPTS_PASS = "assertConceptsPass"; private static final String ASSERT_CONCEPTS_FAIL = "assertConceptsFail"; private final RuleExecutor ruleExecutor; private final Map<String, Concept> concepts; private Map<String, List<TestConcept<TestDescription, TestRelationship>>> testConcepts; private Map<String, String> ruleIdToMessageMap = new HashMap<>(); private TestConceptService conceptService; private TestDescriptionService descriptionService; private TestRelationshipService relationshipService; @Parameters(name = "{0}") public static Iterable<?> data() { final String rulesPath = "../../snomed-drools-rules"; // relative path to snomed-drools-rules, either check out to here or use symlink final File rulesDirectory = new File(rulesPath); Assert.assertTrue(rulesDirectory.isDirectory()); final List<File> ruleDirectories = new ArrayList<>(); for (File productGroupDirectory : Objects.requireNonNull(rulesDirectory.listFiles(TestUtil.DIRECTORY_FILTER))) { for (File ruleGroupDirectory : Objects.requireNonNull(productGroupDirectory.listFiles(TestUtil.DIRECTORY_FILTER))) { for (File ruleDirectory : Objects.requireNonNull(ruleGroupDirectory.listFiles(TestUtil.DIRECTORY_FILTER))) { final File[] ruleFiles = ruleDirectory.listFiles(TestUtil.RULE_FILE_FILTER); if (ruleFiles != null && ruleFiles.length > 0) { ruleDirectories.add(ruleDirectory); } } } } return ruleDirectories; } public RulesTestManual(File ruleDirectory) { this.ruleExecutor = new RuleExecutorFactory().createRuleExecutor(ruleDirectory.getAbsolutePath(), "OneRule"); this.concepts = new HashMap<>(); final File testCasesFile = new File(ruleDirectory, "test-cases.json"); if (testCasesFile.isFile()) { try { testConcepts = TestUtil.loadConceptMap(testCasesFile); } catch (FileNotFoundException e) { throw new AssertionError("Unexpected FileNotFoundException", e); } setConceptIdReferencesAndTempIds(testConcepts); final List<TestConcept<TestDescription, TestRelationship>> givenConcepts = testConcepts.get(GIVEN_CONCEPTS); if (givenConcepts != null) { for (TestConcept<TestDescription, TestRelationship> givenConcept : givenConcepts) { String id = givenConcept.getId(); Assert.assertNotNull("Concepts in the set '" + GIVEN_CONCEPTS + "' must have an ID", id); concepts.put(id, givenConcept); } } } } @Before public void setup() { ManualResourceConfiguration resourceConfiguration = new ManualResourceConfiguration(true, false, new ResourceConfiguration.Local("src/test/resources/dummy-test-resources"), null); TestResourceProvider testResourceProvider = this.ruleExecutor.newTestResourceProvider(new ResourceManager(resourceConfiguration, null)); conceptService = new TestConceptService(concepts); descriptionService = new TestDescriptionService(concepts, testResourceProvider); relationshipService = new TestRelationshipService(concepts); } @Test public void testRulesInDirectory() { final List<TestConcept<TestDescription, TestRelationship>> conceptsThatShouldPass = testConcepts.get(ASSERT_CONCEPTS_PASS); Assert.assertNotNull("The set of concepts '" + ASSERT_CONCEPTS_PASS + "' is required.", conceptsThatShouldPass); executeRulesAndAssertExpectations(ruleExecutor, conceptsThatShouldPass, true); final List<TestConcept<TestDescription, TestRelationship>> conceptsThatShouldFail = testConcepts.get(ASSERT_CONCEPTS_FAIL); Assert.assertNotNull("The set of concepts '" + ASSERT_CONCEPTS_FAIL + "' is required.", conceptsThatShouldPass); executeRulesAndAssertExpectations(ruleExecutor, conceptsThatShouldFail, false); } private void setConceptIdReferencesAndTempIds(Map<String, List<TestConcept<TestDescription, TestRelationship>>> testConcepts) { for (List<TestConcept<TestDescription, TestRelationship>> concepts : testConcepts.values()) { for (TestConcept<TestDescription, TestRelationship> concept : concepts) { setTempIdIfMissing(concept); final String id = concept.getId(); for (TestRelationship relationship : concept.getRelationships()) { setTempIdIfMissing(relationship); relationship.setSourceId(id); } for (TestDescription description : concept.getDescriptions()) { setTempIdIfMissing(description); description.setConceptId(id); if (description.isTextDefinition()) { description.setTypeId(Constants.TEXT_DEFINITION); } } for (OntologyAxiom ontologyAxiom : concept.getOntologyAxioms()) { TestOntologyAxiom testOntologyAxiom = (TestOntologyAxiom) ontologyAxiom; setTempIdIfMissing(testOntologyAxiom); testOntologyAxiom.setReferencedComponentId(id); } } } } private void setTempIdIfMissing(TestComponent component) { if (component.getId() == null) { component.setId("temp-id-" + UUID.randomUUID()); } } private void executeRulesAndAssertExpectations(RuleExecutor ruleExecutor, List<TestConcept<TestDescription, TestRelationship>> conceptsToTest, boolean expectPass) { for (TestConcept<TestDescription, TestRelationship> concept : conceptsToTest) { final HashSet<String> ruleSetNames = new HashSet<>(); ruleSetNames.add("OneRule"); final List<InvalidContent> invalidContent = ruleExecutor.execute( ruleSetNames, Collections.singleton(concept), conceptService, descriptionService, relationshipService, false, false); Set<String> uniqueComponentAssertionSet = new HashSet<>(); for (InvalidContent content : invalidContent) { String pair = content.getComponent().getId() + " " + content.getMessage(); if (!uniqueComponentAssertionSet.add(pair)) { Assert.fail("Component failures should not be reported multiple times. Duplicate component/message found: " + pair); } // Attempt to prevent multiple assertions using the same rule id.. this does not guarantee uniqueness because not 100% test coverage. final String existingMessage = ruleIdToMessageMap.get(content.getRuleId()); final String newMessage = firstPart(content.getMessage()); if (existingMessage != null && !existingMessage.equals(newMessage)) { Assert.fail("Assertion id " + content.getRuleId() + " has been used with multiple failure messages: '" + existingMessage + "' AND '" + newMessage + "'"); } else { ruleIdToMessageMap.put(content.getRuleId(), newMessage); } } if (expectPass) { Assert.assertEquals("A concept from the " + ASSERT_CONCEPTS_PASS + " set actually failed! " + invalidContent, 0, invalidContent.size()); } else { Assert.assertNotEquals("A concept from the " + ASSERT_CONCEPTS_FAIL + " set actually passed! " + concept.toString(), 0, invalidContent.size()); } } } private String firstPart(String message) { return message.substring(0, Math.min(message.length(), 20)); } }
snomed-drools-engine/src/test/java/org/ihtsdo/drools/rulestestrig/RulesTestManual.java
package org.ihtsdo.drools.rulestestrig; import org.ihtsdo.drools.RuleExecutor; import org.ihtsdo.drools.RuleExecutorFactory; import org.ihtsdo.drools.domain.Concept; import org.ihtsdo.drools.domain.Constants; import org.ihtsdo.drools.domain.OntologyAxiom; import org.ihtsdo.drools.response.InvalidContent; import org.ihtsdo.drools.rulestestrig.domain.*; import org.ihtsdo.drools.rulestestrig.service.TestConceptService; import org.ihtsdo.drools.rulestestrig.service.TestDescriptionService; import org.ihtsdo.drools.rulestestrig.service.TestRelationshipService; import org.ihtsdo.drools.service.TestResourceProvider; import org.ihtsdo.otf.resourcemanager.ManualResourceConfiguration; import org.ihtsdo.otf.resourcemanager.ResourceConfiguration; import org.ihtsdo.otf.resourcemanager.ResourceManager; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; import java.io.File; import java.io.FileNotFoundException; import java.util.*; @RunWith(Parameterized.class) public class RulesTestManual { private static final String GIVEN_CONCEPTS = "givenConcepts"; private static final String ASSERT_CONCEPTS_PASS = "assertConceptsPass"; private static final String ASSERT_CONCEPTS_FAIL = "assertConceptsFail"; private RuleExecutor ruleExecutor; private Map<String, Concept> concepts; private Map<String, List<TestConcept<TestDescription, TestRelationship>>> testConcepts; private TestConceptService conceptService; private TestDescriptionService descriptionService; private TestRelationshipService relationshipService; @Parameters(name = "{0}") public static Iterable<? extends Object> data() { final String rulesPath = "../../snomed-drools-rules"; // relative path to snomed-drools-rules, either check out to here or use symlink final File rulesDirectory = new File(rulesPath); Assert.assertTrue(rulesDirectory.isDirectory()); final List<File> ruleDirectories = new ArrayList<>(); for (File productGroupDirectory : rulesDirectory.listFiles(TestUtil.DIRECTORY_FILTER)) { for (File ruleGroupDirectory : productGroupDirectory.listFiles(TestUtil.DIRECTORY_FILTER)) { for (File ruleDirectory : ruleGroupDirectory.listFiles(TestUtil.DIRECTORY_FILTER)) { final File[] ruleFiles = ruleDirectory.listFiles(TestUtil.RULE_FILE_FILTER); if (ruleFiles.length > 0) { ruleDirectories.add(ruleDirectory); } } } } return ruleDirectories; } public RulesTestManual(File ruleDirectory) { this.ruleExecutor = new RuleExecutorFactory().createRuleExecutor(ruleDirectory.getAbsolutePath(), "OneRule"); this.concepts = new HashMap<>(); final File testCasesFile = new File(ruleDirectory, "test-cases.json"); if (testCasesFile.isFile()) { try { testConcepts = TestUtil.loadConceptMap(testCasesFile); } catch (FileNotFoundException e) { throw new AssertionError("Unexpected FileNotFoundException", e); } setConceptIdReferencesAndTempIds(testConcepts); final List<TestConcept<TestDescription, TestRelationship>> givenConcepts = testConcepts.get(GIVEN_CONCEPTS); if (givenConcepts != null) { for (TestConcept<TestDescription, TestRelationship> givenConcept : givenConcepts) { String id = givenConcept.getId(); Assert.assertNotNull("Concepts in the set '" + GIVEN_CONCEPTS + "' must have an ID", id); concepts.put(id, givenConcept); } } } } @Before public void setup() { ManualResourceConfiguration resourceConfiguration = new ManualResourceConfiguration(true, false, new ResourceConfiguration.Local("src/test/resources/dummy-test-resources"), null); TestResourceProvider testResourceProvider = this.ruleExecutor.newTestResourceProvider(new ResourceManager(resourceConfiguration, null)); conceptService = new TestConceptService(concepts); descriptionService = new TestDescriptionService(concepts, testResourceProvider); relationshipService = new TestRelationshipService(concepts); } @Test public void testRulesInDirectory() { final List<TestConcept<TestDescription, TestRelationship>> conceptsThatShouldPass = testConcepts.get(ASSERT_CONCEPTS_PASS); Assert.assertNotNull("The set of concepts '" + ASSERT_CONCEPTS_PASS + "' is required.", conceptsThatShouldPass); executeRulesAndAssertExpectations(ruleExecutor, conceptsThatShouldPass, true); final List<TestConcept<TestDescription, TestRelationship>> conceptsThatShouldFail = testConcepts.get(ASSERT_CONCEPTS_FAIL); Assert.assertNotNull("The set of concepts '" + ASSERT_CONCEPTS_FAIL + "' is required.", conceptsThatShouldPass); executeRulesAndAssertExpectations(ruleExecutor, conceptsThatShouldFail, false); } private void setConceptIdReferencesAndTempIds(Map<String, List<TestConcept<TestDescription, TestRelationship>>> testConcepts) { for (List<TestConcept<TestDescription, TestRelationship>> concepts : testConcepts.values()) { for (TestConcept<TestDescription, TestRelationship> concept : concepts) { setTempIdIfMissing(concept); final String id = concept.getId(); for (TestRelationship relationship : concept.getRelationships()) { setTempIdIfMissing(relationship); relationship.setSourceId(id); } for (TestDescription description : concept.getDescriptions()) { setTempIdIfMissing(description); description.setConceptId(id); if (description.isTextDefinition()) { description.setTypeId(Constants.TEXT_DEFINITION); } } for (OntologyAxiom ontologyAxiom : concept.getOntologyAxioms()) { TestOntologyAxiom testOntologyAxiom = (TestOntologyAxiom) ontologyAxiom; setTempIdIfMissing(testOntologyAxiom); testOntologyAxiom.setReferencedComponentId(id); } } } } private void setTempIdIfMissing(TestComponent component) { if (component.getId() == null) { component.setId("temp-id-" + UUID.randomUUID()); } } private void executeRulesAndAssertExpectations(RuleExecutor ruleExecutor, List<TestConcept<TestDescription, TestRelationship>> conceptsToTest, boolean expectPass) { for (TestConcept<TestDescription, TestRelationship> concept : conceptsToTest) { final HashSet<String> ruleSetNames = new HashSet<>(); ruleSetNames.add("OneRule"); final List<InvalidContent> invalidContent = ruleExecutor.execute( ruleSetNames, Collections.singleton(concept), conceptService, descriptionService, relationshipService, false, false); Set<String> uniqueComponentAssertionSet = new HashSet<>(); for (InvalidContent content : invalidContent) { String pair = content.getComponent().getId() + " " + content.getMessage(); if (!uniqueComponentAssertionSet.add(pair)) { Assert.fail("Component failures should not be reported multiple times. Duplicate component/message found: " + pair); } } if (expectPass) { Assert.assertEquals("A concept from the " + ASSERT_CONCEPTS_PASS + " set actually failed! " + invalidContent.toString(), 0, invalidContent.size()); } else { Assert.assertNotEquals("A concept from the " + ASSERT_CONCEPTS_FAIL + " set actually passed! " + concept.toString(), 0, invalidContent.size()); } } } }
FRI-101 Assert ruleId uniqueness in rule test rig.
snomed-drools-engine/src/test/java/org/ihtsdo/drools/rulestestrig/RulesTestManual.java
FRI-101 Assert ruleId uniqueness in rule test rig.
Java
apache-2.0
816bf9efaa1cd80bb3911a7d77704bdc585d6292
0
PATRIC3/p3_solr,PATRIC3/p3_solr,PATRIC3/p3_solr,PATRIC3/p3_solr,PATRIC3/p3_solr,PATRIC3/p3_solr,PATRIC3/p3_solr
package org.apache.lucene.codecs; /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import java.util.ArrayList; import java.util.Comparator; import java.util.List; import org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.RAMOutputStream; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.CodecUtil; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IntsRef; import org.apache.lucene.util.fst.Builder; import org.apache.lucene.util.fst.ByteSequenceOutputs; import org.apache.lucene.util.fst.BytesRefFSTEnum; import org.apache.lucene.util.fst.FST; import org.apache.lucene.util.fst.NoOutputs; import org.apache.lucene.util.fst.Util; /* TODO: - Currently there is a one-to-one mapping of indexed term to term block, but we could decouple the two, ie, put more terms into the index than there are blocks. The index would take up more RAM but then it'd be able to avoid seeking more often and could make PK/FuzzyQ faster if the additional indexed terms could store the offset into the terms block. - The blocks are not written in true depth-first order, meaning if you just next() the file pointer will sometimes jump backwards. For example, block foo* will be written before block f* because it finished before. This could possibly hurt performance if the terms dict is not hot, since OSs anticipate sequential file access. We could fix the writer to re-order the blocks as a 2nd pass. - Each block encodes the term suffixes packed sequentially using a separate vInt per term, which is 1) wasteful and 2) slow (must linear scan to find a particular suffix). We should instead 1) make random-access array so we can directly access the Nth suffix, and 2) bulk-encode this array using bulk int[] codecs; then at search time we can binary search when we seek a particular term. */ /** * block-based terms index and dictionary writer. * <p> * Writes terms dict and index, block-encoding (column * stride) each term's metadata for each set of terms * between two index terms. * * @see BlockTreeTermsReader * @lucene.experimental */ public class BlockTreeTermsWriter extends FieldsConsumer { public final static int DEFAULT_MIN_BLOCK_SIZE = 25; public final static int DEFAULT_MAX_BLOCK_SIZE = 48; //public final static boolean DEBUG = false; public final static boolean SAVE_DOT_FILES = false; static final int OUTPUT_FLAGS_NUM_BITS = 2; static final int OUTPUT_FLAGS_MASK = 0x3; static final int OUTPUT_FLAG_IS_FLOOR = 0x1; static final int OUTPUT_FLAG_HAS_TERMS = 0x2; /** Extension of terms file */ static final String TERMS_EXTENSION = "tim"; final static String TERMS_CODEC_NAME = "BLOCK_TREE_TERMS_DICT"; // Initial format public static final int TERMS_VERSION_START = 0; public static final int TERMS_VERSION_CURRENT = TERMS_VERSION_START; /** Extension of terms index file */ static final String TERMS_INDEX_EXTENSION = "tip"; final static String TERMS_INDEX_CODEC_NAME = "BLOCK_TREE_TERMS_INDEX"; // Initial format public static final int TERMS_INDEX_VERSION_START = 0; public static final int TERMS_INDEX_VERSION_CURRENT = TERMS_INDEX_VERSION_START; private final IndexOutput out; private final IndexOutput indexOut; final int minItemsInBlock; final int maxItemsInBlock; final PostingsWriterBase postingsWriter; final FieldInfos fieldInfos; FieldInfo currentField; private final List<TermsWriter> fields = new ArrayList<TermsWriter>(); // private final String segment; /** Create a new writer. The number of items (terms or * sub-blocks) per block will aim to be between * minItemsPerBlock and maxItemsPerBlock, though in some * cases the blocks may be smaller than the min. */ public BlockTreeTermsWriter( SegmentWriteState state, PostingsWriterBase postingsWriter, int minItemsInBlock, int maxItemsInBlock) throws IOException { if (minItemsInBlock <= 1) { throw new IllegalArgumentException("minItemsInBlock must be >= 2; got " + minItemsInBlock); } if (maxItemsInBlock <= 0) { throw new IllegalArgumentException("maxItemsInBlock must be >= 1; got " + maxItemsInBlock); } if (minItemsInBlock > maxItemsInBlock) { throw new IllegalArgumentException("maxItemsInBlock must be >= minItemsInBlock; got maxItemsInBlock=" + maxItemsInBlock + " minItemsInBlock=" + minItemsInBlock); } if (2*(minItemsInBlock-1) > maxItemsInBlock) { throw new IllegalArgumentException("maxItemsInBlock must be at least 2*(minItemsInBlock-1); got maxItemsInBlock=" + maxItemsInBlock + " minItemsInBlock=" + minItemsInBlock); } final String termsFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_EXTENSION); out = state.directory.createOutput(termsFileName, state.context); boolean success = false; IndexOutput indexOut = null; try { fieldInfos = state.fieldInfos; this.minItemsInBlock = minItemsInBlock; this.maxItemsInBlock = maxItemsInBlock; writeHeader(out); //DEBUG = state.segmentName.equals("_4a"); final String termsIndexFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_INDEX_EXTENSION); indexOut = state.directory.createOutput(termsIndexFileName, state.context); writeIndexHeader(indexOut); currentField = null; this.postingsWriter = postingsWriter; // segment = state.segmentName; // System.out.println("BTW.init seg=" + state.segmentName); postingsWriter.start(out); // have consumer write its format/header success = true; } finally { if (!success) { IOUtils.closeWhileHandlingException(out, indexOut); } } this.indexOut = indexOut; } protected void writeHeader(IndexOutput out) throws IOException { CodecUtil.writeHeader(out, TERMS_CODEC_NAME, TERMS_VERSION_CURRENT); out.writeLong(0); // leave space for end index pointer } protected void writeIndexHeader(IndexOutput out) throws IOException { CodecUtil.writeHeader(out, TERMS_INDEX_CODEC_NAME, TERMS_INDEX_VERSION_CURRENT); out.writeLong(0); // leave space for end index pointer } protected void writeTrailer(IndexOutput out, long dirStart) throws IOException { out.seek(CodecUtil.headerLength(TERMS_CODEC_NAME)); out.writeLong(dirStart); } protected void writeIndexTrailer(IndexOutput indexOut, long dirStart) throws IOException { indexOut.seek(CodecUtil.headerLength(TERMS_INDEX_CODEC_NAME)); indexOut.writeLong(dirStart); } @Override public TermsConsumer addField(FieldInfo field) throws IOException { //DEBUG = field.name.equals("id"); //if (DEBUG) System.out.println("\nBTTW.addField seg=" + segment + " field=" + field.name); assert currentField == null || currentField.name.compareTo(field.name) < 0; currentField = field; final TermsWriter terms = new TermsWriter(field); fields.add(terms); return terms; } static long encodeOutput(long fp, boolean hasTerms, boolean isFloor) { assert fp < (1L << 62); return (fp << 2) | (hasTerms ? OUTPUT_FLAG_HAS_TERMS : 0) | (isFloor ? OUTPUT_FLAG_IS_FLOOR : 0); } private static class PendingEntry { public final boolean isTerm; protected PendingEntry(boolean isTerm) { this.isTerm = isTerm; } } private static final class PendingTerm extends PendingEntry { public final BytesRef term; public final TermStats stats; public PendingTerm(BytesRef term, TermStats stats) { super(true); this.term = term; this.stats = stats; } @Override public String toString() { return term.utf8ToString(); } } private static final class PendingBlock extends PendingEntry { public final BytesRef prefix; public final long fp; public FST<BytesRef> index; public List<FST<BytesRef>> subIndices; public final boolean hasTerms; public final boolean isFloor; public final int floorLeadByte; private final IntsRef scratchIntsRef = new IntsRef(); public PendingBlock(BytesRef prefix, long fp, boolean hasTerms, boolean isFloor, int floorLeadByte, List<FST<BytesRef>> subIndices) { super(false); this.prefix = prefix; this.fp = fp; this.hasTerms = hasTerms; this.isFloor = isFloor; this.floorLeadByte = floorLeadByte; this.subIndices = subIndices; } @Override public String toString() { return "BLOCK: " + prefix.utf8ToString(); } public void compileIndex(List<PendingBlock> floorBlocks, RAMOutputStream scratchBytes) throws IOException { assert (isFloor && floorBlocks != null && floorBlocks.size() != 0) || (!isFloor && floorBlocks == null): "isFloor=" + isFloor + " floorBlocks=" + floorBlocks; assert scratchBytes.getFilePointer() == 0; // TODO: try writing the leading vLong in MSB order // (opposite of what Lucene does today), for better // outputs sharing in the FST scratchBytes.writeVLong(encodeOutput(fp, hasTerms, isFloor)); if (isFloor) { scratchBytes.writeVInt(floorBlocks.size()); for (PendingBlock sub : floorBlocks) { assert sub.floorLeadByte != -1; //if (DEBUG) { // System.out.println(" write floorLeadByte=" + Integer.toHexString(sub.floorLeadByte&0xff)); //} scratchBytes.writeByte((byte) sub.floorLeadByte); assert sub.fp > fp; scratchBytes.writeVLong((sub.fp - fp) << 1 | (sub.hasTerms ? 1 : 0)); } } final ByteSequenceOutputs outputs = ByteSequenceOutputs.getSingleton(); final Builder<BytesRef> indexBuilder = new Builder<BytesRef>(FST.INPUT_TYPE.BYTE1, 0, 0, true, false, Integer.MAX_VALUE, outputs, null, false); //if (DEBUG) { // System.out.println(" compile index for prefix=" + prefix); //} //indexBuilder.DEBUG = false; final byte[] bytes = new byte[(int) scratchBytes.getFilePointer()]; assert bytes.length > 0; scratchBytes.writeTo(bytes, 0); indexBuilder.add(Util.toIntsRef(prefix, scratchIntsRef), new BytesRef(bytes, 0, bytes.length)); scratchBytes.reset(); // Copy over index for all sub-blocks if (subIndices != null) { for(FST<BytesRef> subIndex : subIndices) { append(indexBuilder, subIndex); } } if (floorBlocks != null) { for (PendingBlock sub : floorBlocks) { if (sub.subIndices != null) { for(FST<BytesRef> subIndex : sub.subIndices) { append(indexBuilder, subIndex); } } sub.subIndices = null; } } index = indexBuilder.finish(); subIndices = null; /* Writer w = new OutputStreamWriter(new FileOutputStream("out.dot")); Util.toDot(index, w, false, false); System.out.println("SAVED to out.dot"); w.close(); */ } // TODO: maybe we could add bulk-add method to // Builder? Takes FST and unions it w/ current // FST. private void append(Builder<BytesRef> builder, FST<BytesRef> subIndex) throws IOException { final BytesRefFSTEnum<BytesRef> subIndexEnum = new BytesRefFSTEnum<BytesRef>(subIndex); BytesRefFSTEnum.InputOutput<BytesRef> indexEnt; while((indexEnt = subIndexEnum.next()) != null) { //if (DEBUG) { // System.out.println(" add sub=" + indexEnt.input + " " + indexEnt.input + " output=" + indexEnt.output); //} builder.add(Util.toIntsRef(indexEnt.input, scratchIntsRef), indexEnt.output); } } } final RAMOutputStream scratchBytes = new RAMOutputStream(); class TermsWriter extends TermsConsumer { private final FieldInfo fieldInfo; private long numTerms; long sumTotalTermFreq; long sumDocFreq; int docCount; long indexStartFP; // Used only to partition terms into the block tree; we // don't pull an FST from this builder: private final NoOutputs noOutputs; private final Builder<Object> blockBuilder; // PendingTerm or PendingBlock: private final List<PendingEntry> pending = new ArrayList<PendingEntry>(); // Index into pending of most recently written block private int lastBlockIndex = -1; // Re-used when segmenting a too-large block into floor // blocks: private int[] subBytes = new int[10]; private int[] subTermCounts = new int[10]; private int[] subTermCountSums = new int[10]; private int[] subSubCounts = new int[10]; // This class assigns terms to blocks "naturally", ie, // according to the number of terms under a given prefix // that we encounter: private class FindBlocks extends Builder.FreezeTail<Object> { @Override public void freeze(final Builder.UnCompiledNode<Object>[] frontier, int prefixLenPlus1, final IntsRef lastInput) throws IOException { //if (DEBUG) System.out.println(" freeze prefixLenPlus1=" + prefixLenPlus1); for(int idx=lastInput.length; idx >= prefixLenPlus1; idx--) { final Builder.UnCompiledNode<Object> node = frontier[idx]; long totCount = 0; if (node.isFinal) { totCount++; } for(int arcIdx=0;arcIdx<node.numArcs;arcIdx++) { @SuppressWarnings("unchecked") final Builder.UnCompiledNode<Object> target = (Builder.UnCompiledNode<Object>) node.arcs[arcIdx].target; totCount += target.inputCount; target.clear(); node.arcs[arcIdx].target = null; } node.numArcs = 0; if (totCount >= minItemsInBlock || idx == 0) { // We are on a prefix node that has enough // entries (terms or sub-blocks) under it to let // us write a new block or multiple blocks (main // block + follow on floor blocks): //if (DEBUG) { // if (totCount < minItemsInBlock && idx != 0) { // System.out.println(" force block has terms"); // } //} writeBlocks(lastInput, idx, (int) totCount); node.inputCount = 1; } else { // stragglers! carry count upwards node.inputCount = totCount; } frontier[idx] = new Builder.UnCompiledNode<Object>(blockBuilder, idx); } } } // Write the top count entries on the pending stack as // one or more blocks. Returns how many blocks were // written. If the entry count is <= maxItemsPerBlock // we just write a single block; else we break into // primary (initial) block and then one or more // following floor blocks: void writeBlocks(IntsRef prevTerm, int prefixLength, int count) throws IOException { if (prefixLength == 0 || count <= maxItemsInBlock) { // Easy case: not floor block. Eg, prefix is "foo", // and we found 30 terms/sub-blocks starting w/ that // prefix, and minItemsInBlock <= 30 <= // maxItemsInBlock. final PendingBlock nonFloorBlock = writeBlock(prevTerm, prefixLength, prefixLength, count, count, 0, false, -1, true); nonFloorBlock.compileIndex(null, scratchBytes); pending.add(nonFloorBlock); } else { // Floor block case. Eg, prefix is "foo" but we // have 100 terms/sub-blocks starting w/ that // prefix. We segment the entries into a primary // block and following floor blocks using the first // label in the suffix to assign to floor blocks. // TODO: we could store min & max suffix start byte // in each block, to make floor blocks authoritative //if (DEBUG) { // final BytesRef prefix = new BytesRef(prefixLength); // for(int m=0;m<prefixLength;m++) { // prefix.bytes[m] = (byte) prevTerm.ints[m]; // } // prefix.length = prefixLength; // //System.out.println("\nWBS count=" + count + " prefix=" + prefix.utf8ToString() + " " + prefix); // System.out.println("writeBlocks: prefix=" + prefix + " " + prefix + " count=" + count + " pending.size()=" + pending.size()); //} //System.out.println("\nwbs count=" + count); final int savLabel = prevTerm.ints[prevTerm.offset + prefixLength]; // Count up how many items fall under // each unique label after the prefix. // TODO: this is wasteful since the builder had // already done this (partitioned these sub-terms // according to their leading prefix byte) final List<PendingEntry> slice = pending.subList(pending.size()-count, pending.size()); int lastSuffixLeadLabel = -1; int termCount = 0; int subCount = 0; int numSubs = 0; for(PendingEntry ent : slice) { // First byte in the suffix of this term final int suffixLeadLabel; if (ent.isTerm) { PendingTerm term = (PendingTerm) ent; if (term.term.length == prefixLength) { // Suffix is 0, ie prefix 'foo' and term is // 'foo' so the term has empty string suffix // in this block assert lastSuffixLeadLabel == -1; assert numSubs == 0; suffixLeadLabel = -1; } else { suffixLeadLabel = term.term.bytes[term.term.offset + prefixLength] & 0xff; } } else { PendingBlock block = (PendingBlock) ent; assert block.prefix.length > prefixLength; suffixLeadLabel = block.prefix.bytes[block.prefix.offset + prefixLength] & 0xff; } if (suffixLeadLabel != lastSuffixLeadLabel && (termCount + subCount) != 0) { if (subBytes.length == numSubs) { subBytes = ArrayUtil.grow(subBytes); subTermCounts = ArrayUtil.grow(subTermCounts); subSubCounts = ArrayUtil.grow(subSubCounts); } subBytes[numSubs] = lastSuffixLeadLabel; lastSuffixLeadLabel = suffixLeadLabel; subTermCounts[numSubs] = termCount; subSubCounts[numSubs] = subCount; /* if (suffixLeadLabel == -1) { System.out.println(" sub " + -1 + " termCount=" + termCount + " subCount=" + subCount); } else { System.out.println(" sub " + Integer.toHexString(suffixLeadLabel) + " termCount=" + termCount + " subCount=" + subCount); } */ termCount = subCount = 0; numSubs++; } if (ent.isTerm) { termCount++; } else { subCount++; } } if (subBytes.length == numSubs) { subBytes = ArrayUtil.grow(subBytes); subTermCounts = ArrayUtil.grow(subTermCounts); subSubCounts = ArrayUtil.grow(subSubCounts); } subBytes[numSubs] = lastSuffixLeadLabel; subTermCounts[numSubs] = termCount; subSubCounts[numSubs] = subCount; numSubs++; /* if (lastSuffixLeadLabel == -1) { System.out.println(" sub " + -1 + " termCount=" + termCount + " subCount=" + subCount); } else { System.out.println(" sub " + Integer.toHexString(lastSuffixLeadLabel) + " termCount=" + termCount + " subCount=" + subCount); } */ if (subTermCountSums.length < numSubs) { subTermCountSums = ArrayUtil.grow(subTermCountSums, numSubs); } // Roll up (backwards) the termCounts; postings impl // needs this to know where to pull the term slice // from its pending terms stack: int sum = 0; for(int idx=numSubs-1;idx>=0;idx--) { sum += subTermCounts[idx]; subTermCountSums[idx] = sum; } // TODO: make a better segmenter? It'd have to // absorb the too-small end blocks backwards into // the previous blocks // Naive greedy segmentation; this is not always // best (it can produce a too-small block as the // last block): int pendingCount = 0; int startLabel = subBytes[0]; int curStart = count; subCount = 0; final List<PendingBlock> floorBlocks = new ArrayList<PendingBlock>(); PendingBlock firstBlock = null; for(int sub=0;sub<numSubs;sub++) { pendingCount += subTermCounts[sub] + subSubCounts[sub]; //System.out.println(" " + (subTermCounts[sub] + subSubCounts[sub])); subCount++; // Greedily make a floor block as soon as we've // crossed the min count if (pendingCount >= minItemsInBlock) { final int curPrefixLength; if (startLabel == -1) { curPrefixLength = prefixLength; } else { curPrefixLength = 1+prefixLength; // floor term: prevTerm.ints[prevTerm.offset + prefixLength] = startLabel; } //System.out.println(" " + subCount + " subs"); final PendingBlock floorBlock = writeBlock(prevTerm, prefixLength, curPrefixLength, curStart, pendingCount, subTermCountSums[1+sub], true, startLabel, curStart == pendingCount); if (firstBlock == null) { firstBlock = floorBlock; } else { floorBlocks.add(floorBlock); } curStart -= pendingCount; //System.out.println(" = " + pendingCount); pendingCount = 0; assert minItemsInBlock == 1 || subCount > 1: "minItemsInBlock=" + minItemsInBlock + " subCount=" + subCount + " sub=" + sub + " of " + numSubs + " subTermCount=" + subTermCountSums[sub] + " subSubCount=" + subSubCounts[sub] + " depth=" + prefixLength; subCount = 0; startLabel = subBytes[sub+1]; if (curStart == 0) { break; } if (curStart <= maxItemsInBlock) { // remainder is small enough to fit into a // block. NOTE that this may be too small (< // minItemsInBlock); need a true segmenter // here assert startLabel != -1; assert firstBlock != null; prevTerm.ints[prevTerm.offset + prefixLength] = startLabel; //System.out.println(" final " + (numSubs-sub-1) + " subs"); /* for(sub++;sub < numSubs;sub++) { System.out.println(" " + (subTermCounts[sub] + subSubCounts[sub])); } System.out.println(" = " + curStart); if (curStart < minItemsInBlock) { System.out.println(" **"); } */ floorBlocks.add(writeBlock(prevTerm, prefixLength, prefixLength+1, curStart, curStart, 0, true, startLabel, true)); break; } } } prevTerm.ints[prevTerm.offset + prefixLength] = savLabel; assert firstBlock != null; firstBlock.compileIndex(floorBlocks, scratchBytes); pending.add(firstBlock); //if (DEBUG) System.out.println(" done pending.size()=" + pending.size()); } lastBlockIndex = pending.size()-1; } // for debugging @SuppressWarnings("unused") private String toString(BytesRef b) { try { return b.utf8ToString() + " " + b; } catch (Throwable t) { // If BytesRef isn't actually UTF8, or it's eg a // prefix of UTF8 that ends mid-unicode-char, we // fallback to hex: return b.toString(); } } // Writes all entries in the pending slice as a single // block: private PendingBlock writeBlock(IntsRef prevTerm, int prefixLength, int indexPrefixLength, int startBackwards, int length, int futureTermCount, boolean isFloor, int floorLeadByte, boolean isLastInFloor) throws IOException { assert length > 0; final int start = pending.size()-startBackwards; assert start >= 0: "pending.size()=" + pending.size() + " startBackwards=" + startBackwards + " length=" + length; final List<PendingEntry> slice = pending.subList(start, start + length); final long startFP = out.getFilePointer(); final BytesRef prefix = new BytesRef(indexPrefixLength); for(int m=0;m<indexPrefixLength;m++) { prefix.bytes[m] = (byte) prevTerm.ints[m]; } prefix.length = indexPrefixLength; // Write block header: out.writeVInt((length<<1)|(isLastInFloor ? 1:0)); // if (DEBUG) { // System.out.println(" writeBlock " + (isFloor ? "(floor) " : "") + "seg=" + segment + " pending.size()=" + pending.size() + " prefixLength=" + prefixLength + " indexPrefix=" + toString(prefix) + " entCount=" + length + " startFP=" + startFP + " futureTermCount=" + futureTermCount + (isFloor ? (" floorLeadByte=" + Integer.toHexString(floorLeadByte&0xff)) : "") + " isLastInFloor=" + isLastInFloor); // } // 1st pass: pack term suffix bytes into byte[] blob // TODO: cutover to bulk int codec... simple64? final boolean isLeafBlock; if (lastBlockIndex < start) { // This block definitely does not contain sub-blocks: isLeafBlock = true; //System.out.println("no scan true isFloor=" + isFloor); } else if (!isFloor) { // This block definitely does contain at least one sub-block: isLeafBlock = false; //System.out.println("no scan false " + lastBlockIndex + " vs start=" + start + " len=" + length); } else { // Must scan up-front to see if there is a sub-block boolean v = true; //System.out.println("scan " + lastBlockIndex + " vs start=" + start + " len=" + length); for (PendingEntry ent : slice) { if (!ent.isTerm) { v = false; break; } } isLeafBlock = v; } final List<FST<BytesRef>> subIndices; int termCount; if (isLeafBlock) { subIndices = null; for (PendingEntry ent : slice) { assert ent.isTerm; PendingTerm term = (PendingTerm) ent; final int suffix = term.term.length - prefixLength; // if (DEBUG) { // BytesRef suffixBytes = new BytesRef(suffix); // System.arraycopy(term.term.bytes, prefixLength, suffixBytes.bytes, 0, suffix); // suffixBytes.length = suffix; // System.out.println(" write term suffix=" + suffixBytes); // } // For leaf block we write suffix straight bytesWriter.writeVInt(suffix); bytesWriter.writeBytes(term.term.bytes, prefixLength, suffix); // Write term stats, to separate byte[] blob: bytesWriter2.writeVInt(term.stats.docFreq); if (fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) { assert term.stats.totalTermFreq >= term.stats.docFreq; bytesWriter2.writeVLong(term.stats.totalTermFreq - term.stats.docFreq); } } termCount = length; } else { subIndices = new ArrayList<FST<BytesRef>>(); termCount = 0; for (PendingEntry ent : slice) { if (ent.isTerm) { PendingTerm term = (PendingTerm) ent; final int suffix = term.term.length - prefixLength; // if (DEBUG) { // BytesRef suffixBytes = new BytesRef(suffix); // System.arraycopy(term.term.bytes, prefixLength, suffixBytes.bytes, 0, suffix); // suffixBytes.length = suffix; // System.out.println(" write term suffix=" + suffixBytes); // } // For non-leaf block we borrow 1 bit to record // if entry is term or sub-block bytesWriter.writeVInt(suffix<<1); bytesWriter.writeBytes(term.term.bytes, prefixLength, suffix); // Write term stats, to separate byte[] blob: bytesWriter2.writeVInt(term.stats.docFreq); if (fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) { assert term.stats.totalTermFreq >= term.stats.docFreq; bytesWriter2.writeVLong(term.stats.totalTermFreq - term.stats.docFreq); } termCount++; } else { PendingBlock block = (PendingBlock) ent; final int suffix = block.prefix.length - prefixLength; assert suffix > 0; // For non-leaf block we borrow 1 bit to record // if entry is term or sub-block bytesWriter.writeVInt((suffix<<1)|1); bytesWriter.writeBytes(block.prefix.bytes, prefixLength, suffix); assert block.fp < startFP; // if (DEBUG) { // BytesRef suffixBytes = new BytesRef(suffix); // System.arraycopy(block.prefix.bytes, prefixLength, suffixBytes.bytes, 0, suffix); // suffixBytes.length = suffix; // System.out.println(" write sub-block suffix=" + toString(suffixBytes) + " subFP=" + block.fp + " subCode=" + (startFP-block.fp) + " floor=" + block.isFloor); // } bytesWriter.writeVLong(startFP - block.fp); subIndices.add(block.index); } } assert subIndices.size() != 0; } // TODO: we could block-write the term suffix pointers; // this would take more space but would enable binary // search on lookup // Write suffixes byte[] blob to terms dict output: out.writeVInt((int) (bytesWriter.getFilePointer() << 1) | (isLeafBlock ? 1:0)); bytesWriter.writeTo(out); bytesWriter.reset(); // Write term stats byte[] blob out.writeVInt((int) bytesWriter2.getFilePointer()); bytesWriter2.writeTo(out); bytesWriter2.reset(); // Have postings writer write block postingsWriter.flushTermsBlock(futureTermCount+termCount, termCount); // Remove slice replaced by block: slice.clear(); if (lastBlockIndex >= start) { if (lastBlockIndex < start+length) { lastBlockIndex = start; } else { lastBlockIndex -= length; } } // if (DEBUG) { // System.out.println(" fpEnd=" + out.getFilePointer()); // } return new PendingBlock(prefix, startFP, termCount != 0, isFloor, floorLeadByte, subIndices); } TermsWriter(FieldInfo fieldInfo) { this.fieldInfo = fieldInfo; noOutputs = NoOutputs.getSingleton(); // This Builder is just used transiently to fragment // terms into "good" blocks; we don't save the // resulting FST: blockBuilder = new Builder<Object>(FST.INPUT_TYPE.BYTE1, 0, 0, true, true, Integer.MAX_VALUE, noOutputs, new FindBlocks(), false); postingsWriter.setField(fieldInfo); } @Override public Comparator<BytesRef> getComparator() { return BytesRef.getUTF8SortedAsUnicodeComparator(); } @Override public PostingsConsumer startTerm(BytesRef text) throws IOException { //if (DEBUG) System.out.println("\nBTTW.startTerm term=" + fieldInfo.name + ":" + toString(text) + " seg=" + segment); postingsWriter.startTerm(); /* if (fieldInfo.name.equals("id")) { postingsWriter.termID = Integer.parseInt(text.utf8ToString()); } else { postingsWriter.termID = -1; } */ return postingsWriter; } private final IntsRef scratchIntsRef = new IntsRef(); @Override public void finishTerm(BytesRef text, TermStats stats) throws IOException { assert stats.docFreq > 0; //if (DEBUG) System.out.println("BTTW.finishTerm term=" + fieldInfo.name + ":" + toString(text) + " seg=" + segment + " df=" + stats.docFreq); blockBuilder.add(Util.toIntsRef(text, scratchIntsRef), noOutputs.getNoOutput()); pending.add(new PendingTerm(BytesRef.deepCopyOf(text), stats)); postingsWriter.finishTerm(stats); numTerms++; } // Finishes all terms in this field @Override public void finish(long sumTotalTermFreq, long sumDocFreq, int docCount) throws IOException { if (numTerms > 0) { blockBuilder.finish(); // We better have one final "root" block: assert pending.size() == 1 && !pending.get(0).isTerm: "pending.size()=" + pending.size() + " pending=" + pending; final PendingBlock root = (PendingBlock) pending.get(0); assert root.prefix.length == 0; assert root.index.getEmptyOutput() != null; this.sumTotalTermFreq = sumTotalTermFreq; this.sumDocFreq = sumDocFreq; this.docCount = docCount; // Write FST to index indexStartFP = indexOut.getFilePointer(); root.index.save(indexOut); //System.out.println(" write FST " + indexStartFP + " field=" + fieldInfo.name); // if (SAVE_DOT_FILES || DEBUG) { // final String dotFileName = segment + "_" + fieldInfo.name + ".dot"; // Writer w = new OutputStreamWriter(new FileOutputStream(dotFileName)); // Util.toDot(root.index, w, false, false); // System.out.println("SAVED to " + dotFileName); // w.close(); // } } else { assert sumTotalTermFreq == 0; assert sumDocFreq == 0; assert docCount == 0; } } private final RAMOutputStream bytesWriter = new RAMOutputStream(); private final RAMOutputStream bytesWriter2 = new RAMOutputStream(); } @Override public void close() throws IOException { IOException ioe = null; try { int nonZeroCount = 0; for(TermsWriter field : fields) { if (field.numTerms > 0) { nonZeroCount++; } } final long dirStart = out.getFilePointer(); final long indexDirStart = indexOut.getFilePointer(); out.writeVInt(nonZeroCount); for(TermsWriter field : fields) { if (field.numTerms > 0) { //System.out.println(" field " + field.fieldInfo.name + " " + field.numTerms + " terms"); out.writeVInt(field.fieldInfo.number); out.writeVLong(field.numTerms); final BytesRef rootCode = ((PendingBlock) field.pending.get(0)).index.getEmptyOutput(); assert rootCode != null: "field=" + field.fieldInfo.name + " numTerms=" + field.numTerms; out.writeVInt(rootCode.length); out.writeBytes(rootCode.bytes, rootCode.offset, rootCode.length); if (field.fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) { out.writeVLong(field.sumTotalTermFreq); } out.writeVLong(field.sumDocFreq); out.writeVInt(field.docCount); indexOut.writeVLong(field.indexStartFP); } } writeTrailer(out, dirStart); writeIndexTrailer(indexOut, indexDirStart); } catch (IOException ioe2) { ioe = ioe2; } finally { IOUtils.closeWhileHandlingException(ioe, out, indexOut, postingsWriter); } } }
lucene/core/src/java/org/apache/lucene/codecs/BlockTreeTermsWriter.java
package org.apache.lucene.codecs; /** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.io.IOException; import java.util.ArrayList; import java.util.Comparator; import java.util.List; import org.apache.lucene.index.FieldInfo.IndexOptions; import org.apache.lucene.index.FieldInfo; import org.apache.lucene.index.FieldInfos; import org.apache.lucene.index.IndexFileNames; import org.apache.lucene.index.SegmentWriteState; import org.apache.lucene.store.IndexOutput; import org.apache.lucene.store.RAMOutputStream; import org.apache.lucene.util.ArrayUtil; import org.apache.lucene.util.BytesRef; import org.apache.lucene.util.CodecUtil; import org.apache.lucene.util.IOUtils; import org.apache.lucene.util.IntsRef; import org.apache.lucene.util.fst.Builder; import org.apache.lucene.util.fst.ByteSequenceOutputs; import org.apache.lucene.util.fst.BytesRefFSTEnum; import org.apache.lucene.util.fst.FST; import org.apache.lucene.util.fst.NoOutputs; import org.apache.lucene.util.fst.Util; /* TODO: - Currently there is a one-to-one mapping of indexed term to term block, but we could decouple the two, ie, put more terms into the index than there are blocks. The index would take up more RAM but then it'd be able to avoid seeking more often and could make PK/FuzzyQ faster if the additional indexed terms could store the offset into the terms block. - The blocks are not written in true depth-first order, meaning if you just next() the file pointer will sometimes jump backwards. For example, block foo* will be written before block f* because it finished before. This could possibly hurt performance if the terms dict is not hot, since OSs anticipate sequential file access. We could fix the writer to re-order the blocks as a 2nd pass. - Each block encodes the term suffixes packed sequentially using a separate vInt per term, which is 1) wasteful and 2) slow (must linear scan to find a particular suffix). We should instead 1) make random-access array so we can directly access the Nth suffix, and 2) bulk-encode this array using bulk int[] codecs; then at search time we can binary search when we seek a particular term. */ /** * block-based terms index and dictionary writer. * <p> * Writes terms dict and index, block-encoding (column * stride) each term's metadata for each set of terms * between two index terms. * * @see BlockTreeTermsReader * @lucene.experimental */ public class BlockTreeTermsWriter extends FieldsConsumer { public final static int DEFAULT_MIN_BLOCK_SIZE = 25; public final static int DEFAULT_MAX_BLOCK_SIZE = 48; //public final static boolean DEBUG = false; public final static boolean SAVE_DOT_FILES = false; static final int OUTPUT_FLAGS_NUM_BITS = 2; static final int OUTPUT_FLAGS_MASK = 0x3; static final int OUTPUT_FLAG_IS_FLOOR = 0x1; static final int OUTPUT_FLAG_HAS_TERMS = 0x2; /** Extension of terms file */ static final String TERMS_EXTENSION = "tim"; final static String TERMS_CODEC_NAME = "BLOCK_TREE_TERMS_DICT"; // Initial format public static final int TERMS_VERSION_START = 0; public static final int TERMS_VERSION_CURRENT = TERMS_VERSION_START; /** Extension of terms index file */ static final String TERMS_INDEX_EXTENSION = "tip"; final static String TERMS_INDEX_CODEC_NAME = "BLOCK_TREE_TERMS_INDEX"; // Initial format public static final int TERMS_INDEX_VERSION_START = 0; public static final int TERMS_INDEX_VERSION_CURRENT = TERMS_INDEX_VERSION_START; private final IndexOutput out; private final IndexOutput indexOut; final int minItemsInBlock; final int maxItemsInBlock; final PostingsWriterBase postingsWriter; final FieldInfos fieldInfos; FieldInfo currentField; private final List<TermsWriter> fields = new ArrayList<TermsWriter>(); // private final String segment; /** Create a new writer. The number of items (terms or * sub-blocks) per block will aim to be between * minItemsPerBlock and maxItemsPerBlock, though in some * cases the blocks may be smaller than the min. */ public BlockTreeTermsWriter( SegmentWriteState state, PostingsWriterBase postingsWriter, int minItemsInBlock, int maxItemsInBlock) throws IOException { if (minItemsInBlock <= 1) { throw new IllegalArgumentException("minItemsInBlock must be >= 2; got " + minItemsInBlock); } if (maxItemsInBlock <= 0) { throw new IllegalArgumentException("maxItemsInBlock must be >= 1; got " + maxItemsInBlock); } if (minItemsInBlock > maxItemsInBlock) { throw new IllegalArgumentException("maxItemsInBlock must be >= minItemsInBlock; got maxItemsInBlock=" + maxItemsInBlock + " minItemsInBlock=" + minItemsInBlock); } if (2*(minItemsInBlock-1) > maxItemsInBlock) { throw new IllegalArgumentException("maxItemsInBlock must be at least 2*(minItemsInBlock-1); got maxItemsInBlock=" + maxItemsInBlock + " minItemsInBlock=" + minItemsInBlock); } final String termsFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_EXTENSION); out = state.directory.createOutput(termsFileName, state.context); boolean success = false; IndexOutput indexOut = null; try { fieldInfos = state.fieldInfos; this.minItemsInBlock = minItemsInBlock; this.maxItemsInBlock = maxItemsInBlock; writeHeader(out); //DEBUG = state.segmentName.equals("_4a"); final String termsIndexFileName = IndexFileNames.segmentFileName(state.segmentInfo.name, state.segmentSuffix, TERMS_INDEX_EXTENSION); indexOut = state.directory.createOutput(termsIndexFileName, state.context); writeIndexHeader(indexOut); currentField = null; this.postingsWriter = postingsWriter; // segment = state.segmentName; // System.out.println("BTW.init seg=" + state.segmentName); postingsWriter.start(out); // have consumer write its format/header success = true; } finally { if (!success) { IOUtils.closeWhileHandlingException(out, indexOut); } } this.indexOut = indexOut; } protected void writeHeader(IndexOutput out) throws IOException { CodecUtil.writeHeader(out, TERMS_CODEC_NAME, TERMS_VERSION_CURRENT); out.writeLong(0); // leave space for end index pointer } protected void writeIndexHeader(IndexOutput out) throws IOException { CodecUtil.writeHeader(out, TERMS_INDEX_CODEC_NAME, TERMS_INDEX_VERSION_CURRENT); out.writeLong(0); // leave space for end index pointer } protected void writeTrailer(IndexOutput out, long dirStart) throws IOException { out.seek(CodecUtil.headerLength(TERMS_CODEC_NAME)); out.writeLong(dirStart); } protected void writeIndexTrailer(IndexOutput indexOut, long dirStart) throws IOException { indexOut.seek(CodecUtil.headerLength(TERMS_INDEX_CODEC_NAME)); indexOut.writeLong(dirStart); } @Override public TermsConsumer addField(FieldInfo field) throws IOException { //DEBUG = field.name.equals("id"); //if (DEBUG) System.out.println("\nBTTW.addField seg=" + segment + " field=" + field.name); assert currentField == null || currentField.name.compareTo(field.name) < 0; currentField = field; final TermsWriter terms = new TermsWriter(field); fields.add(terms); return terms; } static long encodeOutput(long fp, boolean hasTerms, boolean isFloor) { assert fp < (1L << 62); return (fp << 2) | (hasTerms ? OUTPUT_FLAG_HAS_TERMS : 0) | (isFloor ? OUTPUT_FLAG_IS_FLOOR : 0); } private static class PendingEntry { public final boolean isTerm; protected PendingEntry(boolean isTerm) { this.isTerm = isTerm; } } private static final class PendingTerm extends PendingEntry { public final BytesRef term; public final TermStats stats; public PendingTerm(BytesRef term, TermStats stats) { super(true); this.term = term; this.stats = stats; } @Override public String toString() { return term.utf8ToString(); } } private static final class PendingBlock extends PendingEntry { public final BytesRef prefix; public final long fp; public FST<BytesRef> index; public List<FST<BytesRef>> subIndices; public final boolean hasTerms; public final boolean isFloor; public final int floorLeadByte; private final IntsRef scratchIntsRef = new IntsRef(); public PendingBlock(BytesRef prefix, long fp, boolean hasTerms, boolean isFloor, int floorLeadByte, List<FST<BytesRef>> subIndices) { super(false); this.prefix = prefix; this.fp = fp; this.hasTerms = hasTerms; this.isFloor = isFloor; this.floorLeadByte = floorLeadByte; this.subIndices = subIndices; } @Override public String toString() { return "BLOCK: " + prefix.utf8ToString(); } public void compileIndex(List<PendingBlock> floorBlocks, RAMOutputStream scratchBytes) throws IOException { assert (isFloor && floorBlocks != null && floorBlocks.size() != 0) || (!isFloor && floorBlocks == null): "isFloor=" + isFloor + " floorBlocks=" + floorBlocks; assert scratchBytes.getFilePointer() == 0; // TODO: try writing the leading vLong in MSB order // (opposite of what Lucene does today), for better // outputs sharing in the FST scratchBytes.writeVLong(encodeOutput(fp, hasTerms, isFloor)); if (isFloor) { scratchBytes.writeVInt(floorBlocks.size()); for (PendingBlock sub : floorBlocks) { assert sub.floorLeadByte != -1; //if (DEBUG) { // System.out.println(" write floorLeadByte=" + Integer.toHexString(sub.floorLeadByte&0xff)); //} scratchBytes.writeByte((byte) sub.floorLeadByte); assert sub.fp > fp; scratchBytes.writeVLong((sub.fp - fp) << 1 | (sub.hasTerms ? 1 : 0)); } } final ByteSequenceOutputs outputs = ByteSequenceOutputs.getSingleton(); final Builder<BytesRef> indexBuilder = new Builder<BytesRef>(FST.INPUT_TYPE.BYTE1, 0, 0, true, false, Integer.MAX_VALUE, outputs, null, false); //if (DEBUG) { // System.out.println(" compile index for prefix=" + prefix); //} //indexBuilder.DEBUG = false; final byte[] bytes = new byte[(int) scratchBytes.getFilePointer()]; assert bytes.length > 0; scratchBytes.writeTo(bytes, 0); indexBuilder.add(Util.toIntsRef(prefix, scratchIntsRef), new BytesRef(bytes, 0, bytes.length)); scratchBytes.reset(); // Copy over index for all sub-blocks if (subIndices != null) { for(FST<BytesRef> subIndex : subIndices) { append(indexBuilder, subIndex); } } if (floorBlocks != null) { for (PendingBlock sub : floorBlocks) { if (sub.subIndices != null) { for(FST<BytesRef> subIndex : sub.subIndices) { append(indexBuilder, subIndex); } } sub.subIndices = null; } } index = indexBuilder.finish(); subIndices = null; /* Writer w = new OutputStreamWriter(new FileOutputStream("out.dot")); Util.toDot(index, w, false, false); System.out.println("SAVED to out.dot"); w.close(); */ } // TODO: maybe we could add bulk-add method to // Builder? Takes FST and unions it w/ current // FST. private void append(Builder<BytesRef> builder, FST<BytesRef> subIndex) throws IOException { final BytesRefFSTEnum<BytesRef> subIndexEnum = new BytesRefFSTEnum<BytesRef>(subIndex); BytesRefFSTEnum.InputOutput<BytesRef> indexEnt; while((indexEnt = subIndexEnum.next()) != null) { //if (DEBUG) { // System.out.println(" add sub=" + indexEnt.input + " " + indexEnt.input + " output=" + indexEnt.output); //} builder.add(Util.toIntsRef(indexEnt.input, scratchIntsRef), indexEnt.output); } } } final RAMOutputStream scratchBytes = new RAMOutputStream(); class TermsWriter extends TermsConsumer { private final FieldInfo fieldInfo; private long numTerms; long sumTotalTermFreq; long sumDocFreq; int docCount; long indexStartFP; // Used only to partition terms into the block tree; we // don't pull an FST from this builder: private final NoOutputs noOutputs; private final Builder<Object> blockBuilder; // PendingTerm or PendingBlock: private final List<PendingEntry> pending = new ArrayList<PendingEntry>(); // Index into pending of most recently written block private int lastBlockIndex = -1; // Re-used when segmenting a too-large block into floor // blocks: private int[] subBytes = new int[10]; private int[] subTermCounts = new int[10]; private int[] subTermCountSums = new int[10]; private int[] subSubCounts = new int[10]; // This class assigns terms to blocks "naturally", ie, // according to the number of terms under a given prefix // that we encounter: private class FindBlocks extends Builder.FreezeTail<Object> { @Override public void freeze(final Builder.UnCompiledNode<Object>[] frontier, int prefixLenPlus1, final IntsRef lastInput) throws IOException { //if (DEBUG) System.out.println(" freeze prefixLenPlus1=" + prefixLenPlus1); for(int idx=lastInput.length; idx >= prefixLenPlus1; idx--) { final Builder.UnCompiledNode<Object> node = frontier[idx]; long totCount = 0; if (node.isFinal) { totCount++; } for(int arcIdx=0;arcIdx<node.numArcs;arcIdx++) { @SuppressWarnings("unchecked") final Builder.UnCompiledNode<Object> target = (Builder.UnCompiledNode<Object>) node.arcs[arcIdx].target; totCount += target.inputCount; target.clear(); node.arcs[arcIdx].target = null; } node.numArcs = 0; if (totCount >= minItemsInBlock || idx == 0) { // We are on a prefix node that has enough // entries (terms or sub-blocks) under it to let // us write a new block or multiple blocks (main // block + follow on floor blocks): //if (DEBUG) { // if (totCount < minItemsInBlock && idx != 0) { // System.out.println(" force block has terms"); // } //} writeBlocks(lastInput, idx, (int) totCount); node.inputCount = 1; } else { // stragglers! carry count upwards node.inputCount = totCount; } frontier[idx] = new Builder.UnCompiledNode<Object>(blockBuilder, idx); } } } // Write the top count entries on the pending stack as // one or more blocks. Returns how many blocks were // written. If the entry count is <= maxItemsPerBlock // we just write a single block; else we break into // primary (initial) block and then one or more // following floor blocks: void writeBlocks(IntsRef prevTerm, int prefixLength, int count) throws IOException { if (prefixLength == 0 || count <= maxItemsInBlock) { // Easy case: not floor block. Eg, prefix is "foo", // and we found 30 terms/sub-blocks starting w/ that // prefix, and minItemsInBlock <= 30 <= // maxItemsInBlock. final PendingBlock nonFloorBlock = writeBlock(prevTerm, prefixLength, prefixLength, count, count, 0, false, -1, true); nonFloorBlock.compileIndex(null, scratchBytes); pending.add(nonFloorBlock); } else { // Floor block case. Eg, prefix is "foo" but we // have 100 terms/sub-blocks starting w/ that // prefix. We segment the entries into a primary // block and following floor blocks using the first // label in the suffix to assign to floor blocks. // TODO: we could store min & max suffix start byte // in each block, to make floor blocks authoritative //if (DEBUG) { // final BytesRef prefix = new BytesRef(prefixLength); // for(int m=0;m<prefixLength;m++) { // prefix.bytes[m] = (byte) prevTerm.ints[m]; // } // prefix.length = prefixLength; // //System.out.println("\nWBS count=" + count + " prefix=" + prefix.utf8ToString() + " " + prefix); // System.out.println("writeBlocks: prefix=" + prefix + " " + prefix + " count=" + count + " pending.size()=" + pending.size()); //} //System.out.println("\nwbs count=" + count); final int savLabel = prevTerm.ints[prevTerm.offset + prefixLength]; // Count up how many items fall under // each unique label after the prefix. // TODO: this is wasteful since the builder had // already done this (partitioned these sub-terms // according to their leading prefix byte) final List<PendingEntry> slice = pending.subList(pending.size()-count, pending.size()); int lastSuffixLeadLabel = -1; int termCount = 0; int subCount = 0; int numSubs = 0; for(PendingEntry ent : slice) { // First byte in the suffix of this term final int suffixLeadLabel; if (ent.isTerm) { PendingTerm term = (PendingTerm) ent; if (term.term.length == prefixLength) { // Suffix is 0, ie prefix 'foo' and term is // 'foo' so the term has empty string suffix // in this block assert lastSuffixLeadLabel == -1; assert numSubs == 0; suffixLeadLabel = -1; } else { suffixLeadLabel = term.term.bytes[term.term.offset + prefixLength] & 0xff; } } else { PendingBlock block = (PendingBlock) ent; assert block.prefix.length > prefixLength; suffixLeadLabel = block.prefix.bytes[block.prefix.offset + prefixLength] & 0xff; } if (suffixLeadLabel != lastSuffixLeadLabel && (termCount + subCount) != 0) { if (subBytes.length == numSubs) { subBytes = ArrayUtil.grow(subBytes); subTermCounts = ArrayUtil.grow(subTermCounts); subSubCounts = ArrayUtil.grow(subSubCounts); } subBytes[numSubs] = lastSuffixLeadLabel; lastSuffixLeadLabel = suffixLeadLabel; subTermCounts[numSubs] = termCount; subSubCounts[numSubs] = subCount; /* if (suffixLeadLabel == -1) { System.out.println(" sub " + -1 + " termCount=" + termCount + " subCount=" + subCount); } else { System.out.println(" sub " + Integer.toHexString(suffixLeadLabel) + " termCount=" + termCount + " subCount=" + subCount); } */ termCount = subCount = 0; numSubs++; } if (ent.isTerm) { termCount++; } else { subCount++; } } if (subBytes.length == numSubs) { subBytes = ArrayUtil.grow(subBytes); subTermCounts = ArrayUtil.grow(subTermCounts); subSubCounts = ArrayUtil.grow(subSubCounts); } subBytes[numSubs] = lastSuffixLeadLabel; subTermCounts[numSubs] = termCount; subSubCounts[numSubs] = subCount; numSubs++; /* if (lastSuffixLeadLabel == -1) { System.out.println(" sub " + -1 + " termCount=" + termCount + " subCount=" + subCount); } else { System.out.println(" sub " + Integer.toHexString(lastSuffixLeadLabel) + " termCount=" + termCount + " subCount=" + subCount); } */ if (subTermCountSums.length < numSubs) { subTermCountSums = ArrayUtil.grow(subTermCountSums, numSubs); } // Roll up (backwards) the termCounts; postings impl // needs this to know where to pull the term slice // from its pending terms stack: int sum = 0; for(int idx=numSubs-1;idx>=0;idx--) { sum += subTermCounts[idx]; subTermCountSums[idx] = sum; } // TODO: make a better segmenter? It'd have to // absorb the too-small end blocks backwards into // the previous blocks // Naive greedy segmentation; this is not always // best (it can produce a too-small block as the // last block): int pendingCount = 0; int startLabel = subBytes[0]; int curStart = count; subCount = 0; final List<PendingBlock> floorBlocks = new ArrayList<PendingBlock>(); PendingBlock firstBlock = null; for(int sub=0;sub<numSubs;sub++) { pendingCount += subTermCounts[sub] + subSubCounts[sub]; //System.out.println(" " + (subTermCounts[sub] + subSubCounts[sub])); subCount++; // Greedily make a floor block as soon as we've // crossed the min count if (pendingCount >= minItemsInBlock) { final int curPrefixLength; if (startLabel == -1) { curPrefixLength = prefixLength; } else { curPrefixLength = 1+prefixLength; // floor term: prevTerm.ints[prevTerm.offset + prefixLength] = startLabel; } //System.out.println(" " + subCount + " subs"); final PendingBlock floorBlock = writeBlock(prevTerm, prefixLength, curPrefixLength, curStart, pendingCount, subTermCountSums[1+sub], true, startLabel, curStart == pendingCount); if (firstBlock == null) { firstBlock = floorBlock; } else { floorBlocks.add(floorBlock); } curStart -= pendingCount; //System.out.println(" = " + pendingCount); pendingCount = 0; assert minItemsInBlock == 1 || subCount > 1: "minItemsInBlock=" + minItemsInBlock + " subCount=" + subCount + " sub=" + sub + " of " + numSubs + " subTermCount=" + subTermCountSums[sub] + " subSubCount=" + subSubCounts[sub] + " depth=" + prefixLength; subCount = 0; startLabel = subBytes[sub+1]; if (curStart == 0) { break; } if (curStart <= maxItemsInBlock) { // remainder is small enough to fit into a // block. NOTE that this may be too small (< // minItemsInBlock); need a true segmenter // here assert startLabel != -1; assert firstBlock != null; prevTerm.ints[prevTerm.offset + prefixLength] = startLabel; //System.out.println(" final " + (numSubs-sub-1) + " subs"); /* for(sub++;sub < numSubs;sub++) { System.out.println(" " + (subTermCounts[sub] + subSubCounts[sub])); } System.out.println(" = " + curStart); if (curStart < minItemsInBlock) { System.out.println(" **"); } */ floorBlocks.add(writeBlock(prevTerm, prefixLength, prefixLength+1, curStart, curStart, 0, true, startLabel, true)); break; } } } prevTerm.ints[prevTerm.offset + prefixLength] = savLabel; assert firstBlock != null; firstBlock.compileIndex(floorBlocks, scratchBytes); pending.add(firstBlock); //if (DEBUG) System.out.println(" done pending.size()=" + pending.size()); } lastBlockIndex = pending.size()-1; } // for debugging @SuppressWarnings("unused") private String toString(BytesRef b) { try { return b.utf8ToString() + " " + b; } catch (Throwable t) { // If BytesRef isn't actually UTF8, or it's eg a // prefix of UTF8 that ends mid-unicode-char, we // fallback to hex: return b.toString(); } } // Writes all entries in the pending slice as a single // block: private PendingBlock writeBlock(IntsRef prevTerm, int prefixLength, int indexPrefixLength, int startBackwards, int length, int futureTermCount, boolean isFloor, int floorLeadByte, boolean isLastInFloor) throws IOException { assert length > 0; final int start = pending.size()-startBackwards; assert start >= 0: "pending.size()=" + pending.size() + " startBackwards=" + startBackwards + " length=" + length; final List<PendingEntry> slice = pending.subList(start, start + length); final long startFP = out.getFilePointer(); final BytesRef prefix = new BytesRef(indexPrefixLength); for(int m=0;m<indexPrefixLength;m++) { prefix.bytes[m] = (byte) prevTerm.ints[m]; } prefix.length = indexPrefixLength; // Write block header: out.writeVInt((length<<1)|(isLastInFloor ? 1:0)); // if (DEBUG) { // System.out.println(" writeBlock " + (isFloor ? "(floor) " : "") + "seg=" + segment + " pending.size()=" + pending.size() + " prefixLength=" + prefixLength + " indexPrefix=" + toString(prefix) + " entCount=" + length + " startFP=" + startFP + " futureTermCount=" + futureTermCount + (isFloor ? (" floorLeadByte=" + Integer.toHexString(floorLeadByte&0xff)) : "") + " isLastInFloor=" + isLastInFloor); // } // 1st pass: pack term suffix bytes into byte[] blob // TODO: cutover to bulk int codec... simple64? final boolean isLeafBlock; if (lastBlockIndex < start) { // This block definitely does not contain sub-blocks: isLeafBlock = true; //System.out.println("no scan true isFloor=" + isFloor); } else if (!isFloor) { // This block definitely does contain at least one sub-block: isLeafBlock = false; //System.out.println("no scan false " + lastBlockIndex + " vs start=" + start + " len=" + length); } else { // Must scan up-front to see if there is a sub-block boolean v = true; //System.out.println("scan " + lastBlockIndex + " vs start=" + start + " len=" + length); for (PendingEntry ent : slice) { if (!ent.isTerm) { v = false; break; } } isLeafBlock = v; } final List<FST<BytesRef>> subIndices; int termCount; if (isLeafBlock) { subIndices = null; for (PendingEntry ent : slice) { assert ent.isTerm; PendingTerm term = (PendingTerm) ent; final int suffix = term.term.length - prefixLength; // if (DEBUG) { // BytesRef suffixBytes = new BytesRef(suffix); // System.arraycopy(term.term.bytes, prefixLength, suffixBytes.bytes, 0, suffix); // suffixBytes.length = suffix; // System.out.println(" write term suffix=" + suffixBytes); // } // For leaf block we write suffix straight bytesWriter.writeVInt(suffix); bytesWriter.writeBytes(term.term.bytes, prefixLength, suffix); // Write term stats, to separate byte[] blob: bytesWriter2.writeVInt(term.stats.docFreq); if (fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) { assert term.stats.totalTermFreq >= term.stats.docFreq; bytesWriter2.writeVLong(term.stats.totalTermFreq - term.stats.docFreq); } } termCount = length; } else { subIndices = new ArrayList<FST<BytesRef>>(); termCount = 0; for (PendingEntry ent : slice) { if (ent.isTerm) { PendingTerm term = (PendingTerm) ent; final int suffix = term.term.length - prefixLength; // if (DEBUG) { // BytesRef suffixBytes = new BytesRef(suffix); // System.arraycopy(term.term.bytes, prefixLength, suffixBytes.bytes, 0, suffix); // suffixBytes.length = suffix; // System.out.println(" write term suffix=" + suffixBytes); // } // For non-leaf block we borrow 1 bit to record // if entry is term or sub-block bytesWriter.writeVInt(suffix<<1); bytesWriter.writeBytes(term.term.bytes, prefixLength, suffix); // Write term stats, to separate byte[] blob: bytesWriter2.writeVInt(term.stats.docFreq); if (fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) { assert term.stats.totalTermFreq >= term.stats.docFreq; bytesWriter2.writeVLong(term.stats.totalTermFreq - term.stats.docFreq); } termCount++; } else { PendingBlock block = (PendingBlock) ent; final int suffix = block.prefix.length - prefixLength; assert suffix > 0; // For non-leaf block we borrow 1 bit to record // if entry is term or sub-block bytesWriter.writeVInt((suffix<<1)|1); bytesWriter.writeBytes(block.prefix.bytes, prefixLength, suffix); assert block.fp < startFP; // if (DEBUG) { // BytesRef suffixBytes = new BytesRef(suffix); // System.arraycopy(block.prefix.bytes, prefixLength, suffixBytes.bytes, 0, suffix); // suffixBytes.length = suffix; // System.out.println(" write sub-block suffix=" + toString(suffixBytes) + " subFP=" + block.fp + " subCode=" + (startFP-block.fp) + " floor=" + block.isFloor); // } bytesWriter.writeVLong(startFP - block.fp); subIndices.add(block.index); } } assert subIndices.size() != 0; } // TODO: we could block-write the term suffix pointers; // this would take more space but would enable binary // search on lookup // Write suffixes byte[] blob to terms dict output: out.writeVInt((int) (bytesWriter.getFilePointer() << 1) | (isLeafBlock ? 1:0)); bytesWriter.writeTo(out); bytesWriter.reset(); // Write term stats byte[] blob out.writeVInt((int) bytesWriter2.getFilePointer()); bytesWriter2.writeTo(out); bytesWriter2.reset(); // Have postings writer write block postingsWriter.flushTermsBlock(futureTermCount+termCount, termCount); // Remove slice replaced by block: slice.clear(); if (lastBlockIndex >= start) { if (lastBlockIndex < start+length) { lastBlockIndex = start; } else { lastBlockIndex -= length; } } // if (DEBUG) { // System.out.println(" fpEnd=" + out.getFilePointer()); // } return new PendingBlock(prefix, startFP, termCount != 0, isFloor, floorLeadByte, subIndices); } TermsWriter(FieldInfo fieldInfo) { this.fieldInfo = fieldInfo; noOutputs = NoOutputs.getSingleton(); // This Builder is just used transiently to fragment // terms into "good" blocks; we don't save the // resulting FST: blockBuilder = new Builder<Object>(FST.INPUT_TYPE.BYTE1, 0, 0, true, true, Integer.MAX_VALUE, noOutputs, new FindBlocks(), false); postingsWriter.setField(fieldInfo); } @Override public Comparator<BytesRef> getComparator() { return BytesRef.getUTF8SortedAsUnicodeComparator(); } @Override public PostingsConsumer startTerm(BytesRef text) throws IOException { //if (DEBUG) System.out.println("\nBTTW.startTerm term=" + fieldInfo.name + ":" + toString(text) + " seg=" + segment); postingsWriter.startTerm(); /* if (fieldInfo.name.equals("id")) { postingsWriter.termID = Integer.parseInt(text.utf8ToString()); } else { postingsWriter.termID = -1; } */ return postingsWriter; } private final IntsRef scratchIntsRef = new IntsRef(); @Override public void finishTerm(BytesRef text, TermStats stats) throws IOException { assert stats.docFreq > 0; //if (DEBUG) System.out.println("BTTW.finishTerm term=" + fieldInfo.name + ":" + toString(text) + " seg=" + segment + " df=" + stats.docFreq); blockBuilder.add(Util.toIntsRef(text, scratchIntsRef), noOutputs.getNoOutput()); pending.add(new PendingTerm(BytesRef.deepCopyOf(text), stats)); postingsWriter.finishTerm(stats); numTerms++; } // Finishes all terms in this field @Override public void finish(long sumTotalTermFreq, long sumDocFreq, int docCount) throws IOException { if (numTerms > 0) { blockBuilder.finish(); // We better have one final "root" block: assert pending.size() == 1 && !pending.get(0).isTerm: "pending.size()=" + pending.size() + " pending=" + pending; final PendingBlock root = (PendingBlock) pending.get(0); assert root.prefix.length == 0; assert root.index.getEmptyOutput() != null; this.sumTotalTermFreq = sumTotalTermFreq; this.sumDocFreq = sumDocFreq; this.docCount = docCount; // Write FST to index indexStartFP = indexOut.getFilePointer(); root.index.save(indexOut); //System.out.println(" write FST " + indexStartFP + " field=" + fieldInfo.name); // if (SAVE_DOT_FILES || DEBUG) { // final String dotFileName = segment + "_" + fieldInfo.name + ".dot"; // Writer w = new OutputStreamWriter(new FileOutputStream(dotFileName)); // Util.toDot(root.index, w, false, false); // System.out.println("SAVED to " + dotFileName); // w.close(); // } } } private final RAMOutputStream bytesWriter = new RAMOutputStream(); private final RAMOutputStream bytesWriter2 = new RAMOutputStream(); } @Override public void close() throws IOException { IOException ioe = null; try { int nonZeroCount = 0; for(TermsWriter field : fields) { if (field.numTerms > 0) { nonZeroCount++; } } final long dirStart = out.getFilePointer(); final long indexDirStart = indexOut.getFilePointer(); out.writeVInt(nonZeroCount); for(TermsWriter field : fields) { if (field.numTerms > 0) { //System.out.println(" field " + field.fieldInfo.name + " " + field.numTerms + " terms"); out.writeVInt(field.fieldInfo.number); out.writeVLong(field.numTerms); final BytesRef rootCode = ((PendingBlock) field.pending.get(0)).index.getEmptyOutput(); assert rootCode != null: "field=" + field.fieldInfo.name + " numTerms=" + field.numTerms; out.writeVInt(rootCode.length); out.writeBytes(rootCode.bytes, rootCode.offset, rootCode.length); if (field.fieldInfo.getIndexOptions() != IndexOptions.DOCS_ONLY) { out.writeVLong(field.sumTotalTermFreq); } out.writeVLong(field.sumDocFreq); out.writeVInt(field.docCount); indexOut.writeVLong(field.indexStartFP); } } writeTrailer(out, dirStart); writeIndexTrailer(indexOut, indexDirStart); } catch (IOException ioe2) { ioe = ioe2; } finally { IOUtils.closeWhileHandlingException(ioe, out, indexOut, postingsWriter); } } }
add asserts git-svn-id: 13f9c63152c129021c7e766f4ef575faaaa595a2@1347000 13f79535-47bb-0310-9956-ffa450edef68
lucene/core/src/java/org/apache/lucene/codecs/BlockTreeTermsWriter.java
add asserts
Java
apache-2.0
d41c2f51f5e19c26b5d5299ed69589821a903023
0
speedment/speedment,speedment/speedment
/** * * Copyright (c) 2006-2016, Speedment, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); You may not * use this file except in compliance with the License. You may obtain a copy of * the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.speedment.internal.util.document; import com.speedment.Speedment; import com.speedment.config.Document; import com.speedment.config.db.Column; import com.speedment.config.db.Dbms; import com.speedment.config.db.ForeignKey; import com.speedment.config.db.ForeignKeyColumn; import com.speedment.config.db.Index; import com.speedment.config.db.IndexColumn; import com.speedment.config.db.PrimaryKeyColumn; import com.speedment.config.db.Project; import com.speedment.config.db.Schema; import com.speedment.config.db.Table; import com.speedment.config.db.parameters.DbmsType; import com.speedment.exception.SpeedmentException; import static com.speedment.util.StaticClassUtil.instanceNotAllowed; import com.speedment.util.StreamComposition; import static java.util.stream.Collectors.joining; import java.util.stream.Stream; /** * * @author pemi */ public final class DocumentDbUtil { public static DbmsType dbmsTypeOf(Speedment speedment, Dbms dbms) { final String typeName = dbms.getTypeName(); return speedment.getDbmsHandlerComponent().findByName(typeName) .orElseThrow(() -> new SpeedmentException( "Unable to find the database type " + typeName + ". The installed types are: " + speedment.getDbmsHandlerComponent().supportedDbmsTypes() .map(DbmsType::getName) .collect(joining(", ")) )); } public static Stream<? extends Document> traverseOver(Project project) { return Stream.concat(project.dbmses(), project.dbmses().flatMap(DocumentDbUtil::traverseOver)); } public static Stream<? extends Document> traverseOver(Dbms dbms) { return Stream.concat(dbms.schemas(), dbms.schemas().flatMap(DocumentDbUtil::traverseOver)); } public static Stream<? extends Document> traverseOver(Schema schema) { return Stream.concat(schema.tables(), schema.tables().flatMap(DocumentDbUtil::traverseOver)); } public static Stream<? extends Document> traverseOver(Table table) { return StreamComposition.concat( Stream.concat(table.columns(), table.columns().flatMap(DocumentDbUtil::traverseOver)), Stream.concat(table.primaryKeyColumns(), table.primaryKeyColumns().flatMap(DocumentDbUtil::traverseOver)), Stream.concat(table.indexes(), table.indexes().flatMap(DocumentDbUtil::traverseOver)), Stream.concat(table.foreignKeys(), table.foreignKeys().flatMap(DocumentDbUtil::traverseOver)) ); } public static Stream<? extends Document> traverseOver(Column column) { return Stream.empty(); } public static Stream<? extends Document> traverseOver(PrimaryKeyColumn primaryKeyColumn) { return Stream.empty(); } public static Stream<? extends Document> traverseOver(Index index) { return Stream.concat(index.indexColumns(), index.indexColumns().flatMap(DocumentDbUtil::traverseOver)); } public static Stream<? extends Document> traverseOver(IndexColumn indexColumn) { return Stream.empty(); } public static Stream<? extends Document> traverseOver(ForeignKey foreignKey) { return Stream.concat(foreignKey.foreignKeyColumns(), foreignKey.foreignKeyColumns().flatMap(DocumentDbUtil::traverseOver)); } public static Stream<? extends Document> traverseOver(ForeignKeyColumn foreignKeyColumn) { return Stream.empty(); } public static <T> Stream<T> traverseOver(Project project, Class<T> clazz) { if (Dbms.class.isAssignableFrom(clazz)) { return project.dbmses().map(clazz::cast); } else { return project.dbmses().flatMap(dbms -> traverseOver(dbms, clazz)); } } public static <T> Stream<T> traverseOver(Dbms dbms, Class<T> clazz) { if (Schema.class.isAssignableFrom(clazz)) { return dbms.schemas().map(clazz::cast); } else { return dbms.schemas().flatMap(schema -> traverseOver(schema, clazz)); } } public static <T> Stream<T> traverseOver(Schema schema, Class<T> clazz) { if (Table.class.isAssignableFrom(clazz)) { return schema.tables().map(clazz::cast); } else { return schema.tables().flatMap(table -> traverseOver(table, clazz)); } } public static <T> Stream<T> traverseOver(Table table, Class<T> clazz) { if (Column.class.isAssignableFrom(clazz)) { return table.columns().map(clazz::cast); } else if (PrimaryKeyColumn.class.isAssignableFrom(clazz)) { return table.primaryKeyColumns().map(clazz::cast); } else if (Index.class.isAssignableFrom(clazz)) { return table.indexes().map(clazz::cast); } else if (ForeignKey.class.isAssignableFrom(clazz)) { return table.foreignKeys().map(clazz::cast); } else { final Stream.Builder<T> sb = Stream.builder(); table.columns().flatMap(c -> traverseOver(c, clazz)).forEachOrdered(sb::accept); table.primaryKeyColumns().flatMap(c -> traverseOver(c, clazz)).forEachOrdered(sb::accept); table.indexes().flatMap(c -> traverseOver(c, clazz)).forEachOrdered(sb::accept); table.foreignKeys().flatMap(c -> traverseOver(c, clazz)).forEachOrdered(sb::accept); return sb.build(); } } public static <T> Stream<T> traverseOver(Column column, Class<T> clazz) { return Stream.empty(); } public static <T> Stream<T> traverseOver(PrimaryKeyColumn pkColumn, Class<T> clazz) { return Stream.empty(); } public static <T> Stream<T> traverseOver(Index index, Class<T> clazz) { if (IndexColumn.class.isAssignableFrom(clazz)) { return index.indexColumns().map(clazz::cast); } else { return index.indexColumns().flatMap(ic -> traverseOver(ic, clazz)); } } public static <T> Stream<T> traverseOver(IndexColumn indexColumn, Class<T> clazz) { return Stream.empty(); } public static <T> Stream<T> traverseOver(ForeignKey fk, Class<T> clazz) { if (ForeignKeyColumn.class.isAssignableFrom(clazz)) { return fk.foreignKeyColumns().map(clazz::cast); } else { return fk.foreignKeyColumns().flatMap(fcc -> traverseOver(fcc, clazz)); } } public static <T> Stream<T> traverseOver(ForeignKeyColumn foreignKeyColumn, Class<T> clazz) { return Stream.empty(); } public static Stream<? extends Document> typedChildrenOf(Table table) { return StreamComposition.concat( table.columns().map(Document.class::cast), table.primaryKeyColumns().map(Document.class::cast), table.indexes().map(Document.class::cast), table.foreignKeys().map(Document.class::cast) ); } // public static Class<? extends Document> mainInterfaceClass(Document document) { // return Stream.of( // Column.class, // Dbms.class, // ForeignKey.class, // ForeignKeyColumn.class, // Index.class, // IndexColumn.class, // PrimaryKeyColumn.class, // Project.class, // Schema.class, // Table.class // ) // .filter(c -> c.isAssignableFrom(document.getClass())) // .findAny() // .orElseThrow(() -> new SpeedmentException("Unable to find main interface for " + document)); // // } // // // public static <T, P, C, B> Stream<T> traverseOver( // P parent, // Class<T> clazz, // Function<P, Stream<B>> streamer, // Class<C> childClass, // Function<B, Stream<T>> recursor // ) { // if (childClass.isAssignableFrom(clazz)) { // return streamer.apply(parent).map(clazz::cast); // } else { // return streamer.apply(parent).flatMap(recursor); // } // } /** * Utility classes should not be instantiated. */ private DocumentDbUtil() { instanceNotAllowed(getClass()); } }
src/main/java/com/speedment/internal/util/document/DocumentDbUtil.java
/** * * Copyright (c) 2006-2016, Speedment, Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); You may not * use this file except in compliance with the License. You may obtain a copy of * the License at: * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package com.speedment.internal.util.document; import com.speedment.Speedment; import com.speedment.config.Document; import com.speedment.config.db.Column; import com.speedment.config.db.Dbms; import com.speedment.config.db.ForeignKey; import com.speedment.config.db.ForeignKeyColumn; import com.speedment.config.db.Index; import com.speedment.config.db.IndexColumn; import com.speedment.config.db.PrimaryKeyColumn; import com.speedment.config.db.Project; import com.speedment.config.db.Schema; import com.speedment.config.db.Table; import com.speedment.config.db.parameters.DbmsType; import com.speedment.exception.SpeedmentException; import static com.speedment.util.StaticClassUtil.instanceNotAllowed; import com.speedment.util.StreamComposition; import java.util.stream.Stream; /** * * @author pemi */ public final class DocumentDbUtil { public static DbmsType dbmsTypeOf(Speedment speedment, Dbms dbms) { return speedment.getDbmsHandlerComponent().findByName(dbms.getTypeName()) .orElseThrow(() -> new SpeedmentException("Unable to find the database type " + dbms.getTypeName())); } public static Stream<? extends Document> traverseOver(Project project) { return Stream.concat(project.dbmses(), project.dbmses().flatMap(DocumentDbUtil::traverseOver)); } public static Stream<? extends Document> traverseOver(Dbms dbms) { return Stream.concat(dbms.schemas(), dbms.schemas().flatMap(DocumentDbUtil::traverseOver)); } public static Stream<? extends Document> traverseOver(Schema schema) { return Stream.concat(schema.tables(), schema.tables().flatMap(DocumentDbUtil::traverseOver)); } public static Stream<? extends Document> traverseOver(Table table) { return StreamComposition.concat( Stream.concat(table.columns(), table.columns().flatMap(DocumentDbUtil::traverseOver)), Stream.concat(table.primaryKeyColumns(), table.primaryKeyColumns().flatMap(DocumentDbUtil::traverseOver)), Stream.concat(table.indexes(), table.indexes().flatMap(DocumentDbUtil::traverseOver)), Stream.concat(table.foreignKeys(), table.foreignKeys().flatMap(DocumentDbUtil::traverseOver)) ); } public static Stream<? extends Document> traverseOver(Column column) { return Stream.empty(); } public static Stream<? extends Document> traverseOver(PrimaryKeyColumn primaryKeyColumn) { return Stream.empty(); } public static Stream<? extends Document> traverseOver(Index index) { return Stream.concat(index.indexColumns(), index.indexColumns().flatMap(DocumentDbUtil::traverseOver)); } public static Stream<? extends Document> traverseOver(IndexColumn indexColumn) { return Stream.empty(); } public static Stream<? extends Document> traverseOver(ForeignKey foreignKey) { return Stream.concat(foreignKey.foreignKeyColumns(), foreignKey.foreignKeyColumns().flatMap(DocumentDbUtil::traverseOver)); } public static Stream<? extends Document> traverseOver(ForeignKeyColumn foreignKeyColumn) { return Stream.empty(); } public static <T> Stream<T> traverseOver(Project project, Class<T> clazz) { if (Dbms.class.isAssignableFrom(clazz)) { return project.dbmses().map(clazz::cast); } else { return project.dbmses().flatMap(dbms -> traverseOver(dbms, clazz)); } } public static <T> Stream<T> traverseOver(Dbms dbms, Class<T> clazz) { if (Schema.class.isAssignableFrom(clazz)) { return dbms.schemas().map(clazz::cast); } else { return dbms.schemas().flatMap(schema -> traverseOver(schema, clazz)); } } public static <T> Stream<T> traverseOver(Schema schema, Class<T> clazz) { if (Table.class.isAssignableFrom(clazz)) { return schema.tables().map(clazz::cast); } else { return schema.tables().flatMap(table -> traverseOver(table, clazz)); } } public static <T> Stream<T> traverseOver(Table table, Class<T> clazz) { if (Column.class.isAssignableFrom(clazz)) { return table.columns().map(clazz::cast); } else if (PrimaryKeyColumn.class.isAssignableFrom(clazz)) { return table.primaryKeyColumns().map(clazz::cast); } else if (Index.class.isAssignableFrom(clazz)) { return table.indexes().map(clazz::cast); } else if (ForeignKey.class.isAssignableFrom(clazz)) { return table.foreignKeys().map(clazz::cast); } else { final Stream.Builder<T> sb = Stream.builder(); table.columns().flatMap(c -> traverseOver(c, clazz)).forEachOrdered(sb::accept); table.primaryKeyColumns().flatMap(c -> traverseOver(c, clazz)).forEachOrdered(sb::accept); table.indexes().flatMap(c -> traverseOver(c, clazz)).forEachOrdered(sb::accept); table.foreignKeys().flatMap(c -> traverseOver(c, clazz)).forEachOrdered(sb::accept); return sb.build(); } } public static <T> Stream<T> traverseOver(Column column, Class<T> clazz) { return Stream.empty(); } public static <T> Stream<T> traverseOver(PrimaryKeyColumn pkColumn, Class<T> clazz) { return Stream.empty(); } public static <T> Stream<T> traverseOver(Index index, Class<T> clazz) { if (IndexColumn.class.isAssignableFrom(clazz)) { return index.indexColumns().map(clazz::cast); } else { return index.indexColumns().flatMap(ic -> traverseOver(ic, clazz)); } } public static <T> Stream<T> traverseOver(IndexColumn indexColumn, Class<T> clazz) { return Stream.empty(); } public static <T> Stream<T> traverseOver(ForeignKey fk, Class<T> clazz) { if (ForeignKeyColumn.class.isAssignableFrom(clazz)) { return fk.foreignKeyColumns().map(clazz::cast); } else { return fk.foreignKeyColumns().flatMap(fcc -> traverseOver(fcc, clazz)); } } public static <T> Stream<T> traverseOver(ForeignKeyColumn foreignKeyColumn, Class<T> clazz) { return Stream.empty(); } public static Stream<? extends Document> typedChildrenOf(Table table) { return StreamComposition.concat( table.columns().map(Document.class::cast), table.primaryKeyColumns().map(Document.class::cast), table.indexes().map(Document.class::cast), table.foreignKeys().map(Document.class::cast) ); } // public static Class<? extends Document> mainInterfaceClass(Document document) { // return Stream.of( // Column.class, // Dbms.class, // ForeignKey.class, // ForeignKeyColumn.class, // Index.class, // IndexColumn.class, // PrimaryKeyColumn.class, // Project.class, // Schema.class, // Table.class // ) // .filter(c -> c.isAssignableFrom(document.getClass())) // .findAny() // .orElseThrow(() -> new SpeedmentException("Unable to find main interface for " + document)); // // } // // // public static <T, P, C, B> Stream<T> traverseOver( // P parent, // Class<T> clazz, // Function<P, Stream<B>> streamer, // Class<C> childClass, // Function<B, Stream<T>> recursor // ) { // if (childClass.isAssignableFrom(clazz)) { // return streamer.apply(parent).map(clazz::cast); // } else { // return streamer.apply(parent).flatMap(recursor); // } // } /** * Utility classes should not be instantiated. */ private DocumentDbUtil() { instanceNotAllowed(getClass()); } }
Add more exception info
src/main/java/com/speedment/internal/util/document/DocumentDbUtil.java
Add more exception info
Java
apache-2.0
e185d84b41c66d92e17fca84d3fa1bce0ffbcd2a
0
solonaruvioletta/java_testing,solonaruvioletta/java_testing
package ru.stqa.pft.addressbook.appmanager; import org.openqa.selenium.By; import org.openqa.selenium.firefox.FirefoxDriver; import ru.stqa.pft.addressbook.model.ContactData; /** * Created by 1 on 29.01.2017. */ public class ContactHelper extends HelperBase { public ContactHelper(FirefoxDriver wd) { super(wd); } public void fillContactForm(ContactData contactData) { type(By.name("firstname"), contactData.getFirstname()); type(By.name("middlename"), contactData.getMiddlename()); type(By.name("lastname"), contactData.getLastname()); type(By.name("nickname"), contactData.getNickname()); type(By.name("title"), contactData.getTitle()); type(By.name("company"), contactData.getCompany()); type(By.name("address"), contactData.getAddress()); type(By.name("home"), contactData.getHomephone()); type(By.name("mobile"), contactData.getMobilephone()); type(By.name("work"), contactData.getWorkphone()); type(By.name("fax"), contactData.getFax()); type(By.name("email"), contactData.getEmail()); } public void goToMainPage() { click(By.xpath("//div[@id='content']/form/input[21]")); } public void enterContactInformation() { click(By.name("theform")); } }
addressbook-web-tests/src/test/java/ru/stqa/pft/addressbook/appmanager/ContactHelper.java
package ru.stqa.pft.addressbook.appmanager; import org.openqa.selenium.By; import org.openqa.selenium.firefox.FirefoxDriver; import ru.stqa.pft.addressbook.model.ContactData; /** * Created by 1 on 29.01.2017. */ public class ContactHelper extends HelperBase { public ContactHelper(FirefoxDriver wd) { super(wd); } public void fillContactForm(ContactData contactData) { type(By.name("firstname"), contactData.getFirstname()); type(By.name("middlename"), contactData.getMiddlename()); type(By.name("lastname"), contactData.getLastname()); type(By.name("nickname"), contactData.getNickname()); type(By.name("title"), contactData.getTitle()); type(By.name("company"), contactData.getCompany()); type(By.name("address"), contactData.getAddress()); type(By.name("home"), contactData.getHomephone()); type(By.name("mobile"), contactData.getMobilephone()); type(By.name("work"), contactData.getWorkphone()); type(By.name("fax"), contactData.getFax()); type(By.name("email"), contactData.getEmail()); if (!wd.findElement(By.xpath("//div[@id='content']/form/select[1]//option[11]")).isSelected()) { wd.findElement(By.xpath("//div[@id='content']/form/select[1]//option[11]")).click(); } if (!wd.findElement(By.xpath("//div[@id='content']/form/select[2]//option[4]")).isSelected()) { wd.findElement(By.xpath("//div[@id='content']/form/select[2]//option[4]")).click(); } type(By.name("byear"), contactData.getBirthyear()); } public void goToMainPage() { click(By.xpath("//div[@id='content']/form/input[21]")); } public void enterContactInformation() { click(By.name("theform")); } }
BaseHelper. Низкоуровневые вспомогательные методы
addressbook-web-tests/src/test/java/ru/stqa/pft/addressbook/appmanager/ContactHelper.java
BaseHelper. Низкоуровневые вспомогательные методы
Java
mit
5de97aaee72df9602dfafec10a2598495ca4c62f
0
ppati000/visualDFA
package gui; import java.awt.Font; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import javax.swing.JPanel; import javax.swing.border.BevelBorder; import javax.swing.border.CompoundBorder; import javax.swing.border.EmptyBorder; import controller.Controller; import java.awt.GridBagLayout; import javax.swing.JButton; import javax.swing.JLabel; import java.awt.GridBagConstraints; import javax.swing.JComboBox; /** * The InputPanel Class contains UI-elements to let the user start a new data * flow analysis. * * @author Michael * */ public class InputPanel extends JPanel { private Controller ctrl; private CodeField codeField; private JButton btnOpen; private JButton btnSave; private JLabel lblAnalyses; private JComboBox<String> comboBox_Analyses; private JLabel lblWorklists; private JComboBox<String> comboBox_Worklists; private JButton btnStartAnalysis; /** * Create the panel. Set the controller, so the ActionListeners can access * it. * * @param ctrl * The Controller to be accessed in case of events. * * @see controller.Controller * @see ActionListener */ public InputPanel(Controller ctrl) { this.ctrl = ctrl; setBorder( new CompoundBorder(new BevelBorder(BevelBorder.RAISED, Colors.GREY_BORDER.getColor(), null, null, null), new EmptyBorder(5, 5, 5, 5))); setBackground(Colors.BACKGROUND.getColor()); GridBagLayout gridBagLayout = new GridBagLayout(); gridBagLayout.columnWidths = new int[] { 0, 0, 0 }; gridBagLayout.rowHeights = new int[] { 0, 0, 0, 0, 0, 0, 0, 0, 0 }; gridBagLayout.columnWeights = new double[] { 1.0, 0.0, Double.MIN_VALUE }; gridBagLayout.rowWeights = new double[] { 0.5, 0.5, 0.5, 0.5, 0.0, 0.5, 0.0, 0.0, 0.5, 0.0, 0.0, 0.5, 0.1, 0.1 }; setLayout(gridBagLayout); JComponentDecorator jCompDecorator = new JComponentDecorator(); JButtonDecorator jBuDecorator = new JButtonDecorator(jCompDecorator); JLabelDecorator jLaDecorator = new JLabelDecorator(jCompDecorator); codeField = new CodeField(true); GridBagConstraints gbc_codeField = GridBagConstraintFactory.getStandardGridBagConstraints(0, 0, 2, 4); add(codeField, gbc_codeField); btnOpen = new JButton(); jBuDecorator.decorateIconButton(btnOpen, "icons/open-folder-outline.png", 0.2, null, "Open ..."); btnOpen.setBackground(Colors.WHITE_BACKGROUND.getColor()); btnOpen.setForeground(Colors.DARK_TEXT.getColor()); GridBagConstraints gbc_btnOpen = GridBagConstraintFactory.getStandardGridBagConstraints(0, 4, 1, 1); gbc_btnOpen.insets.set(gbc_btnOpen.insets.top, gbc_btnOpen.insets.left, gbc_btnOpen.insets.bottom, 0); add(btnOpen, gbc_btnOpen); btnSave = new JButton(); jBuDecorator.decorateIconButton(btnSave, "icons/save-file-option.png", 0.2, null, "Save ..."); btnSave.setBackground(Colors.WHITE_BACKGROUND.getColor()); btnSave.setForeground(Colors.DARK_TEXT.getColor()); GridBagConstraints gbc_btnSave = GridBagConstraintFactory.getStandardGridBagConstraints(1, 4, 1, 1); gbc_btnSave.insets.set(gbc_btnSave.insets.top, 0, gbc_btnSave.insets.bottom, gbc_btnSave.insets.right); add(btnSave, gbc_btnSave); lblAnalyses = new JLabel(); jLaDecorator.decorateLabel(lblAnalyses, "Analysis"); GridBagConstraints gbc_lblAnalyses = GridBagConstraintFactory.getStandardGridBagConstraints(0, 6, 2, 1); add(lblAnalyses, gbc_lblAnalyses); comboBox_Analyses = new JComboBox<String>(ctrl.getAnalyses().toArray(new String[0])); GridBagConstraints gbc_comboBox_Analyses = GridBagConstraintFactory.getStandardGridBagConstraints(0, 7, 2, 1); gbc_comboBox_Analyses.fill = GridBagConstraints.HORIZONTAL; add(comboBox_Analyses, gbc_comboBox_Analyses); lblWorklists = new JLabel(); jLaDecorator.decorateLabel(lblWorklists, "Worklist Algorithm"); GridBagConstraints gbc_lblWorklists = GridBagConstraintFactory.getStandardGridBagConstraints(0, 9, 2, 1); add(lblWorklists, gbc_lblWorklists); comboBox_Worklists = new JComboBox<String>(ctrl.getWorklists().toArray(new String[0])); GridBagConstraints gbc_comboBox_Worklist = GridBagConstraintFactory.getStandardGridBagConstraints(0, 10, 2, 1); gbc_comboBox_Worklist.fill = GridBagConstraints.HORIZONTAL; add(comboBox_Worklists, gbc_comboBox_Worklist); btnStartAnalysis = new JButton(); jBuDecorator.decorateButton(btnStartAnalysis, new StartAnalysisListener(), "Start Analysis"); btnStartAnalysis.setBackground(Colors.GREEN_BACKGROUND.getColor()); btnStartAnalysis.setFont(new Font("Trebuchet MS", Font.BOLD, 24)); GridBagConstraints gbc_btnStartAnalysis = GridBagConstraintFactory.getStandardGridBagConstraints(0, 12, 2, 2); add(btnStartAnalysis, gbc_btnStartAnalysis); } /** * Activate or deactivate the InputPanel. If deactivated, all JComponents * which are children of this Panel are deactivated. * * @param b * Whether the panel should be activated [true] or deactivated * [false]. */ public void setActivated(boolean b) { btnSave.setEnabled(b); btnOpen.setEnabled(b); lblAnalyses.setEnabled(b); comboBox_Analyses.setEnabled(b); lblWorklists.setEnabled(b); comboBox_Worklists.setEnabled(b); btnStartAnalysis.setEnabled(b); codeField.setEnabled(b); } /** * Look up the currently selected data flow analysis and return its name. * * @return The name of the data flow analysis. */ public String getAnalysis() { String analysisName = (String) comboBox_Analyses.getSelectedItem(); return analysisName; } /** * Get the text from the CodeField and return it to the caller. * * @return the text from the CodeField. * */ public String getCode() { return codeField.getCode(); } /** * Look up the currently selected worklist-algorithm and return its name. * * @return The name of the worklist-algorithm. */ public String getWorklist() { String worklistName = (String) comboBox_Worklists.getSelectedItem(); return worklistName; } /** * Look up if a filter is selected and return this value. * * @return [true] if a filter is selected, [false] if not. */ public boolean isFilterSelected() { // TODO return false; } /** * Implementation of an ActionListener which informs the controller, * when the StartAnalysis button has been pressed. * * @author Michael * @see ActionListener */ private class StartAnalysisListener implements ActionListener { @Override public void actionPerformed(ActionEvent e) { ctrl.startAnalysis(); } } }
Implementierung/visual-dfa/src/main/java/gui/InputPanel.java
package gui; import java.awt.Font; import java.awt.event.ActionEvent; import java.awt.event.ActionListener; import javax.swing.JPanel; import javax.swing.border.BevelBorder; import javax.swing.border.CompoundBorder; import javax.swing.border.EmptyBorder; import controller.Controller; import java.awt.GridBagLayout; import javax.swing.JButton; import javax.swing.JLabel; import java.awt.GridBagConstraints; import javax.swing.JComboBox; /** * The InputPanel Class contains UI-elements to let the user start a new data * flow analysis. * * @author Michael * */ public class InputPanel extends JPanel { private Controller ctrl; private CodeField codeField; private JButton btnOpen; private JButton btnSave; private JLabel lblAnalyses; private JComboBox<String> comboBox_Analyses; private JLabel lblWorklists; private JComboBox<String> comboBox_Worklists; private JButton btnStartAnalysis; /** * Create the panel. Set the controller, so the ActionListeners can access * it. * * @param ctrl * The Controller to be accessed in case of events. * * @see controller.Controller * @see ActionListener */ public InputPanel(Controller ctrl) { this.ctrl = ctrl; setBorder( new CompoundBorder(new BevelBorder(BevelBorder.RAISED, Colors.GREY_BORDER.getColor(), null, null, null), new EmptyBorder(5, 5, 5, 5))); setBackground(Colors.BACKGROUND.getColor()); GridBagLayout gridBagLayout = new GridBagLayout(); gridBagLayout.columnWidths = new int[] { 0, 0, 0 }; gridBagLayout.rowHeights = new int[] { 0, 0, 0, 0, 0, 0, 0, 0, 0 }; gridBagLayout.columnWeights = new double[] { 1.0, 0.0, Double.MIN_VALUE }; gridBagLayout.rowWeights = new double[] { 0.5, 0.5, 0.5, 0.5, 0.0, 0.5, 0.0, 0.0, 0.5, 0.0, 0.0, 0.5, 0.1, 0.1 }; setLayout(gridBagLayout); JComponentDecorator jCompDecorator = new JComponentDecorator(); JButtonDecorator jBuDecorator = new JButtonDecorator(jCompDecorator); JLabelDecorator jLaDecorator = new JLabelDecorator(jCompDecorator); codeField = new CodeField(true); GridBagConstraints gbc_codeField = GridBagConstraintFactory.getStandardGridBagConstraints(0, 0, 2, 4); add(codeField, gbc_codeField); btnOpen = new JButton(); jBuDecorator.decorateIconButton(btnOpen, "icons/open-folder-outline.png", 0.2, null, "Open ..."); btnOpen.setBackground(Colors.WHITE_BACKGROUND.getColor()); btnOpen.setForeground(Colors.DARK_TEXT.getColor()); GridBagConstraints gbc_btnOpen = GridBagConstraintFactory.getStandardGridBagConstraints(0, 4, 1, 1); gbc_btnOpen.insets.set(gbc_btnOpen.insets.top, gbc_btnOpen.insets.left, gbc_btnOpen.insets.bottom, 0); add(btnOpen, gbc_btnOpen); btnSave = new JButton(); jBuDecorator.decorateIconButton(btnSave, "icons/save-file-option.png", 0.2, null, "Save ..."); btnSave.setBackground(Colors.WHITE_BACKGROUND.getColor()); btnSave.setForeground(Colors.DARK_TEXT.getColor()); GridBagConstraints gbc_btnSave = GridBagConstraintFactory.getStandardGridBagConstraints(1, 4, 1, 1); gbc_btnSave.insets.set(gbc_btnSave.insets.top, 0, gbc_btnSave.insets.bottom, gbc_btnSave.insets.right); add(btnSave, gbc_btnSave); lblAnalyses = new JLabel(); jLaDecorator.decorateLabel(lblAnalyses, "Analysis"); GridBagConstraints gbc_lblAnalyses = GridBagConstraintFactory.getStandardGridBagConstraints(0, 6, 2, 1); add(lblAnalyses, gbc_lblAnalyses); comboBox_Analyses = new JComboBox<String>(ctrl.getAnalyses().toArray(new String[0])); GridBagConstraints gbc_comboBox_Analyses = GridBagConstraintFactory.getStandardGridBagConstraints(0, 7, 2, 1); gbc_comboBox_Analyses.fill = GridBagConstraints.HORIZONTAL; add(comboBox_Analyses, gbc_comboBox_Analyses); lblWorklists = new JLabel(); jLaDecorator.decorateLabel(lblWorklists, "Worklist Algorithm"); GridBagConstraints gbc_lblWorklists = GridBagConstraintFactory.getStandardGridBagConstraints(0, 9, 2, 1); add(lblWorklists, gbc_lblWorklists); comboBox_Worklists = new JComboBox<String>(ctrl.getWorklists().toArray(new String[0])); GridBagConstraints gbc_comboBox_Worklist = GridBagConstraintFactory.getStandardGridBagConstraints(0, 10, 2, 1); gbc_comboBox_Worklist.fill = GridBagConstraints.HORIZONTAL; add(comboBox_Worklists, gbc_comboBox_Worklist); btnStartAnalysis = new JButton(); jBuDecorator.decorateButton(btnStartAnalysis, new StartAnalysisListener(), "Start Analysis"); jBuDecorator.decorateButton(btnStartAnalysis, new StartAnalysisListener(), "Start Analysis"); btnStartAnalysis.setBackground(Colors.GREEN_BACKGROUND.getColor()); btnStartAnalysis.setFont(new Font("Trebuchet MS", Font.BOLD, 24)); GridBagConstraints gbc_btnStartAnalysis = GridBagConstraintFactory.getStandardGridBagConstraints(0, 12, 2, 2); add(btnStartAnalysis, gbc_btnStartAnalysis); } /** * Activate or deactivate the InputPanel. If deactivated, all JComponents * which are children of this Panel are deactivated. * * @param b * Whether the panel should be activated [true] or deactivated * [false]. */ public void setActivated(boolean b) { btnSave.setEnabled(b); btnOpen.setEnabled(b); lblAnalyses.setEnabled(b); comboBox_Analyses.setEnabled(b); lblWorklists.setEnabled(b); comboBox_Worklists.setEnabled(b); btnStartAnalysis.setEnabled(b); codeField.setEnabled(b); } /** * Look up the currently selected data flow analysis and return its name. * * @return The name of the data flow analysis. */ public String getAnalysis() { String analysisName = (String) comboBox_Analyses.getSelectedItem(); return analysisName; } /** * Get the text from the CodeField and return it to the caller. * * @return the text from the CodeField. * */ public String getCode() { return codeField.getCode(); } /** * Look up the currently selected worklist-algorithm and return its name. * * @return The name of the worklist-algorithm. */ public String getWorklist() { String worklistName = (String) comboBox_Worklists.getSelectedItem(); return worklistName; } /** * Look up if a filter is selected and return this value. * * @return [true] if a filter is selected, [false] if not. */ public boolean isFilterSelected() { // TODO return false; } /** * Implementation of an ActionListener which informs the controller, * when the StartAnalysis button has been pressed. * * @author Michael * @see ActionListener */ private class StartAnalysisListener implements ActionListener { @Override public void actionPerformed(ActionEvent e) { ctrl.startAnalysis(); } } }
Delete duplicate line
Implementierung/visual-dfa/src/main/java/gui/InputPanel.java
Delete duplicate line
Java
mit
21a5025f7274d98baa4f89c59fad3ec7c7814bdd
0
hangmann/Temperature-Management-and-Prediction,hangmann/Temperature-Management-and-Prediction,hangmann/Temperature-Management-and-Prediction
TMS_Host/TemperatureViewer/src/controller/C_TimerThread.java
package controller; public class C_TimerThread implements Runnable { int duration = 1000; int maxlevel = 32; C_TemperatureMeasurementSystem c_TMS; int count = 1; public C_TimerThread(int duration, int maxlevel, C_TemperatureMeasurementSystem c_TMS) { this.duration = duration; this.c_TMS = c_TMS; this.maxlevel = maxlevel; } @Override public void run() { while (count<=maxlevel){ try { Thread.sleep(duration); } catch (InterruptedException e) { // TODO Auto-generated catch block e.printStackTrace(); } c_TMS.getHeatControl().setAllHeaters(count); count++; } } }
deletion
TMS_Host/TemperatureViewer/src/controller/C_TimerThread.java
deletion
Java
mit
f4c113eb2a9faa015cbe7fae325df29c51d34af1
0
domisum/AuxiliumLib
package de.domisum.lib.auxilium.util; import de.domisum.lib.auxilium.util.java.annotations.API; import java.awt.AlphaComposite; import java.awt.Color; import java.awt.Graphics2D; import java.awt.image.BufferedImage; import java.awt.image.ColorModel; import java.awt.image.DataBufferByte; import java.awt.image.RenderedImage; import java.awt.image.WritableRaster; @API public final class ImageUtil { // INIT private ImageUtil() { throw new UnsupportedOperationException(); } // TO PIXELS @API public static int[][] getPixels(BufferedImage image) { byte[] pixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData(); int width = image.getWidth(); int height = image.getHeight(); boolean hasAlphaChannel = image.getAlphaRaster() != null; int[][] result = new int[height][width]; if(hasAlphaChannel) { int pixelLength = 4; int row = 0; int col = 0; for(int pixel = 0; pixel < pixels.length; pixel += pixelLength) { int argb = 0; //argb += ((pixels[pixel]&0xff)<<24); // alpha argb += pixels[pixel+1]&0xff; // blue argb += (pixels[pixel+2]&0xff)<<8; // green argb += (pixels[pixel+3]&0xff)<<16; // red result[row][col] = argb; col++; if(col == width) { col = 0; row++; } } } else { int pixelLength = 3; int row = 0; int col = 0; for(int pixel = 0; pixel < pixels.length; pixel += pixelLength) { int argb = 0; //argb += -16777216; // 255 alpha argb += pixels[pixel]&0xff; // blue argb += (pixels[pixel+1]&0xff)<<8; // green argb += (pixels[pixel+2]&0xff)<<16; // red result[row][col] = argb; col++; if(col == width) { col = 0; row++; } } } return result; } // FROM PIXELS @API public static BufferedImage getImageFromPixels(int[] pixels, int width, int height) { BufferedImage bi = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB); WritableRaster raster = (WritableRaster) bi.getData(); raster.setDataElements(0, 0, width, height, pixels); bi.setData(raster); return bi; } @API public static BufferedImage getImageFromPixels(int[][] pixels) { if((pixels.length == 0) || (pixels[0].length == 0)) throw new IllegalArgumentException("The array has to have at least a length of 1 in each direction"); int height = pixels.length; int width = pixels[0].length; int[] linearPixels = new int[width*height]; for(int i = 0; i < linearPixels.length; i++) { int column = i%width; int row = i/width; linearPixels[i] = pixels[row][column]; } return getImageFromPixels(linearPixels, width, height); } @API public static BufferedImage copy(RenderedImage bufferedImage) { ColorModel colorModel = bufferedImage.getColorModel(); boolean isAlphaPremultiplied = colorModel.isAlphaPremultiplied(); WritableRaster raster = bufferedImage.copyData(null); return new BufferedImage(colorModel, raster, isAlphaPremultiplied, null).getSubimage(0, 0, bufferedImage.getWidth(), bufferedImage.getHeight()); } // COLOR @API public static BufferedImage dye(BufferedImage image, Color color) { BufferedImage graphicsImage = new BufferedImage(image.getWidth(), image.getHeight(), BufferedImage.TYPE_INT_ARGB); Graphics2D graphics2D = graphicsImage.createGraphics(); graphics2D.drawImage(image, 0, 0, null); graphics2D.setComposite(AlphaComposite.SrcAtop); graphics2D.setColor(color); graphics2D.fillRect(0, 0, image.getWidth(), image.getHeight()); return graphicsImage; } @API public static void saturize(BufferedImage image, double saturation) { for(int x = 0; x < image.getWidth(); x++) for(int y = 0; y < image.getHeight(); y++) image.setRGB(x, y, processPixel(image.getRGB(x, y), (float) saturation)); } private static int processPixel(int pixel, float saturation) { int red = 0xff&(pixel >> 16); int green = 0xff&(pixel >> 8); int blue = 0xff&pixel; float[] hsb = Color.RGBtoHSB(red, green, blue, null); hsb[1] += saturation; if(hsb[1] > 1) hsb[1] = 1; int newPixel = Color.HSBtoRGB(hsb[0], hsb[1], hsb[2]); int newRed = 0xff&(newPixel >> 16); int newGreen = 0xff&(newPixel >> 8); int newBlue = 0xff&newPixel; if(newRed > 255) newRed = 255; if(newRed < 0) newRed = 0; if(newGreen > 255) newGreen = 255; if(newGreen < 0) newGreen = 0; if(newBlue > 255) newBlue = 255; if(newBlue < 0) newBlue = 0; return 0xff000000|(newRed<<16)|(newGreen<<8)|newBlue; } }
src/main/java/de/domisum/lib/auxilium/util/ImageUtil.java
package de.domisum.lib.auxilium.util; import de.domisum.lib.auxilium.util.java.annotations.API; import java.awt.AlphaComposite; import java.awt.Color; import java.awt.Graphics2D; import java.awt.image.BufferedImage; import java.awt.image.DataBufferByte; import java.awt.image.WritableRaster; @API public final class ImageUtil { // INIT private ImageUtil() { throw new UnsupportedOperationException(); } // TO PIXELS @API public static int[][] getPixels(BufferedImage image) { byte[] pixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData(); int width = image.getWidth(); int height = image.getHeight(); boolean hasAlphaChannel = image.getAlphaRaster() != null; int[][] result = new int[height][width]; if(hasAlphaChannel) { int pixelLength = 4; int row = 0; int col = 0; for(int pixel = 0; pixel < pixels.length; pixel += pixelLength) { int argb = 0; //argb += ((pixels[pixel]&0xff)<<24); // alpha argb += pixels[pixel+1]&0xff; // blue argb += (pixels[pixel+2]&0xff)<<8; // green argb += (pixels[pixel+3]&0xff)<<16; // red result[row][col] = argb; col++; if(col == width) { col = 0; row++; } } } else { int pixelLength = 3; int row = 0; int col = 0; for(int pixel = 0; pixel < pixels.length; pixel += pixelLength) { int argb = 0; //argb += -16777216; // 255 alpha argb += pixels[pixel]&0xff; // blue argb += (pixels[pixel+1]&0xff)<<8; // green argb += (pixels[pixel+2]&0xff)<<16; // red result[row][col] = argb; col++; if(col == width) { col = 0; row++; } } } return result; } // FROM PIXELS @API public static BufferedImage getImageFromPixels(int[] pixels, int width, int height) { BufferedImage bi = new BufferedImage(width, height, BufferedImage.TYPE_INT_RGB); WritableRaster raster = (WritableRaster) bi.getData(); raster.setDataElements(0, 0, width, height, pixels); bi.setData(raster); return bi; } @API public static BufferedImage getImageFromPixels(int[][] pixels) { if((pixels.length == 0) || (pixels[0].length == 0)) throw new IllegalArgumentException("The array has to have at least a length of 1 in each direction"); int height = pixels.length; int width = pixels[0].length; int[] linearPixels = new int[width*height]; for(int i = 0; i < linearPixels.length; i++) { int column = i%width; int row = i/width; linearPixels[i] = pixels[row][column]; } return getImageFromPixels(linearPixels, width, height); } // COLOR public static BufferedImage dye(BufferedImage image, Color color) { BufferedImage graphicsImage = new BufferedImage(image.getWidth(), image.getHeight(), BufferedImage.TYPE_INT_ARGB); Graphics2D graphics2D = graphicsImage.createGraphics(); graphics2D.drawImage(image, 0, 0, null); graphics2D.setComposite(AlphaComposite.SrcAtop); graphics2D.setColor(color); graphics2D.fillRect(0, 0, image.getWidth(), image.getHeight()); return graphicsImage; } }
Added saturize and copy
src/main/java/de/domisum/lib/auxilium/util/ImageUtil.java
Added saturize and copy
Java
mit
7e0b7d181eb6752eb6b0400ec9f93608a1e2b48a
0
anotheria/moskito,esmakula/moskito,anotheria/moskito,esmakula/moskito,anotheria/moskito,esmakula/moskito,anotheria/moskito,esmakula/moskito
package net.anotheria.extensions.php.connectors.impl; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.google.gson.JsonSyntaxException; import com.rabbitmq.client.*; import net.anotheria.extensions.php.connectors.AbstractConnector; import net.anotheria.extensions.php.dto.PHPProducerDTO; import net.anotheria.extensions.php.exceptions.ConnectorInitException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.Properties; import java.util.concurrent.TimeoutException; /** * Connector to retrieve data from RabbitMQ. * * Expects that data to this connector will be sent in json * format with strict to {@link net.anotheria.extensions.php.dto.PHPProducerDTO} * class structure * * Has default configuration for rabbitmq host, port, auth credentials * and queue name assuming RabbitMQ instance is running on same host * with out of box configuration and queue name in php agent is default. * * No additional configuration for this connector is needed if * RabbitMQ and php agent instances satisfy requirement mentioned above. */ public class RabbitMQConnector extends AbstractConnector { private final static Logger log = LoggerFactory.getLogger(RabbitMQConnector.class); private static final Gson gson = new GsonBuilder().create(); /** * RabbitMQ connection */ private Connection connection; /** * Channel used by connector to retrieve data */ private Channel channel; private long enabledInTimestamp; @Override public Properties getDefaultProperties() { Properties properties = new Properties(); properties.setProperty("connector.host", "localhost"); properties.setProperty("connector.port", "5672"); properties.setProperty("connector.username", "guest"); properties.setProperty("connector.password", "guest"); properties.setProperty("connector.queue-name", "moskito-php"); return properties; } /** * Opens connection and channel to listen * configured queue for incoming data * @param properties configured connector properties * @throws ConnectorInitException on connection to RabbitMQ fail */ @Override public void initWithDefaultProperties(Properties properties) throws ConnectorInitException { log.debug("Starting to initWithDefaultProperties RabbitMQ connector in php plugin..."); ConnectionFactory factory = new ConnectionFactory(); factory.setHost(properties.getProperty("connector.host")); factory.setPort( Integer.valueOf(properties.getProperty("connector.port")) ); factory.setUsername(properties.getProperty("connector.username")); factory.setPassword(properties.getProperty("connector.password")); try { connection = factory.newConnection(); channel = connection.createChannel(); channel.queueDeclare(properties.getProperty("connector.queue-name"), false, false, false, null); channel.basicConsume( properties.getProperty("connector.queue-name"), true, new MoskitoPHPConsumer(channel) ); enabledInTimestamp = System.currentTimeMillis(); } catch (IOException | TimeoutException e) { deinit(); throw new ConnectorInitException("Failed to open connection to RabbitMQ", e); } } /** * Closes RabbitMQ channel and connection */ public void deinit() { if (channel != null) try { channel.close(); } catch (IOException | TimeoutException e) { log.warn("Failed to close channel in RabbitMQ connector"); } if (connection != null) try { connection.close(); } catch (IOException e) { log.warn("Failed to close connection in RabbitMQ connector"); } } /** * Consumer implementation for passing messages * that parsing incoming json message and pass it * to {@link AbstractConnector#updateProducer(PHPProducerDTO)} */ private class MoskitoPHPConsumer extends DefaultConsumer { /** * Constructs a new instance and records its association to the passed-in channel. * * @param channel the channel to which this consumer is attached */ private MoskitoPHPConsumer(Channel channel) { super(channel); } @Override public void handleDelivery( String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body ) throws IOException { PHPProducerDTO producerDTO; try { producerDTO = gson.fromJson(new String(body, "UTF-8"), PHPProducerDTO.class); } catch (JsonSyntaxException e) { log.error("Failed to parse incoming json data.", e); return; } if((producerDTO.getTimestamp() * 1000) > enabledInTimestamp) { updateProducer(producerDTO); } } } }
moskito-extensions/moskito-php/src/main/java/net/anotheria/extensions/php/connectors/impl/RabbitMQConnector.java
package net.anotheria.extensions.php.connectors.impl; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import com.rabbitmq.client.*; import net.anotheria.extensions.php.connectors.AbstractConnector; import net.anotheria.extensions.php.dto.PHPProducerDTO; import net.anotheria.extensions.php.exceptions.ConnectorInitException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.IOException; import java.util.Properties; import java.util.concurrent.TimeoutException; /** * Connector to retrieve data from RabbitMQ. * * Expects that data to this connector will be sent in json * format with strict to {@link net.anotheria.extensions.php.dto.PHPProducerDTO} * class structure * * Has default configuration for rabbitmq host, port, auth credentials * and queue name assuming RabbitMQ instance is running on same host * with out of box configuration and queue name in php agent is default. * * No additional configuration for this connector is needed if * RabbitMQ and php agent instances satisfy requirement mentioned above. */ public class RabbitMQConnector extends AbstractConnector { private final static Logger log = LoggerFactory.getLogger(RabbitMQConnector.class); private static final Gson gson = new GsonBuilder().create(); /** * RabbitMQ connection */ private Connection connection; /** * Channel used by connector to retrieve data */ private Channel channel; private long enabledInTimestamp; @Override public Properties getDefaultProperties() { Properties properties = new Properties(); properties.setProperty("connector.host", "localhost"); properties.setProperty("connector.port", "5672"); properties.setProperty("connector.username", "guest"); properties.setProperty("connector.password", "guest"); properties.setProperty("connector.queue-name", "moskito-php"); return properties; } /** * Opens connection and channel to listen * configured queue for incoming data * @param properties configured connector properties * @throws ConnectorInitException on connection to RabbitMQ fail */ @Override public void initWithDefaultProperties(Properties properties) throws ConnectorInitException { log.debug("Starting to initWithDefaultProperties RabbitMQ connector in php plugin..."); ConnectionFactory factory = new ConnectionFactory(); factory.setHost(properties.getProperty("connector.host")); factory.setPort( Integer.valueOf(properties.getProperty("connector.port")) ); factory.setUsername(properties.getProperty("connector.username")); factory.setPassword(properties.getProperty("connector.password")); try { connection = factory.newConnection(); channel = connection.createChannel(); channel.queueDeclare(properties.getProperty("connector.queue-name"), false, false, false, null); channel.basicConsume( properties.getProperty("connector.queue-name"), true, new MoskitoPHPConsumer(channel) ); enabledInTimestamp = System.currentTimeMillis(); } catch (IOException | TimeoutException e) { deinit(); throw new ConnectorInitException("Failed to open connection to RabbitMQ", e); } } /** * Closes RabbitMQ channel and connection */ public void deinit() { if (channel != null) try { channel.close(); } catch (IOException | TimeoutException e) { log.warn("Failed to close channel in RabbitMQ connector"); } if (connection != null) try { connection.close(); } catch (IOException e) { log.warn("Failed to close connection in RabbitMQ connector"); } } /** * Consumer implementation for passing messages * that parsing incoming json message and pass it * to {@link AbstractConnector#updateProducer(PHPProducerDTO)} */ private class MoskitoPHPConsumer extends DefaultConsumer { /** * Constructs a new instance and records its association to the passed-in channel. * * @param channel the channel to which this consumer is attached */ private MoskitoPHPConsumer(Channel channel) { super(channel); } @Override public void handleDelivery( String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body ) throws IOException { PHPProducerDTO producerDTO = gson.fromJson(new String(body, "UTF-8"), PHPProducerDTO.class); if((producerDTO.getTimestamp() * 1000) > enabledInTimestamp) { updateProducer(producerDTO); } } } }
catching json exception in RabbitMQ connector
moskito-extensions/moskito-php/src/main/java/net/anotheria/extensions/php/connectors/impl/RabbitMQConnector.java
catching json exception in RabbitMQ connector
Java
mit
bab9bb4dd1b9adbb3c88a1d3a999a99f8ce984b6
0
za419/Android-calculator
package com.Ryan.Calculator; import android.annotation.TargetApi; import android.app.Activity; import android.os.Build; import android.os.Bundle; import android.view.View; import android.view.View.OnClickListener; import android.widget.Button; import android.widget.EditText; import java.math.BigInteger; public class MainActivity extends Activity { public double currentValue=0; /** Called when the activity is first created. */ @TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH) @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); if (Build.VERSION.SDK_INT>=Build.VERSION_CODES.HONEYCOMB) { getActionBar().hide(); if (Build.VERSION.SDK_INT>=Build.VERSION_CODES.ICE_CREAM_SANDWICH) findViewById(R.id.mainLayout).setSystemUiVisibility(View.SYSTEM_UI_FLAG_LOW_PROFILE); } setZero(); } public double parseDouble(String num) { if (num==null || num.indexOf("Error", 0)==0 || num.indexOf("ERROR", 0)==0) return 0; if ("Not prime".equals(num) || "Not prime or composite".equals(num)) return 0; if ("Prime".equals(num)) return 1; if (num.charAt(num.length()-1)=='\u03C0') { if (num.length()==1) return Math.PI; else if (num.length()==2 && num.charAt(0)=='-') // If the string is two long and the first character is a negation return -Math.PI; // Return negative pi return parseDouble(num.substring(0, num.length()-1))*Math.PI; } if (num.charAt(num.length()-1)=='e') { if (num.length()==1) return Math.E; else if (num.length()==2 && num.charAt(0)=='-') // If the string is two long and the first character is a negation return -Math.E; // Return negative e return parseDouble(num.substring(0, num.length()-1))*Math.E; } return Double.parseDouble(num); } public String inIntTermsOfPi(double num) { if (num==0) return "0"; double tmp=num/Math.PI; int n=(int)tmp; if (n==tmp) { if (n==-1) // If it is a negative, but otherwise 1 return "-\u03C0"; // Return negative pi return (n==1 ? "" : Integer.toString(n))+"\u03C0"; } else return Double.toString(num); } public String inIntTermsOfE(double num) { if (num==0) return "0"; double tmp=num/Math.E; int n=(int)tmp; if (n==tmp) { if (n==-1) // If it is a negative, but otherwise 1 return "-e"; // Return negative e return (n==1 ? "" : Integer.toString((int)tmp))+"e"; } else return Double.toString(num); } public String inIntTermsOfAny(double num) { if (Double.isNaN(num)) // "Last-resort" check return "ERROR: Nonreal or non-numeric result."; // Trap NaN and return a generic error for it. // Because of that check, we can guarantee that NaN's will not be floating around for more than one expression. String out=inIntTermsOfPi(num); if (!out.equals(Double.toString(num))) return out; else return inIntTermsOfE(num); } public void zero(View v) { setZero(); } public void setZero(EditText ev) { setText("0", ev); } public void setZero() { setZero((EditText) findViewById(R.id.mainTextField)); } public void setText(String n, EditText ev) { ev.setText(n); ev.setSelection(0, n.length()); // Ensure the cursor is at the end } public void setText(String n) { setText(n, (EditText) findViewById(R.id.mainTextField)); } public void terms(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfAny(parseDouble(ev.getText().toString())), ev); } public void decimal(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(Double.toString(parseDouble(ev.getText().toString())), ev); } public double getValue(final EditText ev) // Parses the content of ev into a double. { return parseDouble(ev.getText().toString().trim()); } public void doCalculate(final EditText ev, OnClickListener ocl) // Common code for buttons that use the mainCalculateButton. { doCalculate(ev, ocl, 0); } public void doCalculate(final EditText ev, OnClickListener ocl, double n) // Common code for buttons that use the mainCalculateButton, setting the default value to n rather than zero. { setText(inIntTermsOfAny(n), ev); final Button b=(Button)findViewById(R.id.mainCalculateButton); b.setVisibility(View.VISIBLE); b.setOnClickListener(ocl); } public void add(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=getValue(ev); doCalculate(ev, new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num = ev.getText().toString().trim(); if ("".equals(num)) return; setText(inIntTermsOfAny(currentValue + parseDouble(num)), ev); v.setVisibility(View.GONE); } }); } public void subtract(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=getValue(ev); doCalculate(ev, new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num=ev.getText().toString().trim(); if ("".equals(num)) return; setText(inIntTermsOfAny(currentValue-parseDouble(num)), ev); v.setVisibility(View.GONE); } }); } public void subtract2(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=getValue(ev); doCalculate(ev, new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num=ev.getText().toString().trim(); if ("".equals(num)) return; setText(inIntTermsOfAny(parseDouble(num)-currentValue), ev); v.setVisibility(View.GONE); } }); } public void multiply(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=getValue(ev); doCalculate(ev, new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num=ev.getText().toString().trim(); if ("".equals(num)) return; setText(inIntTermsOfAny(currentValue*parseDouble(num)), ev); v.setVisibility(View.GONE); } }); } public void divide(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=getValue(ev); doCalculate(ev, new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num=ev.getText().toString().trim(); if ("".equals(num)) return; double n=parseDouble(num); if (n==0) setText("Error: Divide by zero."); else setText(inIntTermsOfAny(currentValue/n), ev); v.setVisibility(View.GONE); } }); } public void divide2(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=getValue(ev); doCalculate(ev, new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num=ev.getText().toString().trim(); if ("".equals(num)) return; double n=parseDouble(num); if (n==0) setText("Error: Divide by zero."); else setText(inIntTermsOfAny(n/currentValue), ev); v.setVisibility(View.GONE); } }); } public void remainder(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=getValue(ev); if (Math.round(currentValue)!=currentValue) { setText("Error: Parameter is not an integer: "+ev.getText(), ev); return; } doCalculate(ev, new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num=ev.getText().toString().trim(); if ("".equals(num)) return; v.setVisibility(View.GONE); double tmp=parseDouble(num); if (Math.round(tmp)!=tmp) setText("Error: Parameter is not an integer: "+num, ev); else if (Math.round(tmp)==0) setText("Error: Divide by zero."); else setText(inIntTermsOfAny(Math.round(currentValue)%Math.round(tmp)), ev); } }); } public void remainder2(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=getValue(ev); if (Math.round(currentValue)!=currentValue) { setText("Error: Parameter is not an integer: "+ev.getText(), ev); return; } doCalculate(ev, new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num = ev.getText().toString().trim(); if ("".equals(num)) return; v.setVisibility(View.GONE); double tmp = parseDouble(num); if (Math.round(tmp) != tmp) setText("Error: Parameter is not an integer: " + num, ev); else if (Math.round(currentValue) == 0) setText("Error: Divide by zero."); else setText(inIntTermsOfAny(Math.round(tmp) % Math.round(currentValue)), ev); } }); } public void e(View v) { setText("e"); } public void pi(View v) { setText("\u03C0"); } public void negate(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfAny(-1 * parseDouble(ev.getText().toString())), ev); } public void sin(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfPi(Math.sin(parseDouble(ev.getText().toString()))), ev); } public void cos(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfPi(Math.cos(parseDouble(ev.getText().toString()))), ev); } public void tan(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfPi(Math.tan(parseDouble(ev.getText().toString()))), ev); } public void arcsin(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfPi(Math.asin(parseDouble(ev.getText().toString()))), ev); } public void arccos(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfPi(Math.acos(parseDouble(ev.getText().toString()))), ev); } public void arctan(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfPi(Math.atan(parseDouble(ev.getText().toString()))), ev); } public void exp(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfE(Math.exp(parseDouble(ev.getText().toString()))), ev); } public void degrees(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(((Double)Math.toDegrees(parseDouble(ev.getText().toString()))).toString(), ev); } public void radians(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfPi(Math.toRadians(parseDouble(ev.getText().toString()))), ev); } public void radians2(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); double tmp=parseDouble(ev.getText().toString()); tmp/=180; setText(Double.toString(tmp)+'\u03C0', ev); } public void ln(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfE(Math.log(parseDouble(ev.getText().toString()))), ev); } public void log(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfAny(Math.log10(parseDouble(ev.getText().toString()))), ev); } public void logb(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=parseDouble(ev.getText().toString()); doCalculate(ev, new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num = ev.getText().toString(); if ("".equals(num)) return; setText(inIntTermsOfAny(Math.log(currentValue) / Math.log(parseDouble(num))), ev); v.setVisibility(View.GONE); } }, 10); } public void logb2(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=parseDouble(ev.getText().toString()); doCalculate(ev,new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num=ev.getText().toString(); if ("".equals(num)) return; setText(inIntTermsOfAny(Math.log(parseDouble(num))/Math.log(currentValue)), ev); v.setVisibility(View.GONE); } }, 10); } public void round(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(Long.toString(Math.round(parseDouble(ev.getText().toString())))); } public void sqrt(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); double n=parseDouble(ev.getText().toString()); if (n<0) setText("ERROR: Complex result."); else setText(inIntTermsOfAny(Math.sqrt(n)), ev); } public void cbrt(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfAny(Math.cbrt(parseDouble(ev.getText().toString()))), ev); } public void ceil(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(Long.toString((long) Math.ceil(parseDouble(ev.getText().toString()))), ev); } public void floor(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(Long.toString((long)Math.floor(parseDouble(ev.getText().toString()))), ev); } public void pow(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=parseDouble(ev.getText().toString()); doCalculate(ev, new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num = ev.getText().toString(); if (!"".equals(num)) setText(inIntTermsOfAny(Math.pow(currentValue, parseDouble(num))), ev); v.setVisibility(View.GONE); } }, currentValue); } public void pow2(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=parseDouble(ev.getText().toString()); doCalculate(ev, new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num=ev.getText().toString(); if (!"".equals(num)) setText(inIntTermsOfAny(Math.pow(parseDouble(num), currentValue)), ev); v.setVisibility(View.GONE); } }, currentValue); } public void abs (View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfAny(Math.abs(parseDouble(ev.getText().toString()))), ev); } public void sinh(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfAny(Math.sinh(parseDouble(ev.getText().toString()))), ev); } public void expm(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfAny(Math.expm1(parseDouble(ev.getText().toString()))), ev); } public void cosh(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfAny(Math.cosh(parseDouble(ev.getText().toString()))), ev); } public void tanh(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfAny(Math.tanh(parseDouble(ev.getText().toString()))), ev); } public void lnp(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfAny(Math.log1p(parseDouble(ev.getText().toString()))), ev); } public void square(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); double num=parseDouble(ev.getText().toString()); setText(inIntTermsOfAny(num*num), ev); } public void cube(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); double num=parseDouble(ev.getText().toString()); setText(inIntTermsOfAny(num*num*num), ev); } public void isPrime(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); double num=parseDouble(ev.getText().toString()); int n=(int)Math.floor(num); if (n!=num || n<1 || isDivisible(n,2)) { setText("Not prime"); return; } if (n==1) { setText("Not prime or composite"); return; } for (int i=3; i<=Math.sqrt(n); i+=2) { if (isDivisible(n, i)) { setText("Not prime"); return; } } setText("Prime"); } public boolean isDivisible(int num, int den) { return num%den==0; } public double fastPow(double val, int power) { if (val==2) return fastPow(power).doubleValue(); switch (power) { case 0: return 1; case 1: return val; case 2: return val*val; default: if (power<0) return 1/fastPow(val, -1*power); if (power%2==0) return fastPow(fastPow(val, 2), power>>1); return val*fastPow(val, power-1); } } public BigInteger fastPow(int pow) // 2 as base { return BigInteger.ZERO.flipBit(pow); } public void raise2(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); double num=parseDouble(ev.getText().toString()); if (Math.round(num)==num) // Integer power. Use the fastpow() and a BigInteger. setText(fastPow((int)Math.round(num)).toString(), ev); else setText(Double.toString(Math.pow(2, num)), ev); } }
app/src/main/java/com/RyanHodin/Calculator/MainActivity.java
package com.Ryan.Calculator; import android.annotation.TargetApi; import android.app.Activity; import android.os.Build; import android.os.Bundle; import android.view.View; import android.view.View.OnClickListener; import android.widget.Button; import android.widget.EditText; import java.math.BigInteger; public class MainActivity extends Activity { public double currentValue=0; /** Called when the activity is first created. */ @TargetApi(Build.VERSION_CODES.ICE_CREAM_SANDWICH) @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.main); if (Build.VERSION.SDK_INT>=Build.VERSION_CODES.HONEYCOMB) { getActionBar().hide(); if (Build.VERSION.SDK_INT>=Build.VERSION_CODES.ICE_CREAM_SANDWICH) findViewById(R.id.mainLayout).setSystemUiVisibility(View.SYSTEM_UI_FLAG_LOW_PROFILE); } setZero(); } public double parseDouble(String num) { if (num.indexOf("Error", 0)==0 || num.indexOf("ERROR", 0)==0) return 0; if (num.charAt(num.length()-1)=='\u03C0') { if (num.length()==1) return Math.PI; else if (num.length()==2 && num.charAt(0)=='-') // If the string is two long and the first character is a negation return -Math.PI; // Return negative pi return parseDouble(num.substring(0, num.length()-1))*Math.PI; } if (num.charAt(num.length()-1)=='e') { if (num.length()==1) return Math.E; else if (num.length()==2 && num.charAt(0)=='-') // If the string is two long and the first character is a negation return -Math.E; // Return negative e return parseDouble(num.substring(0, num.length()-1))*Math.E; } return Double.parseDouble(num); } public String inIntTermsOfPi(double num) { if (num==0) return "0"; double tmp=num/Math.PI; int n=(int)tmp; if (n==tmp) { if (n==-1) // If it is a negative, but otherwise 1 return "-\u03C0"; // Return negative pi return (n==1 ? "" : Integer.toString(n))+"\u03C0"; } else return Double.toString(num); } public String inIntTermsOfE(double num) { if (num==0) return "0"; double tmp=num/Math.E; int n=(int)tmp; if (n==tmp) { if (n==-1) // If it is a negative, but otherwise 1 return "-e"; // Return negative e return (n==1 ? "" : Integer.toString((int)tmp))+"e"; } else return Double.toString(num); } public String inIntTermsOfAny(double num) { if (Double.isNaN(num)) // "Last-resort" check return "ERROR: Nonreal or non-numeric result."; // Trap NaN and return a generic error for it. // Because of that check, we can guarantee that NaN's will not be floating around for more than one expression. String out=inIntTermsOfPi(num); if (!out.equals(Double.toString(num))) return out; else return inIntTermsOfE(num); } public void zero(View v) { setZero(); } public void setZero(EditText ev) { setText("0", ev); } public void setZero() { setZero((EditText) findViewById(R.id.mainTextField)); } public void setText(String n, EditText ev) { ev.setText(n); ev.setSelection(0, n.length()); // Ensure the cursor is at the end } public void setText(String n) { setText(n, (EditText) findViewById(R.id.mainTextField)); } public void terms(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfAny(parseDouble(ev.getText().toString())), ev); } public void decimal(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(Double.toString(parseDouble(ev.getText().toString())), ev); } public double getValue(final EditText ev) // Parses the content of ev into a double. { return parseDouble(ev.getText().toString().trim()); } public void doCalculate(final EditText ev, OnClickListener ocl) // Common code for buttons that use the mainCalculateButton. { doCalculate(ev, ocl, 0); } public void doCalculate(final EditText ev, OnClickListener ocl, double n) // Common code for buttons that use the mainCalculateButton, setting the default value to n rather than zero. { setText(inIntTermsOfAny(n), ev); final Button b=(Button)findViewById(R.id.mainCalculateButton); b.setVisibility(View.VISIBLE); b.setOnClickListener(ocl); } public void add(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=getValue(ev); doCalculate(ev, new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num = ev.getText().toString().trim(); if ("".equals(num)) return; setText(inIntTermsOfAny(currentValue + parseDouble(num)), ev); v.setVisibility(View.GONE); } }); } public void subtract(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=getValue(ev); doCalculate(ev, new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num=ev.getText().toString().trim(); if ("".equals(num)) return; setText(inIntTermsOfAny(currentValue-parseDouble(num)), ev); v.setVisibility(View.GONE); } }); } public void subtract2(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=getValue(ev); doCalculate(ev, new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num=ev.getText().toString().trim(); if ("".equals(num)) return; setText(inIntTermsOfAny(parseDouble(num)-currentValue), ev); v.setVisibility(View.GONE); } }); } public void multiply(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=getValue(ev); doCalculate(ev, new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num=ev.getText().toString().trim(); if ("".equals(num)) return; setText(inIntTermsOfAny(currentValue*parseDouble(num)), ev); v.setVisibility(View.GONE); } }); } public void divide(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=getValue(ev); doCalculate(ev, new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num=ev.getText().toString().trim(); if ("".equals(num)) return; double n=parseDouble(num); if (n==0) setText("Error: Divide by zero."); else setText(inIntTermsOfAny(currentValue/n), ev); v.setVisibility(View.GONE); } }); } public void divide2(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=getValue(ev); doCalculate(ev, new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num=ev.getText().toString().trim(); if ("".equals(num)) return; double n=parseDouble(num); if (n==0) setText("Error: Divide by zero."); else setText(inIntTermsOfAny(n/currentValue), ev); v.setVisibility(View.GONE); } }); } public void remainder(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=getValue(ev); if (Math.round(currentValue)!=currentValue) { setText("Error: Parameter is not an integer: "+ev.getText(), ev); return; } doCalculate(ev, new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num=ev.getText().toString().trim(); if ("".equals(num)) return; v.setVisibility(View.GONE); double tmp=parseDouble(num); if (Math.round(tmp)!=tmp) setText("Error: Parameter is not an integer: "+num, ev); else if (Math.round(tmp)==0) setText("Error: Divide by zero."); else setText(inIntTermsOfAny(Math.round(currentValue)%Math.round(tmp)), ev); } }); } public void remainder2(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=getValue(ev); if (Math.round(currentValue)!=currentValue) { setText("Error: Parameter is not an integer: "+ev.getText(), ev); return; } doCalculate(ev, new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num = ev.getText().toString().trim(); if ("".equals(num)) return; v.setVisibility(View.GONE); double tmp = parseDouble(num); if (Math.round(tmp) != tmp) setText("Error: Parameter is not an integer: " + num, ev); else if (Math.round(currentValue) == 0) setText("Error: Divide by zero."); else setText(inIntTermsOfAny(Math.round(tmp) % Math.round(currentValue)), ev); } }); } public void e(View v) { setText("e"); } public void pi(View v) { setText("\u03C0"); } public void negate(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfAny(-1 * parseDouble(ev.getText().toString())), ev); } public void sin(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfPi(Math.sin(parseDouble(ev.getText().toString()))), ev); } public void cos(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfPi(Math.cos(parseDouble(ev.getText().toString()))), ev); } public void tan(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfPi(Math.tan(parseDouble(ev.getText().toString()))), ev); } public void arcsin(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfPi(Math.asin(parseDouble(ev.getText().toString()))), ev); } public void arccos(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfPi(Math.acos(parseDouble(ev.getText().toString()))), ev); } public void arctan(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfPi(Math.atan(parseDouble(ev.getText().toString()))), ev); } public void exp(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfE(Math.exp(parseDouble(ev.getText().toString()))), ev); } public void degrees(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(((Double)Math.toDegrees(parseDouble(ev.getText().toString()))).toString(), ev); } public void radians(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfPi(Math.toRadians(parseDouble(ev.getText().toString()))), ev); } public void radians2(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); double tmp=parseDouble(ev.getText().toString()); tmp/=180; setText(Double.toString(tmp)+'\u03C0', ev); } public void ln(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfE(Math.log(parseDouble(ev.getText().toString()))), ev); } public void log(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfAny(Math.log10(parseDouble(ev.getText().toString()))), ev); } public void logb(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=parseDouble(ev.getText().toString()); doCalculate(ev, new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num = ev.getText().toString(); if ("".equals(num)) return; setText(inIntTermsOfAny(Math.log(currentValue) / Math.log(parseDouble(num))), ev); v.setVisibility(View.GONE); } }, 10); } public void logb2(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=parseDouble(ev.getText().toString()); doCalculate(ev,new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num=ev.getText().toString(); if ("".equals(num)) return; setText(inIntTermsOfAny(Math.log(parseDouble(num))/Math.log(currentValue)), ev); v.setVisibility(View.GONE); } }, 10); } public void round(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(Long.toString(Math.round(parseDouble(ev.getText().toString())))); } public void sqrt(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); double n=parseDouble(ev.getText().toString()); if (n<0) setText("ERROR: Complex result."); else setText(inIntTermsOfAny(Math.sqrt(n)), ev); } public void cbrt(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfAny(Math.cbrt(parseDouble(ev.getText().toString()))), ev); } public void ceil(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(Long.toString((long) Math.ceil(parseDouble(ev.getText().toString()))), ev); } public void floor(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(Long.toString((long)Math.floor(parseDouble(ev.getText().toString()))), ev); } public void pow(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=parseDouble(ev.getText().toString()); doCalculate(ev, new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num = ev.getText().toString(); if (!"".equals(num)) setText(inIntTermsOfAny(Math.pow(currentValue, parseDouble(num))), ev); v.setVisibility(View.GONE); } }, currentValue); } public void pow2(View v) { final EditText ev=(EditText)findViewById(R.id.mainTextField); currentValue=parseDouble(ev.getText().toString()); doCalculate(ev, new OnClickListener() { @Override public void onClick(View v) { v.setOnClickListener(null); String num=ev.getText().toString(); if (!"".equals(num)) setText(inIntTermsOfAny(Math.pow(parseDouble(num), currentValue)), ev); v.setVisibility(View.GONE); } }, currentValue); } public void abs (View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfAny(Math.abs(parseDouble(ev.getText().toString()))), ev); } public void sinh(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfAny(Math.sinh(parseDouble(ev.getText().toString()))), ev); } public void expm(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfAny(Math.expm1(parseDouble(ev.getText().toString()))), ev); } public void cosh(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfAny(Math.cosh(parseDouble(ev.getText().toString()))), ev); } public void tanh(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfAny(Math.tanh(parseDouble(ev.getText().toString()))), ev); } public void lnp(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); setText(inIntTermsOfAny(Math.log1p(parseDouble(ev.getText().toString()))), ev); } public void square(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); double num=parseDouble(ev.getText().toString()); setText(inIntTermsOfAny(num*num), ev); } public void cube(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); double num=parseDouble(ev.getText().toString()); setText(inIntTermsOfAny(num*num*num), ev); } public void isPrime(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); double num=parseDouble(ev.getText().toString()); int n=(int)Math.floor(num); if (n!=num || n<1 || isDivisible(n,2)) { setText("Not prime"); return; } if (n==1) { setText("Not prime or composite"); return; } for (int i=3; i<=Math.sqrt(n); i+=2) { if (isDivisible(n, i)) { setText("Not prime"); return; } } setText("Prime"); } public boolean isDivisible(int num, int den) { return num%den==0; } public double fastPow(double val, int power) { if (val==2) return fastPow(power).doubleValue(); switch (power) { case 0: return 1; case 1: return val; case 2: return val*val; default: if (power<0) return 1/fastPow(val, -1*power); if (power%2==0) return fastPow(fastPow(val, 2), power>>1); return val*fastPow(val, power-1); } } public BigInteger fastPow(int pow) // 2 as base { return BigInteger.ZERO.flipBit(pow); } public void raise2(View v) { EditText ev=(EditText)findViewById(R.id.mainTextField); double num=parseDouble(ev.getText().toString()); if (Math.round(num)==num) // Integer power. Use the fastpow() and a BigInteger. setText(fastPow((int)Math.round(num)).toString(), ev); else setText(Double.toString(Math.pow(2, num)), ev); } }
Add checks for prime? result strings - Fixes #14 Also add a null trap to parseDouble()
app/src/main/java/com/RyanHodin/Calculator/MainActivity.java
Add checks for prime? result strings - Fixes #14 Also add a null trap to parseDouble()
Java
mit
02bcffb46b7474a80c949907c78bf80488470b2c
0
CjHare/systematic-trading
/** * Copyright (c) 2015-2017, CJ Hare All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are permitted * provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this list of conditions * and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, this list of * conditions and the following disclaimer in the documentation and/or other materials provided with * the distribution. * * * Neither the name of [project] nor the names of its contributors may be used to endorse or * promote products derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY * WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.systematic.trading.signals.indicator; import java.math.BigDecimal; import java.math.MathContext; import java.time.LocalDate; import java.util.ArrayList; import java.util.List; import java.util.function.Predicate; import com.systematic.trading.data.TradingDayPrices; import com.systematic.trading.maths.SignalType; import com.systematic.trading.maths.indicator.IllegalArgumentThrowingValidator; import com.systematic.trading.maths.indicator.sma.SimpleMovingAverage; import com.systematic.trading.maths.indicator.sma.SimpleMovingAverageCalculator; import com.systematic.trading.signal.IndicatorSignalType; import com.systematic.trading.signals.filter.InclusiveDatelRangeFilter; import com.systematic.trading.signals.filter.SignalRangeFilter; /** * The Simple Moving Average (SMA) gradient, whether it is negative (downward),flat * (no change) or positive (upward). * * @author CJ Hare */ public class SimpleMovingAverageGradientSignals implements IndicatorSignalGenerator { //TODO this is confusing, split up like the RSI, bullish / bearish indicators /** * Trigger gradient for a bullish signal. */ public enum GradientType { NEGATIVE, FLAT, POSITIVE } /** Scale and precision to apply to mathematical operations. */ private final MathContext mathContext; /** Provides date range filtering. */ private final InclusiveDatelRangeFilter dateRangeFilter = new InclusiveDatelRangeFilter(); /** On which type of gradient does a signal get generated. */ private final GradientType signalGenerated; /** Number of days to average the value on. */ private final int lookback; /** The number of days the SMA gradient covers. */ private final int daysOfGradient; /** Responsible for calculating the simple moving average. */ private final SimpleMovingAverage movingAverage; /** Range of signal dates of interest. */ private final SignalRangeFilter signalRangeFilter; public SimpleMovingAverageGradientSignals( final int lookback, final int daysOfGradient, final GradientType signalGenerated, final SignalRangeFilter filter, final MathContext mathContext ) { this(lookback, daysOfGradient, signalGenerated, filter, mathContext, new SimpleMovingAverageCalculator(lookback, daysOfGradient, new IllegalArgumentThrowingValidator(), mathContext)); } private SimpleMovingAverageGradientSignals( final int lookback, final int daysOfGradient, final GradientType signalGenerated, final SignalRangeFilter filter, final MathContext mathContext, final SimpleMovingAverageCalculator movingAverage ) { this.signalGenerated = signalGenerated; this.daysOfGradient = daysOfGradient; this.movingAverage = movingAverage; this.mathContext = mathContext; this.lookback = lookback; this.signalRangeFilter = filter; } @Override public List<IndicatorSignal> calculateSignals( final TradingDayPrices[] data ) { //TODO validate the number of data items meets the minimum final Predicate<LocalDate> signalRange = candidate -> dateRangeFilter.isWithinSignalRange( signalRangeFilter.getEarliestSignalDate(data), signalRangeFilter.getLatestSignalDate(data), candidate); final List<BigDecimal> sma = movingAverage.sma(data); // Only look at the gradient if there's more than one sma result if (!sma.isEmpty()) { return analyseGradient(data, sma, signalRange); } return new ArrayList<>(); } private List<IndicatorSignal> analyseGradient( final TradingDayPrices[] data, final List<BigDecimal> sma, final Predicate<LocalDate> signalRange ) { final List<IndicatorSignal> signals = new ArrayList<>(); // We're only using the right most values of the data final int offset = data.length - sma.size(); // Start with the first value, bump the index BigDecimal previous = sma.get(0); for (int index = 1; index < sma.size(); index++) { final LocalDate today = data[index + offset].getDate(); if (signalRange.test(today)) { //TODO generate the down signals too switch (signalGenerated) { case POSITIVE: if (isPositiveGardient(previous, sma.get(index))) { signals.add(new IndicatorSignal(today, IndicatorSignalType.SMA, SignalType.BULLISH)); } break; case FLAT: if (isFlatGardient(previous, sma.get(index))) { signals.add(new IndicatorSignal(today, IndicatorSignalType.SMA, SignalType.BULLISH)); } break; case NEGATIVE: if (isNegativeGardient(previous, sma.get(index))) { signals.add(new IndicatorSignal(today, IndicatorSignalType.SMA, SignalType.BULLISH)); } break; default: throw new IllegalArgumentException(String.format("%s enum is unexpected", signalGenerated)); } } previous = sma.get(index); } return signals; } private boolean isPositiveGardient( final BigDecimal previous, final BigDecimal current ) { return current.subtract(previous, mathContext).compareTo(BigDecimal.ZERO) > 0; } private boolean isNegativeGardient( final BigDecimal previous, final BigDecimal current ) { return current.subtract(previous, mathContext).compareTo(BigDecimal.ZERO) < 0; } private boolean isFlatGardient( final BigDecimal previous, final BigDecimal current ) { return current.subtract(previous, mathContext).compareTo(BigDecimal.ZERO) == 0; } @Override public int getRequiredNumberOfTradingDays() { return lookback + daysOfGradient; } @Override public IndicatorSignalType getSignalType() { return IndicatorSignalType.SMA; } }
systematic-trading-signals/src/main/java/com/systematic/trading/signals/indicator/SimpleMovingAverageGradientSignals.java
/** * Copyright (c) 2015-2017, CJ Hare All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, are permitted * provided that the following conditions are met: * * * Redistributions of source code must retain the above copyright notice, this list of conditions * and the following disclaimer. * * * Redistributions in binary form must reproduce the above copyright notice, this list of * conditions and the following disclaimer in the documentation and/or other materials provided with * the distribution. * * * Neither the name of [project] nor the names of its contributors may be used to endorse or * promote products derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND * FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY * WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ package com.systematic.trading.signals.indicator; import java.math.BigDecimal; import java.math.MathContext; import java.time.LocalDate; import java.util.ArrayList; import java.util.List; import java.util.function.Predicate; import com.systematic.trading.data.TradingDayPrices; import com.systematic.trading.maths.SignalType; import com.systematic.trading.maths.indicator.IllegalArgumentThrowingValidator; import com.systematic.trading.maths.indicator.sma.SimpleMovingAverage; import com.systematic.trading.maths.indicator.sma.SimpleMovingAverageCalculator; import com.systematic.trading.signal.IndicatorSignalType; import com.systematic.trading.signals.filter.InclusiveDatelRangeFilter; import com.systematic.trading.signals.filter.SignalRangeFilter; /** * The Simple Moving Average (SMA) gradient, whether it is negative (downward),flat * (no change) or positive (upward). * * @author CJ Hare */ public class SimpleMovingAverageGradientSignals implements IndicatorSignalGenerator { //TODO replace this class with SAM & EMA signals, supported using the origin, instead of inter-day gradient /** * Trigger gradient for a bullish signal. */ public enum GradientType { NEGATIVE, FLAT, POSITIVE } /** Scale and precision to apply to mathematical operations. */ private final MathContext mathContext; /** Provides date range filtering. */ private final InclusiveDatelRangeFilter dateRangeFilter = new InclusiveDatelRangeFilter(); /** On which type of gradient does a signal get generated. */ private final GradientType signalGenerated; /** Number of days to average the value on. */ private final int lookback; /** The number of days the SMA gradient covers. */ private final int daysOfGradient; /** Responsible for calculating the simple moving average. */ private final SimpleMovingAverage movingAverage; /** Range of signal dates of interest. */ private final SignalRangeFilter signalRangeFilter; public SimpleMovingAverageGradientSignals( final int lookback, final int daysOfGradient, final GradientType signalGenerated, final SignalRangeFilter filter, final MathContext mathContext ) { this(lookback, daysOfGradient, signalGenerated, filter, mathContext, new SimpleMovingAverageCalculator(lookback, daysOfGradient, new IllegalArgumentThrowingValidator(), mathContext)); } private SimpleMovingAverageGradientSignals( final int lookback, final int daysOfGradient, final GradientType signalGenerated, final SignalRangeFilter filter, final MathContext mathContext, final SimpleMovingAverageCalculator movingAverage ) { this.signalGenerated = signalGenerated; this.daysOfGradient = daysOfGradient; this.movingAverage = movingAverage; this.mathContext = mathContext; this.lookback = lookback; this.signalRangeFilter = filter; } @Override public List<IndicatorSignal> calculateSignals( final TradingDayPrices[] data ) { //TODO validate the number of data items meets the minimum final Predicate<LocalDate> signalRange = candidate -> dateRangeFilter.isWithinSignalRange( signalRangeFilter.getEarliestSignalDate(data), signalRangeFilter.getLatestSignalDate(data), candidate); final List<BigDecimal> sma = movingAverage.sma(data); // Only look at the gradient if there's more than one sma result if (!sma.isEmpty()) { return analyseGradient(data, sma, signalRange); } return new ArrayList<>(); } private List<IndicatorSignal> analyseGradient( final TradingDayPrices[] data, final List<BigDecimal> sma, final Predicate<LocalDate> signalRange ) { final List<IndicatorSignal> signals = new ArrayList<>(); // We're only using the right most values of the data final int offset = data.length - sma.size(); // Start with the first value, bump the index BigDecimal previous = sma.get(0); for (int index = 1; index < sma.size(); index++) { final LocalDate today = data[index + offset].getDate(); if (signalRange.test(today)) { //TODO generate the down signals too switch (signalGenerated) { case POSITIVE: if (isPositiveGardient(previous, sma.get(index))) { signals.add(new IndicatorSignal(today, IndicatorSignalType.SMA, SignalType.BULLISH)); } break; case FLAT: if (isFlatGardient(previous, sma.get(index))) { signals.add(new IndicatorSignal(today, IndicatorSignalType.SMA, SignalType.BULLISH)); } break; case NEGATIVE: if (isNegativeGardient(previous, sma.get(index))) { signals.add(new IndicatorSignal(today, IndicatorSignalType.SMA, SignalType.BULLISH)); } break; default: throw new IllegalArgumentException(String.format("%s enum is unexpected", signalGenerated)); } } previous = sma.get(index); } return signals; } private boolean isPositiveGardient( final BigDecimal previous, final BigDecimal current ) { return current.subtract(previous, mathContext).compareTo(BigDecimal.ZERO) > 0; } private boolean isNegativeGardient( final BigDecimal previous, final BigDecimal current ) { return current.subtract(previous, mathContext).compareTo(BigDecimal.ZERO) < 0; } private boolean isFlatGardient( final BigDecimal previous, final BigDecimal current ) { return current.subtract(previous, mathContext).compareTo(BigDecimal.ZERO) == 0; } @Override public int getRequiredNumberOfTradingDays() { return lookback + daysOfGradient; } @Override public IndicatorSignalType getSignalType() { return IndicatorSignalType.SMA; } }
Updating TODOs
systematic-trading-signals/src/main/java/com/systematic/trading/signals/indicator/SimpleMovingAverageGradientSignals.java
Updating TODOs
Java
mit
6734fac09dfe58da035acbc2b2bf92daec07f5f1
0
fvasquezjatar/fermat-unused,fvasquezjatar/fermat-unused
/* * @(#DeveloperIdentityDao.java 07/16/2015 * Copyright 2015 bitDubai, Inc. All rights reserved. * BITDUBAI/CONFIDENTIAL * */ package com.bitdubai.fermat_pip_plugin.layer.identity.developer.developer.bitdubai.version_1.structure; // Packages and classes to import of jdk 1.7 import java.util.*; // Packages and classes to import of fermat api import com.bitdubai.fermat_api.DealsWithPluginIdentity; import com.bitdubai.fermat_api.FermatException; import com.bitdubai.fermat_api.layer.all_definition.crypto.asymmetric.ECCKeyPair; import com.bitdubai.fermat_api.layer.dmp_basic_wallet.bitcoin_wallet.exceptions.CantInitializeBitcoinWalletBasicException; import com.bitdubai.fermat_api.layer.osa_android.database_system.Database; import com.bitdubai.fermat_api.layer.osa_android.database_system.DatabaseFilterType; import com.bitdubai.fermat_api.layer.osa_android.database_system.DatabaseTable; import com.bitdubai.fermat_api.layer.osa_android.database_system.DatabaseTableRecord; import com.bitdubai.fermat_api.layer.osa_android.database_system.DealsWithPluginDatabaseSystem; import com.bitdubai.fermat_api.layer.osa_android.database_system.PluginDatabaseSystem; import com.bitdubai.fermat_api.layer.osa_android.database_system.exceptions.CantCreateDatabaseException; import com.bitdubai.fermat_api.layer.osa_android.database_system.exceptions.CantOpenDatabaseException; import com.bitdubai.fermat_api.layer.osa_android.database_system.exceptions.DatabaseNotFoundException; import com.bitdubai.fermat_api.layer.osa_android.logger_system.LogLevel; import com.bitdubai.fermat_api.layer.osa_android.logger_system.LogManager; import com.bitdubai.fermat_pip_api.layer.pip_identity.developer.interfaces.DeveloperIdentity; import com.bitdubai.fermat_pip_api.layer.pip_user.device_user.interfaces.DeviceUser; import com.bitdubai.fermat_pip_api.layer.pip_identity.developer.exceptions.CantGetUserDeveloperIdentitiesException; import com.bitdubai.fermat_api.layer.osa_android.database_system.exceptions.CantLoadTableToMemoryException; import com.bitdubai.fermat_api.layer.osa_android.database_system.exceptions.CantInsertRecordException; import com.bitdubai.fermat_pip_api.layer.pip_identity.developer.exceptions.CantCreateNewDeveloperException; import com.bitdubai.fermat_pip_plugin.layer.identity.developer.developer.bitdubai.version_1.DeveloperIdentityPluginRoot; import com.bitdubai.fermat_pip_plugin.layer.identity.developer.developer.bitdubai.version_1.exceptions.CantInitializeDeveloperIdentityDatabaseException; // Packages and classes to import of apache commons. import static org.apache.commons.lang3.StringUtils.isEmpty; /** * The Class <code>com.bitdubai.fermat_pip_plugin.layer.identity.developer.developer.bitdubai.version_1.structure.DeveloperIdentityDao</code> * all methods implementation to access the data base<p/> * <p/> * * Created by Leon Acosta - ([email protected]) on 14/07/15. * Updated by Raul Pena - ([email protected]) on 16/07/15. * * @version 1.0 * @since Java JDK 1.7 */ public class DeveloperIdentityDao implements DealsWithPluginDatabaseSystem, DealsWithPluginIdentity { // Private instance fields declarations. // DealsWithPluginDatabaseSystem Interface member variables. private PluginDatabaseSystem pluginDatabaseSystem = null; // Database factory private DeveloperIdentityDatabaseFactory databaseFactory = null; // Database object. private Database dataBase = null; private UUID pluginId = null; // DealsWithlogManager interface member variable. private LogManager logManager = null; // Private class fields declarations. // Blank target. private static final String _DEFAUL_STRING = ""; // Public constructor declarations. /** * * <p>Constructor without parameters. * * * */ public DeveloperIdentityDao () { // Call to super class. super (); } /** * * <p>Constructor with parameters. * * @param pluginDatabaseSystem * @param databaseFactory * @param pluginId * * */ public DeveloperIdentityDao (PluginDatabaseSystem pluginDatabaseSystem, DeveloperIdentityDatabaseFactory databaseFactory, UUID pluginId,LogManager logManager) { // Call to super class. super (); // Set internal values. this.pluginDatabaseSystem = pluginDatabaseSystem; this.databaseFactory = databaseFactory; this.pluginId = pluginId; this.logManager = logManager; } // Private instance methods declarations. /* * * <p>Method that check if alias exists. * * @param alias * @return Boolean that indicate if the alias exists or not. * */ private boolean aliasExists (String alias) throws CantCreateNewDeveloperException { // Setup method. DatabaseTable table; // Developer table. // Check the arguments. if (isEmpty (alias)) { // Cancel the process. logManager.log (DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Alias is empty, arguments are null or empty.", _DEFAUL_STRING, _DEFAUL_STRING); return Boolean.FALSE; } if (this.dataBase == null) { // Cancel the process. throw new CantCreateNewDeveloperException ("Cant check if alias exists or not, Database is closed o null.", "Plugin Identity", "Cant check if alias exists or not, Database is closed o null."); } // Get developers identities list. try { logManager.log (DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Checking if alias " + alias + " exists.", _DEFAUL_STRING, _DEFAUL_STRING); // 1) Get the table. logManager.log(DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Getting " + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + " table and record.", _DEFAUL_STRING, _DEFAUL_STRING); table = this.dataBase.getTable (DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME); if (table == null) { // Table not found. logManager.log (DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Cant check if alias exists, table not " + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + " found.", _DEFAUL_STRING, _DEFAUL_STRING); throw new CantGetUserDeveloperIdentitiesException("Cant check if alias exists, table not \" + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + \" found.", "Plugin Identity", "Cant check if alias exists, table not \" + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + \" found."); } // 2) Find the developers. logManager.log(DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Applying filter to " + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + " table by developer alias key [" + alias.trim() + "].", _DEFAUL_STRING, _DEFAUL_STRING); table.setStringFilter(DeveloperIdentityDatabaseConstants.DEVELOPER_DEVELOPER_ALIAS_COLUMN_NAME, alias, DatabaseFilterType.EQUAL); table.loadToMemory(); // 3) Get developers. logManager.log(DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Developer alias found (" + table.getRecords().size() + ") by alias [" + alias + "].", _DEFAUL_STRING, _DEFAUL_STRING); return table.getRecords ().size () > 0; } catch (CantLoadTableToMemoryException em) { // Failure unknown. throw new CantCreateNewDeveloperException (em.getMessage(), em, "Plugin Identity", "Cant load " + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + " table in memory."); } catch (Exception e) { // Failure unknown. throw new CantCreateNewDeveloperException (e.getMessage(), e, "Plugin Identity", "Cant check if alias exists, unknown failure."); } } // Public instance methods declarations extends of com.bitdubai.fermat_api.layer.osa_android.database_system.DealsWithPluginDatabaseSystem /** * DealsWithPluginDatabaseSystem Interface implementation. */ @Override public void setPluginDatabaseSystem (PluginDatabaseSystem pluginDatabaseSystem) { // Set internal values. this.pluginDatabaseSystem = pluginDatabaseSystem; } /** * DealsWithPluginIdentity Interface implementation. */ @Override public void setPluginId (UUID pluginId) { // Set value. this.pluginId = pluginId; } // Public instance methods declarations. /** * * <p>Method tha set the Database factory. * */ public void setDeveloperIdentityDatabaseFactory (DeveloperIdentityDatabaseFactory databaseFactory) { // Set the value. this.databaseFactory = databaseFactory; } /** * This method open or creates the database i'll be working with. * * @param ownerId plugin id * @throws CantInitializeDeveloperIdentityDatabaseException */ public void initializeDatabase (UUID ownerId) throws CantInitializeDeveloperIdentityDatabaseException { // Check the arguments. if (ownerId == null) { // Cancel the process. throw new CantInitializeDeveloperIdentityDatabaseException ("Cant create database, arguments are null or empty.", "Plugin Identity", "Cant create database, arguments are null or empty."); } // Create the database. try { logManager.log(DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Initializing identity database system...", _DEFAUL_STRING, _DEFAUL_STRING); this.databaseFactory.setPluginDatabaseSystem(this.pluginDatabaseSystem); //Check the database exist this.dataBase = pluginDatabaseSystem.openDatabase(pluginId, DeveloperIdentityDatabaseConstants.DEVELOPER_DB_NAME); } catch (DatabaseNotFoundException databaseNotFoundException) { try{ this.dataBase = this.databaseFactory.createDatabase (ownerId, DeveloperIdentityDatabaseConstants.DEVELOPER_DB_NAME ); } catch (CantCreateDatabaseException e) { throw new CantInitializeDeveloperIdentityDatabaseException(e.getMessage(), e, "Plugin Identity", "Cant create database."); } } catch (CantOpenDatabaseException cantOpenDatabaseException){ throw new CantInitializeDeveloperIdentityDatabaseException(cantOpenDatabaseException.getMessage(), cantOpenDatabaseException, "Plugin Identity", "Cant create database."); } catch (Exception e) { // Failure unknown. throw new CantInitializeDeveloperIdentityDatabaseException (e.getMessage(), e, "Plugin Identity", "Cant create database, unknown failure."); } } /** * Method that create a new developer in the database. * * @param alias alias of developer * @param developerKeyPair new private and public key for the developer * @param deviceUser logged in device user * @return DeveloperIdentity * @throws CantCreateNewDeveloperException */ public DeveloperIdentity createNewDeveloper (String alias, ECCKeyPair developerKeyPair, DeviceUser deviceUser) throws CantCreateNewDeveloperException { // Check the arguments. if (developerKeyPair == null || isEmpty (alias) || deviceUser == null) { // Cancel the process. throw new CantCreateNewDeveloperException ("Cant create new developer, arguments are null or empty.", "Plugin Identity", "Cant create database, arguments are null or empty."); } if (this.dataBase == null) { // Cancel the process. throw new CantCreateNewDeveloperException ("Cant create new developer, Database is closed o null.", "Plugin Identity", "Cant create new developer, Database is closed o null."); } // Create the new developer. try { logManager.log (DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Initializing developer record creation.", _DEFAUL_STRING, _DEFAUL_STRING); if (aliasExists (alias)) { throw new CantCreateNewDeveloperException ("Cant create new developer, alias exists.", "Plugin Identity", "Cant create new developer, alias exists."); } logManager.log (DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Getting " + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + " table and record.", _DEFAUL_STRING, _DEFAUL_STRING); DatabaseTable table = this.dataBase.getTable (DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME); DatabaseTableRecord record = table.getEmptyRecord (); logManager.log(DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Getting " + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + " table and record.", _DEFAUL_STRING, _DEFAUL_STRING); record.setStringValue(DeveloperIdentityDatabaseConstants.DEVELOPER_DEVELOPER_PUBLIC_KEY_COLUMN_NAME, developerKeyPair.getPublicKey()); record.setStringValue(DeveloperIdentityDatabaseConstants.DEVELOPER_DEVELOPER_PRIVATE_KEY_COLUMN_NAME, developerKeyPair.getPrivateKey()); record.setStringValue(DeveloperIdentityDatabaseConstants.DEVELOPER_DEVICE_USER_PUBLIC_KEY_COLUMN_NAME, deviceUser.getPublicKey()); record.setStringValue(DeveloperIdentityDatabaseConstants.DEVELOPER_DEVELOPER_ALIAS_COLUMN_NAME, alias);//deviceUser.getAlias() // logManager.log(DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Inserting [Alias=" + deviceUser.getAlias() + ", PK=" + developerKeyPair.getPublicKey() + "] in " + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + " table and record.", _DEFAUL_STRING, _DEFAUL_STRING); table.insertRecord(record); // logManager.log(DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "New developer record created [" + developerKeyPair.getPublicKey() + "]", _DEFAUL_STRING, _DEFAUL_STRING); } catch (CantInsertRecordException e){ // Cant insert record. throw new CantCreateNewDeveloperException (e.getMessage(), e, "Plugin Identity", "Cant create new developer, insert database problems."); } catch (Exception e) { // Failure unknown. throw new CantCreateNewDeveloperException (e.getMessage(), e, "Plugin Identity", "Cant create new developer, unknown failure."); } // Return the new developer. return new DeveloperIdentityRecord (alias,developerKeyPair.getPublicKey(),developerKeyPair.getPrivateKey()); } /** * Method that list the developers related to the parametrized device user. * * @param deviceUser device user * @throws CantGetUserDeveloperIdentitiesException */ public List<DeveloperIdentity> getDevelopersFromCurrentDeviceUser (DeviceUser deviceUser) throws CantGetUserDeveloperIdentitiesException { // Setup method. List<DeveloperIdentity> list = new ArrayList<DeveloperIdentity> (); // Developer list. DatabaseTable table; // Developer table. // Check the arguments. if (deviceUser == null) { // Cancel the process. throw new CantGetUserDeveloperIdentitiesException ("Cant get developers from current device, arguments are null or empty.", "Plugin Identity", "Cant get developers from current device, arguments are null or empty."); } if (this.dataBase == null) { // Cancel the process. throw new CantGetUserDeveloperIdentitiesException ("Cant get developers from current device, Database is closed o null.", "Plugin Identity", "Cant get developers from current device, Database is closed o null."); } // Get developers identities list. try { logManager.log (DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Getting developer list.", _DEFAUL_STRING, _DEFAUL_STRING); // 1) Get the table. logManager.log (DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Getting " + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + " table and record.", _DEFAUL_STRING, _DEFAUL_STRING); table = this.dataBase.getTable (DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME); if (table == null) { // Table not found. throw new CantGetUserDeveloperIdentitiesException ("Cant get developer identity list, table not \" + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + \" found.", "Plugin Identity", "Cant get developer identity list, table not \" + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + \" found."); } // 2) Find the developers. logManager.log (DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Applying filter to " + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + " table by developer public key [" + deviceUser.getPublicKey() + "].", _DEFAUL_STRING, _DEFAUL_STRING); table.setStringFilter(DeveloperIdentityDatabaseConstants.DEVELOPER_DEVELOPER_PUBLIC_KEY_COLUMN_NAME, deviceUser.getPublicKey(), DatabaseFilterType.EQUAL); table.loadToMemory(); // 3) Get developers. logManager.log (DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Developer identity found (" + table.getRecords ().size () + ") by public key [" + deviceUser.getPublicKey () + "].", _DEFAUL_STRING, _DEFAUL_STRING); for (DatabaseTableRecord record : table.getRecords ()) { // Add records to list. list.add(new DeveloperIdentityRecord (record.getStringValue(DeveloperIdentityDatabaseConstants.DEVELOPER_DEVELOPER_ALIAS_COLUMN_NAME), record.getStringValue (DeveloperIdentityDatabaseConstants.DEVELOPER_DEVELOPER_PUBLIC_KEY_COLUMN_NAME),record.getStringValue (DeveloperIdentityDatabaseConstants.DEVELOPER_DEVELOPER_PRIVATE_KEY_COLUMN_NAME))); } } catch (CantLoadTableToMemoryException em) { // Failure unknown. throw new CantGetUserDeveloperIdentitiesException (em.getMessage(), em, "Plugin Identity", "Cant load " + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + " table in memory."); } catch (Exception e) { // Failure unknown. throw new CantGetUserDeveloperIdentitiesException (e.getMessage(), FermatException.wrapException(e), "Plugin Identity", "Cant get developer identity list, unknown failure."); } // Return the list values. return list; } }
PIP/plugin/identity/fermat-pip-plugin-identity-developer-bitdubai/src/main/java/com/bitdubai/fermat_pip_plugin/layer/identity/developer/developer/bitdubai/version_1/structure/DeveloperIdentityDao.java
/* * @(#DeveloperIdentityDao.java 07/16/2015 * Copyright 2015 bitDubai, Inc. All rights reserved. * BITDUBAI/CONFIDENTIAL * */ package com.bitdubai.fermat_pip_plugin.layer.identity.developer.developer.bitdubai.version_1.structure; // Packages and classes to import of jdk 1.7 import java.util.*; // Packages and classes to import of fermat api import com.bitdubai.fermat_api.DealsWithPluginIdentity; import com.bitdubai.fermat_api.layer.all_definition.crypto.asymmetric.ECCKeyPair; import com.bitdubai.fermat_api.layer.osa_android.database_system.Database; import com.bitdubai.fermat_api.layer.osa_android.database_system.DatabaseFilterType; import com.bitdubai.fermat_api.layer.osa_android.database_system.DatabaseTable; import com.bitdubai.fermat_api.layer.osa_android.database_system.DatabaseTableRecord; import com.bitdubai.fermat_api.layer.osa_android.database_system.DealsWithPluginDatabaseSystem; import com.bitdubai.fermat_api.layer.osa_android.database_system.PluginDatabaseSystem; import com.bitdubai.fermat_api.layer.osa_android.database_system.exceptions.CantCreateDatabaseException; import com.bitdubai.fermat_api.layer.osa_android.database_system.exceptions.CantOpenDatabaseException; import com.bitdubai.fermat_api.layer.osa_android.logger_system.LogLevel; import com.bitdubai.fermat_api.layer.osa_android.logger_system.LogManager; import com.bitdubai.fermat_pip_api.layer.pip_identity.developer.interfaces.DeveloperIdentity; import com.bitdubai.fermat_pip_api.layer.pip_user.device_user.interfaces.DeviceUser; import com.bitdubai.fermat_pip_api.layer.pip_identity.developer.exceptions.CantGetUserDeveloperIdentitiesException; import com.bitdubai.fermat_api.layer.osa_android.database_system.exceptions.CantLoadTableToMemoryException; import com.bitdubai.fermat_api.layer.osa_android.database_system.exceptions.CantInsertRecordException; import com.bitdubai.fermat_pip_api.layer.pip_identity.developer.exceptions.CantCreateNewDeveloperException; import com.bitdubai.fermat_pip_plugin.layer.identity.developer.developer.bitdubai.version_1.DeveloperIdentityPluginRoot; import com.bitdubai.fermat_pip_plugin.layer.identity.developer.developer.bitdubai.version_1.exceptions.CantInitializeDeveloperIdentityDatabaseException; // Packages and classes to import of apache commons. import static org.apache.commons.lang3.StringUtils.isEmpty; /** * The Class <code>com.bitdubai.fermat_pip_plugin.layer.identity.developer.developer.bitdubai.version_1.structure.DeveloperIdentityDao</code> * all methods implementation to access the data base<p/> * <p/> * * Created by Leon Acosta - ([email protected]) on 14/07/15. * Updated by Raul Pena - ([email protected]) on 16/07/15. * * @version 1.0 * @since Java JDK 1.7 */ public class DeveloperIdentityDao implements DealsWithPluginDatabaseSystem, DealsWithPluginIdentity { // Private instance fields declarations. // DealsWithPluginDatabaseSystem Interface member variables. private PluginDatabaseSystem pluginDatabaseSystem = null; // Database factory private DeveloperIdentityDatabaseFactory databaseFactory = null; // Database object. private Database dataBase = null; private UUID pluginId = null; // DealsWithlogManager interface member variable. private LogManager logManager = null; // Private class fields declarations. // Blank target. private static final String _DEFAUL_STRING = ""; // Public constructor declarations. /** * * <p>Constructor without parameters. * * * */ public DeveloperIdentityDao () { // Call to super class. super (); } /** * * <p>Constructor with parameters. * * @param pluginDatabaseSystem * @param databaseFactory * @param pluginId * * */ public DeveloperIdentityDao (PluginDatabaseSystem pluginDatabaseSystem, DeveloperIdentityDatabaseFactory databaseFactory, UUID pluginId,LogManager logManager) { // Call to super class. super (); // Set internal values. this.pluginDatabaseSystem = pluginDatabaseSystem; this.databaseFactory = databaseFactory; this.pluginId = pluginId; this.logManager = logManager; } // Private instance methods declarations. /* * * <p>Method that check if alias exists. * * @param alias * @return Boolean that indicate if the alias exists or not. * */ private boolean aliasExists (String alias) throws CantCreateNewDeveloperException { // Setup method. DatabaseTable table; // Developer table. // Check the arguments. if (isEmpty (alias)) { // Cancel the process. logManager.log (DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Alias is empty, arguments are null or empty.", _DEFAUL_STRING, _DEFAUL_STRING); return Boolean.FALSE; } if (this.dataBase == null) { // Cancel the process. throw new CantCreateNewDeveloperException ("Cant check if alias exists or not, Database is closed o null.", "Plugin Identity", "Cant check if alias exists or not, Database is closed o null."); } // Get developers identities list. try { logManager.log (DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Checking if alias " + alias + " exists.", _DEFAUL_STRING, _DEFAUL_STRING); // 1) Get the table. logManager.log(DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Getting " + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + " table and record.", _DEFAUL_STRING, _DEFAUL_STRING); table = this.dataBase.getTable (DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME); if (table == null) { // Table not found. logManager.log (DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Cant check if alias exists, table not " + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + " found.", _DEFAUL_STRING, _DEFAUL_STRING); throw new CantGetUserDeveloperIdentitiesException("Cant check if alias exists, table not \" + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + \" found.", "Plugin Identity", "Cant check if alias exists, table not \" + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + \" found."); } // 2) Find the developers. logManager.log(DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Applying filter to " + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + " table by developer alias key [" + alias.trim() + "].", _DEFAUL_STRING, _DEFAUL_STRING); table.setStringFilter(DeveloperIdentityDatabaseConstants.DEVELOPER_DEVELOPER_ALIAS_COLUMN_NAME, alias, DatabaseFilterType.EQUAL); table.loadToMemory(); // 3) Get developers. logManager.log(DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Developer alias found (" + table.getRecords().size() + ") by alias [" + alias + "].", _DEFAUL_STRING, _DEFAUL_STRING); return table.getRecords ().size () > 0; } catch (CantLoadTableToMemoryException em) { // Failure unknown. throw new CantCreateNewDeveloperException (em.getMessage(), em, "Plugin Identity", "Cant load " + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + " table in memory."); } catch (Exception e) { // Failure unknown. throw new CantCreateNewDeveloperException (e.getMessage(), e, "Plugin Identity", "Cant check if alias exists, unknown failure."); } } // Public instance methods declarations extends of com.bitdubai.fermat_api.layer.osa_android.database_system.DealsWithPluginDatabaseSystem /** * DealsWithPluginDatabaseSystem Interface implementation. */ @Override public void setPluginDatabaseSystem (PluginDatabaseSystem pluginDatabaseSystem) { // Set internal values. this.pluginDatabaseSystem = pluginDatabaseSystem; } /** * DealsWithPluginIdentity Interface implementation. */ @Override public void setPluginId (UUID pluginId) { // Set value. this.pluginId = pluginId; } // Public instance methods declarations. /** * * <p>Method tha set the Database factory. * */ public void setDeveloperIdentityDatabaseFactory (DeveloperIdentityDatabaseFactory databaseFactory) { // Set the value. this.databaseFactory = databaseFactory; } /** * This method open or creates the database i'll be working with. * * @param ownerId plugin id * @throws CantInitializeDeveloperIdentityDatabaseException */ public void initializeDatabase (UUID ownerId) throws CantInitializeDeveloperIdentityDatabaseException { // Check the arguments. if (ownerId == null) { // Cancel the process. throw new CantInitializeDeveloperIdentityDatabaseException ("Cant create database, arguments are null or empty.", "Plugin Identity", "Cant create database, arguments are null or empty."); } // Create the database. try { logManager.log(DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Initializing identity database system...", _DEFAUL_STRING, _DEFAUL_STRING); this.databaseFactory.setPluginDatabaseSystem(this.pluginDatabaseSystem); //Check the database exist try { this.dataBase = pluginDatabaseSystem.openDatabase(pluginId, DeveloperIdentityDatabaseConstants.DEVELOPER_DB_NAME); } catch (CantOpenDatabaseException e) { this.dataBase = this.databaseFactory.createDatabase (ownerId, DeveloperIdentityDatabaseConstants.DEVELOPER_DB_NAME ); } logManager.log(DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Identity database initialized..." ,_DEFAUL_STRING, _DEFAUL_STRING); } catch (CantCreateDatabaseException e){ throw new CantInitializeDeveloperIdentityDatabaseException(e.getMessage(), e, "Plugin Identity", "Cant create database."); } catch (Exception e) { // Failure unknown. throw new CantInitializeDeveloperIdentityDatabaseException (e.getMessage(), e, "Plugin Identity", "Cant create database, unknown failure."); } } /** * Method that create a new developer in the database. * * @param alias alias of developer * @param developerKeyPair new private and public key for the developer * @param deviceUser logged in device user * @return DeveloperIdentity * @throws CantCreateNewDeveloperException */ public DeveloperIdentity createNewDeveloper (String alias, ECCKeyPair developerKeyPair, DeviceUser deviceUser) throws CantCreateNewDeveloperException { // Check the arguments. if (developerKeyPair == null || isEmpty (alias) || deviceUser == null) { // Cancel the process. throw new CantCreateNewDeveloperException ("Cant create new developer, arguments are null or empty.", "Plugin Identity", "Cant create database, arguments are null or empty."); } if (this.dataBase == null) { // Cancel the process. throw new CantCreateNewDeveloperException ("Cant create new developer, Database is closed o null.", "Plugin Identity", "Cant create new developer, Database is closed o null."); } // Create the new developer. try { logManager.log (DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Initializing developer record creation.", _DEFAUL_STRING, _DEFAUL_STRING); if (aliasExists (alias)) { throw new CantCreateNewDeveloperException ("Cant create new developer, alias exists.", "Plugin Identity", "Cant create new developer, alias exists."); } logManager.log (DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Getting " + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + " table and record.", _DEFAUL_STRING, _DEFAUL_STRING); DatabaseTable table = this.dataBase.getTable (DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME); DatabaseTableRecord record = table.getEmptyRecord (); logManager.log(DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Getting " + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + " table and record.", _DEFAUL_STRING, _DEFAUL_STRING); record.setStringValue(DeveloperIdentityDatabaseConstants.DEVELOPER_DEVELOPER_PUBLIC_KEY_COLUMN_NAME, developerKeyPair.getPublicKey()); record.setStringValue(DeveloperIdentityDatabaseConstants.DEVELOPER_DEVELOPER_PRIVATE_KEY_COLUMN_NAME, developerKeyPair.getPrivateKey()); record.setStringValue(DeveloperIdentityDatabaseConstants.DEVELOPER_DEVICE_USER_PUBLIC_KEY_COLUMN_NAME, deviceUser.getPublicKey()); record.setStringValue(DeveloperIdentityDatabaseConstants.DEVELOPER_DEVELOPER_ALIAS_COLUMN_NAME, alias);//deviceUser.getAlias() // logManager.log(DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Inserting [Alias=" + deviceUser.getAlias() + ", PK=" + developerKeyPair.getPublicKey() + "] in " + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + " table and record.", _DEFAUL_STRING, _DEFAUL_STRING); table.insertRecord(record); // logManager.log(DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "New developer record created [" + developerKeyPair.getPublicKey() + "]", _DEFAUL_STRING, _DEFAUL_STRING); } catch (CantInsertRecordException e){ // Cant insert record. throw new CantCreateNewDeveloperException (e.getMessage(), e, "Plugin Identity", "Cant create new developer, insert database problems."); } catch (Exception e) { // Failure unknown. throw new CantCreateNewDeveloperException (e.getMessage(), e, "Plugin Identity", "Cant create new developer, unknown failure."); } // Return the new developer. return new DeveloperIdentityRecord (alias,developerKeyPair.getPublicKey(),developerKeyPair.getPrivateKey()); } /** * Method that list the developers related to the parametrized device user. * * @param deviceUser device user * @throws CantGetUserDeveloperIdentitiesException */ public List<DeveloperIdentity> getDevelopersFromCurrentDeviceUser (DeviceUser deviceUser) throws CantGetUserDeveloperIdentitiesException { // Setup method. List<DeveloperIdentity> list = new ArrayList<DeveloperIdentity> (); // Developer list. DatabaseTable table; // Developer table. // Check the arguments. if (deviceUser == null) { // Cancel the process. throw new CantGetUserDeveloperIdentitiesException ("Cant get developers from current device, arguments are null or empty.", "Plugin Identity", "Cant get developers from current device, arguments are null or empty."); } if (this.dataBase == null) { // Cancel the process. throw new CantGetUserDeveloperIdentitiesException ("Cant get developers from current device, Database is closed o null.", "Plugin Identity", "Cant get developers from current device, Database is closed o null."); } // Get developers identities list. try { logManager.log (DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Getting developer list.", _DEFAUL_STRING, _DEFAUL_STRING); // 1) Get the table. logManager.log (DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Getting " + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + " table and record.", _DEFAUL_STRING, _DEFAUL_STRING); table = this.dataBase.getTable (DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME); if (table == null) { // Table not found. throw new CantGetUserDeveloperIdentitiesException ("Cant get developer identity list, table not \" + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + \" found.", "Plugin Identity", "Cant get developer identity list, table not \" + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + \" found."); } // 2) Find the developers. logManager.log (DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Applying filter to " + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + " table by developer public key [" + deviceUser.getPublicKey() + "].", _DEFAUL_STRING, _DEFAUL_STRING); table.setStringFilter(DeveloperIdentityDatabaseConstants.DEVELOPER_DEVELOPER_PUBLIC_KEY_COLUMN_NAME, deviceUser.getPublicKey(), DatabaseFilterType.EQUAL); table.loadToMemory(); // 3) Get developers. logManager.log (DeveloperIdentityPluginRoot.getLogLevelByClass(this.getClass().getName()), "Developer identity found (" + table.getRecords ().size () + ") by public key [" + deviceUser.getPublicKey () + "].", _DEFAUL_STRING, _DEFAUL_STRING); for (DatabaseTableRecord record : table.getRecords ()) { // Add records to list. list.add(new DeveloperIdentityRecord (record.getStringValue(DeveloperIdentityDatabaseConstants.DEVELOPER_DEVELOPER_ALIAS_COLUMN_NAME), record.getStringValue (DeveloperIdentityDatabaseConstants.DEVELOPER_DEVELOPER_PUBLIC_KEY_COLUMN_NAME),record.getStringValue (DeveloperIdentityDatabaseConstants.DEVELOPER_DEVELOPER_PRIVATE_KEY_COLUMN_NAME))); } } catch (CantLoadTableToMemoryException em) { // Failure unknown. throw new CantGetUserDeveloperIdentitiesException (em.getMessage(), em, "Plugin Identity", "Cant load " + DeveloperIdentityDatabaseConstants.DEVELOPER_TABLE_NAME + " table in memory."); } catch (Exception e) { // Failure unknown. throw new CantGetUserDeveloperIdentitiesException (e.getMessage(), e, "Plugin Identity", "Cant get developer identity list, unknown failure."); } // Return the list values. return list; } }
Fix #937 Corregir errores en el Plugin Developer de la capa Identity
PIP/plugin/identity/fermat-pip-plugin-identity-developer-bitdubai/src/main/java/com/bitdubai/fermat_pip_plugin/layer/identity/developer/developer/bitdubai/version_1/structure/DeveloperIdentityDao.java
Fix #937 Corregir errores en el Plugin Developer de la capa Identity
Java
epl-1.0
1e79f6ae9c41846638afa62d396e37be1137e159
0
bendisposto/prob2-ui,bendisposto/prob2-ui,bendisposto/prob2-ui,bendisposto/prob2-ui
package de.prob2.ui.internal; import com.google.inject.Inject; import com.google.inject.Injector; import de.prob2.ui.menu.FileMenu; import de.prob2.ui.prob2fx.CurrentTrace; import javafx.fxml.FXML; import javafx.scene.control.Button; import javafx.scene.layout.HBox; public final class NavigationButtons extends HBox { @FXML private Button backButton; @FXML private Button fastBackButton; @FXML private Button forwardButton; @FXML private Button fastForwardButton; @FXML private Button reloadButton; private final CurrentTrace currentTrace; private final Injector injector; @Inject private NavigationButtons(final StageManager stageManager, final CurrentTrace currentTrace, final Injector injector) { super(); this.currentTrace = currentTrace; this.injector = injector; stageManager.loadFXML(this, "navigation_buttons.fxml"); } @FXML private void initialize() { backButton.disableProperty().bind(currentTrace.canGoBackProperty().not()); fastBackButton.disableProperty().bind(currentTrace.canGoBackProperty().not()); forwardButton.disableProperty().bind(currentTrace.canGoForwardProperty().not()); fastForwardButton.disableProperty().bind(currentTrace.canGoForwardProperty().not()); reloadButton.disableProperty().bind(currentTrace.existsProperty().not()); } @FXML private void handleBackButton() { if (currentTrace.exists()) { currentTrace.set(currentTrace.back()); } } @FXML private void handleFastBackButton() { if (currentTrace.exists()) { currentTrace.set(currentTrace.get().gotoPosition(-1)); } } @FXML private void handleForwardButton() { if (currentTrace.exists()) { currentTrace.set(currentTrace.forward()); } } @FXML private void handleFastForwardButton() { if (currentTrace.exists()) { currentTrace.set(currentTrace.get().gotoPosition(currentTrace.get().size()-1)); } } @FXML private void handleReloadButton() { injector.getInstance(FileMenu.class).handleReloadMachine(); } }
src/main/java/de/prob2/ui/internal/NavigationButtons.java
package de.prob2.ui.internal; import com.google.inject.Inject; import com.google.inject.Injector; import de.prob2.ui.menu.FileMenu; import de.prob2.ui.prob2fx.CurrentTrace; import javafx.fxml.FXML; import javafx.scene.control.Button; import javafx.scene.layout.HBox; public final class NavigationButtons extends HBox { @FXML private Button backButton; @FXML private Button fastBackButton; @FXML private Button forwardButton; @FXML private Button fastForwardButton; @FXML private Button reloadButton; private final CurrentTrace currentTrace; private final Injector injector; @Inject private NavigationButtons(final StageManager stageManager, final CurrentTrace currentTrace, final Injector injector) { super(); this.currentTrace = currentTrace; this.injector = injector; stageManager.loadFXML(this, "navigation_buttons.fxml"); } @FXML private void initialize() { backButton.disableProperty().bind(currentTrace.canGoBackProperty().not()); fastBackButton.disableProperty().bind(currentTrace.canGoBackProperty().not()); forwardButton.disableProperty().bind(currentTrace.canGoForwardProperty().not()); fastForwardButton.disableProperty().bind(currentTrace.canGoForwardProperty().not()); reloadButton.disableProperty().bind(currentTrace.existsProperty().not()); } @FXML private void handleBackButton() { if (currentTrace.exists()) { currentTrace.set(currentTrace.back()); } } @FXML private void handleFastBackButton() { if (currentTrace.exists()) { while(currentTrace.canGoBack()) { currentTrace.set(currentTrace.back()); } } } @FXML private void handleForwardButton() { if (currentTrace.exists()) { currentTrace.set(currentTrace.forward()); } } @FXML private void handleFastForwardButton() { if (currentTrace.exists()) { while(currentTrace.canGoForward()) { currentTrace.set(currentTrace.forward()); } } } @FXML private void handleReloadButton() { injector.getInstance(FileMenu.class).handleReloadMachine(); } }
Use gotoPosition instead of loop in fast back/forward implementation
src/main/java/de/prob2/ui/internal/NavigationButtons.java
Use gotoPosition instead of loop in fast back/forward implementation
Java
epl-1.0
115709ee9b7cfdd06c1fc87c15a00f57ed6fabc3
0
vadimnehta/mdht,vadimnehta/mdht,mdht/mdht,drbgfc/mdht,sarpkayanehta/mdht,vadimnehta/mdht,sarpkayanehta/mdht,mdht/mdht,sarpkayanehta/mdht,sarpkayanehta/mdht,drbgfc/mdht,mdht/mdht,mdht/mdht,sarpkayanehta/mdht,vadimnehta/mdht,vadimnehta/mdht,drbgfc/mdht,mdht/mdht,drbgfc/mdht,drbgfc/mdht,sarpkayanehta/mdht,drbgfc/mdht,vadimnehta/mdht
/******************************************************************************* * Copyright (c) 2006, 2011 David A Carlson and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * David A Carlson (XMLmodeling.com) - initial API and implementation * Kenn Hussey - adding support for showing business names (or not) * * $Id$ *******************************************************************************/ package org.openhealthtools.mdht.uml.edit.provider; import java.util.ArrayList; import java.util.Collection; import java.util.List; import org.eclipse.emf.common.notify.AdapterFactory; import org.eclipse.emf.edit.provider.ITableItemLabelProvider; import org.eclipse.jface.viewers.ICellModifier; import org.eclipse.uml2.uml.Class; import org.eclipse.uml2.uml.Classifier; import org.eclipse.uml2.uml.Element; import org.eclipse.uml2.uml.NamedElement; import org.eclipse.uml2.uml.Profile; import org.eclipse.uml2.uml.Property; import org.eclipse.uml2.uml.VisibilityKind; import org.eclipse.uml2.uml.edit.providers.ClassItemProvider; import org.openhealthtools.mdht.uml.common.notation.ClassNotationUtil; import org.openhealthtools.mdht.uml.common.notation.INotationProvider; import org.openhealthtools.mdht.uml.common.notation.IUMLNotation; import org.openhealthtools.mdht.uml.common.notation.NotationRegistry; import org.openhealthtools.mdht.uml.common.util.NamedElementUtil; import org.openhealthtools.mdht.uml.edit.IUMLTableProperties; import org.openhealthtools.mdht.uml.edit.provider.operations.NamedElementOperations; /** * * @version $Id: $ */ public class ClassExtItemProvider extends ClassItemProvider implements ITableItemLabelProvider, ICellModifier { /** * @param adapterFactory */ public ClassExtItemProvider(AdapterFactory adapterFactory) { super(adapterFactory); } /* (non-Javadoc) * @see org.eclipse.uml2.uml.provider.ClassItemProvider#getImage(java.lang.Object) */ public Object getImage(Object object) { return super.getImage(object); } protected String getName(NamedElement namedElement) { AdapterFactory adapterFactory = getAdapterFactory(); return adapterFactory instanceof UML2ExtendedAdapterFactory && ((UML2ExtendedAdapterFactory) adapterFactory) .isShowBusinessNames() ? NamedElementUtil .getBusinessName(namedElement) : namedElement.getName(); } /* (non-Javadoc) * @see org.eclipse.uml2.uml.provider.ClassItemProvider#getText(java.lang.Object) */ public String getText(Object object) { String label = getName((org.eclipse.uml2.uml.Class) object); return label == null || label.length() == 0 ? getString("_UI_Class_type") : //$NON-NLS-1$ label; } /* (non-Javadoc) * @see org.eclipse.emf.edit.provider.ItemProviderAdapter#getChildren(java.lang.Object) */ public Collection<Element> getChildren(Object object) { Class clazz = (Class) object; List<Element> children = new ArrayList<Element>(); children.addAll(clazz.getOwnedComments()); for (Property property : clazz.getOwnedAttributes()) { if (property.getAssociation() == null) { children.add(property); } } // include associations after attributes for (Property property : clazz.getOwnedAttributes()) { if (property.getAssociation() != null && property.getOtherEnd().getType() == clazz) { children.add(property.getAssociation()); } } children.addAll(clazz.getOwnedOperations()); children.addAll(clazz.getNestedClassifiers()); children.addAll(clazz.getOwnedRules()); children.addAll(clazz.getClientDependencies()); children.addAll(clazz.getGeneralizations()); return children; } public Object getColumnImage(Object object, int columnIndex) { switch (columnIndex) { case IUMLTableProperties.NAME_INDEX: return getImage(object); default: return null; } } public String getColumnText(Object element, int columnIndex) { Class classifier = (Class) element; switch (columnIndex) { case IUMLTableProperties.NAME_INDEX: return getName(classifier); case IUMLTableProperties.VISIBILITY_INDEX: if (VisibilityKind.PUBLIC_LITERAL == classifier.getVisibility()) return ""; else return classifier.getVisibility().getName(); case IUMLTableProperties.ANNOTATION_INDEX: { for (Profile profile : classifier.getNearestPackage().getAllAppliedProfiles()) { // eResource is null for unresolved eProxyURI, missing profiles if (profile.eResource() != null) { // use the first notation provider found for an applied profile, ignore others String profileURI = profile.eResource().getURI().toString(); INotationProvider provider = NotationRegistry.INSTANCE.getProviderInstance(profileURI); if (provider != null) { return provider.getAnnotation(classifier); } } } return ClassNotationUtil.getCustomLabel(classifier, IUMLNotation.DEFAULT_UML_CLASS_ANNOTATIONS); } default: return null; } } /* (non-Javadoc) * @see org.eclipse.jface.viewers.ICellModifier#canModify(java.lang.Object, java.lang.String) */ public boolean canModify(Object element, String property) { if (IUMLTableProperties.NAME_PROPERTY.equals(property)) { return true; } else if (IUMLTableProperties.VISIBILITY_PROPERTY.equals(property)) { return true; } return false; } /* (non-Javadoc) * @see org.eclipse.jface.viewers.ICellModifier#getValue(java.lang.Object, java.lang.String) */ public Object getValue(Object element, String property) { Classifier classifier = (Classifier) element; if (IUMLTableProperties.NAME_PROPERTY.equals(property)) { return classifier.getName(); } else if (IUMLTableProperties.VISIBILITY_PROPERTY.equals(property)) { return new Integer(classifier.getVisibility().getValue()); } return null; } /* (non-Javadoc) * @see org.eclipse.jface.viewers.ICellModifier#modify(java.lang.Object, java.lang.String, java.lang.Object) */ public void modify(final Object element, final String property, final Object value) { NamedElementOperations.modify(element, property, value); } }
core/plugins/org.openhealthtools.mdht.uml.edit/src/org/openhealthtools/mdht/uml/edit/provider/ClassExtItemProvider.java
/******************************************************************************* * Copyright (c) 2006, 2011 David A Carlson and others. * All rights reserved. This program and the accompanying materials * are made available under the terms of the Eclipse Public License v1.0 * which accompanies this distribution, and is available at * http://www.eclipse.org/legal/epl-v10.html * * Contributors: * David A Carlson (XMLmodeling.com) - initial API and implementation * Kenn Hussey - adding support for showing business names (or not) * * $Id$ *******************************************************************************/ package org.openhealthtools.mdht.uml.edit.provider; import java.util.ArrayList; import java.util.Collection; import java.util.List; import org.eclipse.emf.common.notify.AdapterFactory; import org.eclipse.emf.edit.provider.ITableItemLabelProvider; import org.eclipse.jface.viewers.ICellModifier; import org.eclipse.uml2.uml.Class; import org.eclipse.uml2.uml.Classifier; import org.eclipse.uml2.uml.Element; import org.eclipse.uml2.uml.NamedElement; import org.eclipse.uml2.uml.Profile; import org.eclipse.uml2.uml.Property; import org.eclipse.uml2.uml.VisibilityKind; import org.eclipse.uml2.uml.edit.providers.ClassItemProvider; import org.openhealthtools.mdht.uml.common.notation.ClassNotationUtil; import org.openhealthtools.mdht.uml.common.notation.INotationProvider; import org.openhealthtools.mdht.uml.common.notation.IUMLNotation; import org.openhealthtools.mdht.uml.common.notation.NotationRegistry; import org.openhealthtools.mdht.uml.common.util.NamedElementUtil; import org.openhealthtools.mdht.uml.edit.IUMLTableProperties; import org.openhealthtools.mdht.uml.edit.provider.operations.NamedElementOperations; /** * * @version $Id: $ */ public class ClassExtItemProvider extends ClassItemProvider implements ITableItemLabelProvider, ICellModifier { /** * @param adapterFactory */ public ClassExtItemProvider(AdapterFactory adapterFactory) { super(adapterFactory); } /* (non-Javadoc) * @see org.eclipse.uml2.uml.provider.ClassItemProvider#getImage(java.lang.Object) */ public Object getImage(Object object) { return super.getImage(object); } protected String getName(NamedElement namedElement) { AdapterFactory adapterFactory = getAdapterFactory(); return adapterFactory instanceof UML2ExtendedAdapterFactory && ((UML2ExtendedAdapterFactory) adapterFactory) .isShowBusinessNames() ? NamedElementUtil .getBusinessName(namedElement) : namedElement.getName(); } /* (non-Javadoc) * @see org.eclipse.uml2.uml.provider.ClassItemProvider#getText(java.lang.Object) */ public String getText(Object object) { String label = getName((org.eclipse.uml2.uml.Class) object); return label == null || label.length() == 0 ? getString("_UI_Class_type") : //$NON-NLS-1$ label; } /* (non-Javadoc) * @see org.eclipse.emf.edit.provider.ItemProviderAdapter#getChildren(java.lang.Object) */ public Collection<Element> getChildren(Object object) { Class clazz = (Class) object; List<Element> children = new ArrayList<Element>(); children.addAll(clazz.getOwnedComments()); for (Property property : clazz.getOwnedAttributes()) { if (property.getAssociation() == null) { children.add(property); } } // include associations after attributes for (Property property : clazz.getOwnedAttributes()) { if (property.getAssociation() != null && property.getOtherEnd().getType() == clazz) { children.add(property.getAssociation()); } } children.addAll(clazz.getOwnedOperations()); children.addAll(clazz.getOwnedRules()); children.addAll(clazz.getNestedClassifiers()); children.addAll(clazz.getClientDependencies()); children.addAll(clazz.getGeneralizations()); return children; } public Object getColumnImage(Object object, int columnIndex) { switch (columnIndex) { case IUMLTableProperties.NAME_INDEX: return getImage(object); default: return null; } } public String getColumnText(Object element, int columnIndex) { Class classifier = (Class) element; switch (columnIndex) { case IUMLTableProperties.NAME_INDEX: return getName(classifier); case IUMLTableProperties.VISIBILITY_INDEX: if (VisibilityKind.PUBLIC_LITERAL == classifier.getVisibility()) return ""; else return classifier.getVisibility().getName(); case IUMLTableProperties.ANNOTATION_INDEX: { for (Profile profile : classifier.getNearestPackage().getAllAppliedProfiles()) { // eResource is null for unresolved eProxyURI, missing profiles if (profile.eResource() != null) { // use the first notation provider found for an applied profile, ignore others String profileURI = profile.eResource().getURI().toString(); INotationProvider provider = NotationRegistry.INSTANCE.getProviderInstance(profileURI); if (provider != null) { return provider.getAnnotation(classifier); } } } return ClassNotationUtil.getCustomLabel(classifier, IUMLNotation.DEFAULT_UML_CLASS_ANNOTATIONS); } default: return null; } } /* (non-Javadoc) * @see org.eclipse.jface.viewers.ICellModifier#canModify(java.lang.Object, java.lang.String) */ public boolean canModify(Object element, String property) { if (IUMLTableProperties.NAME_PROPERTY.equals(property)) { return true; } else if (IUMLTableProperties.VISIBILITY_PROPERTY.equals(property)) { return true; } return false; } /* (non-Javadoc) * @see org.eclipse.jface.viewers.ICellModifier#getValue(java.lang.Object, java.lang.String) */ public Object getValue(Object element, String property) { Classifier classifier = (Classifier) element; if (IUMLTableProperties.NAME_PROPERTY.equals(property)) { return classifier.getName(); } else if (IUMLTableProperties.VISIBILITY_PROPERTY.equals(property)) { return new Integer(classifier.getVisibility().getValue()); } return null; } /* (non-Javadoc) * @see org.eclipse.jface.viewers.ICellModifier#modify(java.lang.Object, java.lang.String, java.lang.Object) */ public void modify(final Object element, final String property, final Object value) { NamedElementOperations.modify(element, property, value); } }
Move nested classifiers before owned rules.
core/plugins/org.openhealthtools.mdht.uml.edit/src/org/openhealthtools/mdht/uml/edit/provider/ClassExtItemProvider.java
Move nested classifiers before owned rules.
Java
mpl-2.0
a3814a28c0547301edf2a7aa136e78ae3d234fe5
0
etomica/etomica,etomica/etomica,etomica/etomica
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package etomica.virial.simulations; import etomica.action.IAction; import etomica.atom.AtomType; import etomica.box.Box; import etomica.chem.elements.*; import etomica.data.histogram.HistogramSimple; import etomica.graphics.ColorSchemeRandomByMolecule; import etomica.graphics.DisplayBox; import etomica.graphics.DisplayBoxCanvasG3DSys; import etomica.graphics.SimulationGraphic; import etomica.integrator.IntegratorEvent; import etomica.integrator.IntegratorListener; import etomica.math.DoubleRange; import etomica.math.SpecialFunctions; import etomica.molecule.IMoleculeList; import etomica.molecule.MoleculePositionCOM; import etomica.potential.IPotential; import etomica.potential.P2PotentialGroupBuilder; import etomica.potential.PotentialGroup; import etomica.space.Space; import etomica.space.Vector; import etomica.space3d.Space3D; import etomica.space3d.Vector3D; import etomica.species.Species; import etomica.species.SpeciesBuilder; import etomica.units.Degree; import etomica.units.Electron; import etomica.units.Kelvin; import etomica.util.ParameterBase; import etomica.util.ParseArgs; import etomica.util.random.RandomMersenneTwister; import etomica.virial.*; import etomica.virial.cluster.Standard; import java.awt.*; import java.util.Arrays; /** * Compute pure and binary mixture virial coefficients using overlap sampling simulations * for some molecules using the TraPPE force fields. * */ public class VirialTraPPE { public static void main(String[] args) { VirialParam params = new VirialParam(); boolean isCommandline = args.length > 0; if (isCommandline) { ParseArgs.doParseArgs(params, args); } else { // customize parameters here params.chemForm = new ChemForm[]{ChemForm.N2,ChemForm.O2}; params.nPoints = 2; params.nTypes = new int[]{1,1}; params.nDer = 3; params.temperature = 450; params.numSteps = 1000000; params.refFrac = -1; params.sigmaHSRef = 5; params.seed = null; params.doHist = false; params.dorefpref = false; params.doChainRef = true; params.BDtol = 1e-12; } final ChemForm[] chemForm = params.chemForm; final int nPoints = params.nPoints; final int[] nTypes = params.nTypes; final int nDer = params.nDer; final double temperatureK = params.temperature; final long steps = params.numSteps; double refFrac = params.refFrac; double sigmaHSRef = params.sigmaHSRef; int[] seed = params.seed; boolean doHist = params.doHist; boolean dorefpref = params.dorefpref; boolean doChainRef = params.doChainRef; double BDtol = params.BDtol; final double BDAccFrac = 0.001; //if( chemForm.length == 0 ) throw new RuntimeException("chemForm length is wrong!"); /*for(int i=0; i<chemForm.length; i++){ if( chemForm[i]== null) throw new RuntimeException("chemForm["+i+"] is null!"); }*/ //if( nTypes.length == 0 ) throw new RuntimeException("nTypes length is wrong!"); /*for(int i=0; i<nTypes.length; i++){ if( nTypes[i]== 0) throw new RuntimeException("nTypes["+i+"] is 0!"); }*/ if( chemForm.length != nTypes.length ) throw new RuntimeException("chemFrom and nTypes lengths are unequal!"); if( chemForm.length > 1 && Arrays.stream(nTypes).sum() != nPoints ) throw new RuntimeException("nPoints and nTypes do not match!"); /* if(chemForm.length > 1) { for(int i=0; i<chemForm.length; i++){ for(int j=i+1; j<chemForm.length; j++){ if( chemForm[i] == chemForm[j] ) throw new RuntimeException("chemForm["+i+"] and chemForm["+j+"] are "+chemForm[i]+". No repetitions in chemForm allowed!"); } } }*/ //if( chemForm[0] == null || nTypes[0] == 0 ) throw new RuntimeException("Reformat input"); //if( chemForm.length > 1 && chemForm[1] == null && nTypes[1] != 0 ) throw new RuntimeException("chemForm 1 incomplete!"); boolean isMixture = ( nTypes.length > 1 ) ; if(isMixture){ for(int i=0; i<nTypes.length; i++){ if(nTypes[i]==nPoints) isMixture=false; } } double temperature = Kelvin.UNIT.toSim(temperatureK); final long numBlocks = 1000; long blockSize = steps/numBlocks; int EqSubSteps = 1000; double vhs = (4.0 / 3.0) * Math.PI * sigmaHSRef * sigmaHSRef * sigmaHSRef; final double HSBn = doChainRef ? SpecialFunctions.factorial(nPoints) / 2 * Math.pow(vhs, nPoints - 1) : Standard.BHS(nPoints, sigmaHSRef); if(!isMixture) { ChemForm chemFormPure = chemForm[0]; if(nTypes.length>1) { for(int i=0; i<nTypes.length; i++){ if(nTypes[i]==nPoints) chemFormPure=chemForm[i]; } } System.out.println("Overlap sampling for TraPPE " + chemFormPure + " at " + temperatureK + " K " + "for B" + nPoints + " and " + nDer + " derivatives"); } else{ String nTstr="{"; for(int i=0; i<nTypes.length; i++){ if(nTypes[i]!=0) nTstr += ((nTstr=="{") ? "":",")+nTypes[i]; } nTstr+="}"; String CFstr=""; for(int i=0; i<chemForm.length; i++){ if(nTypes[i]!=0) CFstr += chemForm[i]+" "; } System.out.println("Overlap sampling for TraPPE " + CFstr + " " +nTstr + " Mixture at " + temperatureK + " K " + "for B" + nPoints + " and " + nDer + " derivatives"); } System.out.println("Reference diagram: B"+nPoints+" for hard spheres with diameter " + sigmaHSRef + " Angstroms"); System.out.println(" B"+nPoints+"HS: "+HSBn); System.out.println(steps + " steps (" + numBlocks + " blocks of " + blockSize + ")"); Space space = Space3D.getInstance(); MayerFunction fRefPos = new MayerFunction() { public void setBox(Box box) { } public IPotential getPotential() { return null; } public double f(IMoleculeList pair, double r2, double beta) { return r2 < sigmaHSRef * sigmaHSRef ? 1 : 0; } }; // Setting up reference cluster MayerHardSphere fRef = new MayerHardSphere(sigmaHSRef); ClusterAbstract refCluster = doChainRef ? new ClusterChainHS(nPoints, fRefPos) : new ClusterWheatleyHS(nPoints, fRef); refCluster.setTemperature(temperature); //Setting up target cluster Species species[] = null; ClusterAbstractMultivalue targetCluster = null; ClusterAbstractMultivalue targetClusterBD = null; boolean allPolar = true; MayerFunction[][] fAll = new MayerFunction[nTypes.length][nTypes.length]; species = new Species[chemForm.length]; TraPPEParams[] TPList = new TraPPEParams[chemForm.length]; for(int i=0; i<TPList.length; i++){ TPList[i] = new TraPPEParams(space, chemForm[i]); } for(int i=0; i<chemForm.length; i++){ TraPPEParams TPi = TPList[i]; PotentialGroup PGii = TPi.potentialGroup; Species speciesi = TPi.species; species[i] = speciesi; P2PotentialGroupBuilder.ModelParams MPi = new P2PotentialGroupBuilder.ModelParams(TPi.atomTypes,TPi.sigma,TPi.epsilon,TPi.charge); fAll[i][i] = new MayerGeneral(PGii); allPolar=(allPolar&&TPi.polar); for(int j=i+1; j<chemForm.length; j++){ TraPPEParams TPj = TPList[j]; P2PotentialGroupBuilder.ModelParams MPj = new P2PotentialGroupBuilder.ModelParams(TPj.atomTypes,TPj.sigma,TPj.epsilon,TPj.charge); PotentialGroup PGij = P2PotentialGroupBuilder.P2PotentialGroupBuilder(space,MPi,MPj); fAll[i][j] = fAll[j][i] = new MayerGeneral(PGij); } } targetCluster = new ClusterWheatleySoftDerivativesMix(nPoints, nTypes,fAll, BDtol, nDer); targetCluster.setTemperature(temperature); if(allPolar && nPoints==2) { System.out.println("Performing Flipping"); ((ClusterWheatleySoftDerivativesMix) targetCluster).setTolerance(0); final int precision = -3*(int)Math.log10(BDtol); targetClusterBD = new ClusterWheatleySoftDerivativesMixBD(nPoints,nTypes,fAll,precision,nDer); targetClusterBD.setTemperature(temperature); ((ClusterWheatleySoftDerivativesMix) targetCluster).setDoCaching(false); ((ClusterWheatleySoftDerivativesMixBD) targetClusterBD).setDoCaching(false); targetCluster = new ClusterCoupledFlippedMultivalue(targetCluster, targetClusterBD, space, 20, nDer, BDtol); } //System.exit(1); final SimulationVirialOverlap2 sim = new SimulationVirialOverlap2(space, species, nTypes, temperature,refCluster,targetCluster); if(seed!=null)sim.setRandom(new RandomMersenneTwister(seed)); System.out.println("random seeds: "+ Arrays.toString(seed==null?sim.getRandomSeeds():seed)); if(targetCluster instanceof ClusterCoupledFlippedMultivalue) { ((ClusterCoupledFlippedMultivalue) targetCluster).setBDAccFrac(BDAccFrac,sim.getRandom()); } else { ((ClusterWheatleySoftDerivativesMix) targetCluster).setBDAccFrac(BDAccFrac,sim.getRandom()); } ClusterMultiToSingle[] primes = new ClusterMultiToSingle[nDer]; for(int m=0;m<primes.length;m++){ primes[m]= new ClusterMultiToSingle(((ClusterAbstractMultivalue) targetCluster), m+1); } sim.setExtraTargetClusters(primes); sim.init(); sim.box[0].setPositionDefinition(new MoleculePositionCOM(space)); sim.box[1].setPositionDefinition(new MoleculePositionCOM(space)); if (doChainRef) { sim.integrators[0].getMoveManager().removeMCMove(sim.mcMoveTranslate[0]); MCMoveClusterMoleculeHSChain mcMoveHSC = new MCMoveClusterMoleculeHSChain(sim.getRandom(), space, sigmaHSRef); sim.integrators[0].getMoveManager().addMCMove(mcMoveHSC); sim.accumulators[0].setBlockSize(1); } sim.integratorOS.setNumSubSteps(EqSubSteps); sim.integratorOS.setAggressiveAdjustStepFraction(true); if (false) { sim.box[0].getBoundary().setBoxSize(space.makeVector(new double[]{10,10,10})); sim.box[1].getBoundary().setBoxSize(space.makeVector(new double[]{10,10,10})); SimulationGraphic simGraphic = new SimulationGraphic(sim, SimulationGraphic.TABBED_PANE); DisplayBox displayBox0 = simGraphic.getDisplayBox(sim.box[0]); DisplayBox displayBox1 = simGraphic.getDisplayBox(sim.box[1]); // displayBox0.setPixelUnit(new Pixel(300.0/size)); // displayBox1.setPixelUnit(new Pixel(300.0/size)); displayBox0.setShowBoundary(false); displayBox1.setShowBoundary(false); ((DisplayBoxCanvasG3DSys)displayBox0.canvas).setBackgroundColor(Color.WHITE); ((DisplayBoxCanvasG3DSys)displayBox1.canvas).setBackgroundColor(Color.WHITE); ColorSchemeRandomByMolecule colorScheme = new ColorSchemeRandomByMolecule(sim, sim.box[0], sim.getRandom()); displayBox0.setColorScheme(colorScheme); colorScheme = new ColorSchemeRandomByMolecule(sim, sim.box[1], sim.getRandom()); displayBox1.setColorScheme(colorScheme); simGraphic.makeAndDisplayFrame(); sim.integratorOS.setNumSubSteps(1000); sim.setAccumulatorBlockSize(1000); // if running interactively, set filename to null so that it doens't read // (or write) to a refpref file sim.getController().removeAction(sim.ai); sim.getController().addAction(new IAction() { public void actionPerformed() { sim.initRefPref(null, 10); sim.equilibrate(null, 20); sim.ai.setMaxSteps(Long.MAX_VALUE); } }); sim.getController().addAction(sim.ai); if ((Double.isNaN(sim.refPref) || Double.isInfinite(sim.refPref) || sim.refPref == 0)) { throw new RuntimeException("Oops"); } return; } long t1 = System.currentTimeMillis(); if (refFrac >= 0) { sim.integratorOS.setRefStepFraction(refFrac); sim.integratorOS.setAdjustStepFraction(false); } System.out.println(); String refFileName = null; if (isCommandline) { // if running interactively, don't use the file String tempString = ""+temperatureK; if (temperatureK == (int)temperatureK) { // temperature is an integer, use "200" instead of "200.0" tempString = ""+(int)temperatureK; } refFileName = "refpref_"+"_"+nPoints+"_"+tempString+"K"; } final HistogramSimple targHist = new HistogramSimple(200, new DoubleRange(-1, 4)); IntegratorListener histListenerTarget = new IntegratorListener() { public void integratorStepStarted(IntegratorEvent e) {} public void integratorStepFinished(IntegratorEvent e) { CoordinatePairSet cPairs = sim.box[1].getCPairSet(); for (int i=0; i<nPoints; i++) { for (int j=i+1; j<nPoints; j++) { double r2 = cPairs.getr2(i, j); double r = Math.sqrt(r2); if (r > 1) { r = Math.log(r); } else { r -= 1; } targHist.addValue(r); } } } public void integratorInitialized(IntegratorEvent e) {} }; if (doHist) { System.out.println("collecting histograms"); // only collect the histogram if we're forcing it to run the reference system sim.integrators[1].getEventManager().addListener(histListenerTarget); } sim.initRefPref(refFileName, (steps / EqSubSteps) / 20); sim.equilibrate(refFileName, (steps / EqSubSteps) / 10); System.out.println("equilibration finished"); if(dorefpref){ long t2 = System.currentTimeMillis(); System.out.println("time: "+(t2-t1)/1000.0); return; } IntegratorListener progressReport = new IntegratorListener() { public void integratorStepStarted(IntegratorEvent e) {} public void integratorStepFinished(IntegratorEvent e) { if (sim.integratorOS.getStepCount() % 100 != 0) return; System.out.print(sim.integratorOS.getStepCount()+" steps: "); double[] ratioAndError = sim.dvo.getAverageAndError(); System.out.println("abs average: "+ratioAndError[0]*HSBn+", error: "+ratioAndError[1]*HSBn); } public void integratorInitialized(IntegratorEvent e) {} }; if (false) { sim.integratorOS.getEventManager().addListener(progressReport); } sim.integratorOS.setNumSubSteps((int) blockSize); sim.setAccumulatorBlockSize(blockSize); if (doChainRef) sim.accumulators[0].setBlockSize(1); sim.ai.setMaxSteps(steps / blockSize); for (int i=0; i<2; i++) { if (i > 0 || !doChainRef) System.out.println("MC Move step sizes " + sim.mcMoveTranslate[i].getStepSize()); } sim.getController().actionPerformed(); if (doHist) { double[] xValues = targHist.xValues(); double[] h = targHist.getHistogram(); for (int i=0; i<xValues.length; i++) { if (!Double.isNaN(h[i])) { double r = xValues[i]; double y = h[i]; if (r < 0) r += 1; else { r = Math.exp(r); y /= r; } System.out.println(r+" "+y); } } } System.out.println("final reference step fraction "+sim.integratorOS.getIdealRefStepFraction()); System.out.println("actual reference step fraction "+sim.integratorOS.getRefStepFraction()); String[] extraNames = new String[nDer]; for (int i = 1; i <= nDer; i++) { extraNames[i - 1] = "derivative " + i; } sim.printResults(HSBn, extraNames); long t2 = System.currentTimeMillis(); System.out.println("time: "+(t2-t1)/1000.0); } enum ChemForm { N2, O2, CO2, NH3 } /** * Inner class for parameters */ public static class VirialParam extends ParameterBase { // don't change these public ChemForm[] chemForm = {ChemForm.N2}; public int nPoints = 2; public int[] nTypes = {0}; public int nDer = 3; public double temperature = 400; public long numSteps = 1000000; public double refFrac = -1; public double sigmaHSRef = 5; public int[] seed = null; public boolean doHist = false; public boolean dorefpref = false; public boolean doChainRef = true; public double BDtol = 1e-12; } public static class TraPPEParams{ protected AtomType[] atomTypes; protected double[] sigma; protected double[] epsilon; protected double[] charge; protected Species species; protected PotentialGroup potentialGroup; protected static Element elementM = new ElementSimple("M", 0.0); protected boolean polar; //Set up computing the boolean. It is hard coded for now. /* public TraPPEParams(Element[] elements, double[] sigma, double[] epsilon, double[] charge) { this.elements = elements; this.sigma = sigma; this.epsilon = epsilon; this.charge = charge; } public TraPPEParams N2params = new TraPPEParams(new Element[]{new ElementSimple("M",0),Nitrogen.INSTANCE}, new double[]{0,3.31}, new double[]{0,Kelvin.UNIT.toSim(36)}, new double[]{Electron.UNIT.toSim(0.964),Electron.UNIT.toSim(-0.482)}) */ public TraPPEParams(Space space, ChemForm chemForm){ if(chemForm == ChemForm.N2) { //Atoms in Compound AtomType typeM = new AtomType(elementM); AtomType typeN = new AtomType(Nitrogen.INSTANCE); atomTypes = new AtomType[]{typeM,typeN}; int[] atomCount = new int[]{1,2}; //TraPPE Parameters double bondLength = 1.10; // Angstrom double sigmaN = 3.31; // Angstrom double epsilonN = Kelvin.UNIT.toSim(36.0); double qN = Electron.UNIT.toSim(-0.482); double sigmaM = 0.0; // Angstrom double epsilonM = Kelvin.UNIT.toSim(0.0); double qM = Electron.UNIT.toSim(0.964); //Construct Arrays sigma = new double[] {sigmaM,sigmaN}; epsilon = new double[] {epsilonM,epsilonN}; charge = new double[] {qM,qN}; //Get Coordinates Vector3D posM = new Vector3D(new double[] {0,0,0}); Vector3D posN1 = new Vector3D(new double[] {-bondLength/2,0,0}); Vector3D posN2 = new Vector3D(new double[] {+bondLength/2,0,0}); Vector[] pos = new Vector[]{posM,posN1,posN2}; //Set Geometry species = SpeciesBuilder.SpeciesBuilder(space,atomTypes,atomCount,pos); //Set Potential P2PotentialGroupBuilder.ModelParams modelParams = new P2PotentialGroupBuilder.ModelParams(atomTypes,sigma,epsilon,charge); potentialGroup = P2PotentialGroupBuilder.P2PotentialGroupBuilder(space,modelParams,null); } else if (chemForm == ChemForm.O2) { //Atoms in Compound AtomType typeM = new AtomType(elementM); AtomType typeO = new AtomType(Oxygen.INSTANCE); atomTypes = new AtomType[]{typeM,typeO}; int[] atomCount = new int[]{1,2}; //TraPPE Parameters double bondLength = 1.210; // Angstrom double sigmaO = 3.020; // Angstrom double epsilonO = Kelvin.UNIT.toSim(49.0); double qO = Electron.UNIT.toSim(-0.113); double sigmaM = 0.0; // Angstrom double epsilonM = Kelvin.UNIT.toSim(0.0); double qM = Electron.UNIT.toSim(0.226); //Construct Arrays sigma = new double[] {sigmaM,sigmaO}; epsilon = new double[] {epsilonM,epsilonO}; charge = new double[] {qM,qO}; //Get Coordinates Vector3D posM = new Vector3D(new double[] {0,0,0}); Vector3D posO1 = new Vector3D(new double[] {-bondLength/2,0,0}); Vector3D posO2 = new Vector3D(new double[] {+bondLength/2,0,0}); Vector[] pos = new Vector[]{posM,posO1,posO2}; //Set Geometry species = SpeciesBuilder.SpeciesBuilder(space,atomTypes,atomCount,pos); //Set Potential P2PotentialGroupBuilder.ModelParams modelParams = new P2PotentialGroupBuilder.ModelParams(atomTypes,sigma,epsilon,charge); potentialGroup = P2PotentialGroupBuilder.P2PotentialGroupBuilder(space,modelParams,null); } else if (chemForm == ChemForm.CO2) { //Atoms in Compound AtomType typeC = new AtomType(Carbon.INSTANCE); AtomType typeO = new AtomType(Oxygen.INSTANCE); atomTypes = new AtomType[]{typeC,typeO}; int[] atomCount = new int[] {1,2}; //TraPPE Parameters double bondLengthCO = 1.160; // Angstrom double sigmaC = 2.800; // Angstrom double epsilonC = Kelvin.UNIT.toSim(27.0); double qC = Electron.UNIT.toSim(0.700); double sigmaO = 3.050; // Angstrom double epsilonO = Kelvin.UNIT.toSim(79.0); double qO = Electron.UNIT.toSim(-0.350); //Construct Arrays sigma = new double[] {sigmaC,sigmaO}; epsilon = new double[] {epsilonC,epsilonO}; charge = new double[] {qC,qO}; //Get Coordinates Vector3D posC = new Vector3D(new double[] {0,0,0}); Vector3D posO1 = new Vector3D(new double[] {-bondLengthCO,0,0}); Vector3D posO2 = new Vector3D(new double[] {+bondLengthCO,0,0}); Vector[] pos = new Vector[]{posC,posO1,posO2}; //Set Geometry species = SpeciesBuilder.SpeciesBuilder(space,atomTypes,atomCount,pos); //Set Potential P2PotentialGroupBuilder.ModelParams modelParams = new P2PotentialGroupBuilder.ModelParams(atomTypes,sigma,epsilon,charge); potentialGroup = P2PotentialGroupBuilder.P2PotentialGroupBuilder(space,modelParams,null); } else if (chemForm == ChemForm.NH3) { //Atom in Compound AtomType typeN = new AtomType(Nitrogen.INSTANCE); AtomType typeH = new AtomType(Hydrogen.INSTANCE); AtomType typeM = new AtomType(elementM); atomTypes = new AtomType[]{typeN,typeH,typeM}; int[] atomCount = new int[] {1,3,1}; polar = true; //TraPPE Parameters double bondLengthNH = 1.012; // Angstrom double bondLengthNM = 0.080; // Angstrom double thetaHNM = Degree.UNIT.toSim(67.9) ; double thetaHNH = Degree.UNIT.toSim(106.7); double thetaHNHxy = Degree.UNIT.toSim(60); double sigmaN = 3.420; // Angstrom double epsilonN = Kelvin.UNIT.toSim(185.0); double qN = Electron.UNIT.toSim(0.0); double sigmaH = 0.0; // Angstrom double epsilonH = Kelvin.UNIT.toSim(0.0); double qH = Electron.UNIT.toSim(0.410); double sigmaM = 0.0; // Angstrom double epsilonM = Kelvin.UNIT.toSim(0.0); double qM = Electron.UNIT.toSim(-1.230); //Construct Arrays sigma = new double[] {sigmaN,sigmaH,sigmaM}; epsilon = new double[] {epsilonN,epsilonH,epsilonM}; charge = new double[] {qN,qH,qM}; //Get Coordinates Vector3D posN = new Vector3D(new double[] {0,0,0}); Vector3D posH1 = new Vector3D(new double[] {bondLengthNH*Math.sin(thetaHNM),0,-bondLengthNH*Math.cos(thetaHNM)}); Vector3D posH2 = new Vector3D(new double[] {-bondLengthNH*Math.sin(thetaHNM)*Math.cos(thetaHNHxy),bondLengthNH*Math.sin(thetaHNM)*Math.sin(thetaHNHxy),-bondLengthNH*Math.cos(thetaHNM)}); Vector3D posH3 = new Vector3D(new double[] {-bondLengthNH*Math.sin(thetaHNM)*Math.cos(thetaHNHxy),-bondLengthNH*Math.sin(thetaHNM)*Math.sin(thetaHNHxy),-bondLengthNH*Math.cos(thetaHNM)}); Vector3D posM = new Vector3D(new double[] {0,0,-bondLengthNM}); Vector[] pos = new Vector[]{posN,posH1,posH2,posH3,posM}; //Set Geometry species = SpeciesBuilder.SpeciesBuilder(space,atomTypes,atomCount,pos); //Set Potential P2PotentialGroupBuilder.ModelParams modelParams = new P2PotentialGroupBuilder.ModelParams(atomTypes,sigma,epsilon,charge); potentialGroup = P2PotentialGroupBuilder.P2PotentialGroupBuilder(space,modelParams,null); } } } }
etomica-apps/src/main/java/etomica/virial/simulations/VirialTraPPE.java
/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ package etomica.virial.simulations; import etomica.action.IAction; import etomica.atom.AtomType; import etomica.box.Box; import etomica.chem.elements.*; import etomica.data.histogram.HistogramSimple; import etomica.graphics.ColorSchemeRandomByMolecule; import etomica.graphics.DisplayBox; import etomica.graphics.DisplayBoxCanvasG3DSys; import etomica.graphics.SimulationGraphic; import etomica.integrator.IntegratorEvent; import etomica.integrator.IntegratorListener; import etomica.math.DoubleRange; import etomica.math.SpecialFunctions; import etomica.molecule.IMoleculeList; import etomica.molecule.MoleculePositionCOM; import etomica.potential.IPotential; import etomica.potential.P2PotentialGroupBuilder; import etomica.potential.PotentialGroup; import etomica.space.Space; import etomica.space.Vector; import etomica.space3d.Space3D; import etomica.space3d.Vector3D; import etomica.species.Species; import etomica.species.SpeciesBuilder; import etomica.units.Degree; import etomica.units.Electron; import etomica.units.Kelvin; import etomica.util.ParameterBase; import etomica.util.ParseArgs; import etomica.util.random.RandomMersenneTwister; import etomica.virial.*; import etomica.virial.cluster.Standard; import java.awt.*; import java.util.Arrays; /** * Compute pure and binary mixture virial coefficients using overlap sampling simulations * for some molecules using the TraPPE force fields. * */ public class VirialTraPPE { public static void main(String[] args) { VirialParam params = new VirialParam(); boolean isCommandline = args.length > 0; if (isCommandline) { ParseArgs.doParseArgs(params, args); } else { // customize parameters here params.chemForm = new ChemForm[]{ChemForm.N2,ChemForm.O2}; params.nPoints = 2; params.nTypes = new int[]{1,1}; params.nDer = 3; params.temperature = 450; params.numSteps = 1000000; params.refFrac = -1; params.sigmaHSRef = 5; params.seed = null; params.doHist = false; params.dorefpref = false; params.doChainRef = true; params.BDtol = 1e-12; } final ChemForm[] chemForm = params.chemForm; final int nPoints = params.nPoints; final int[] nTypes = params.nTypes; final int nDer = params.nDer; final double temperatureK = params.temperature; final long steps = params.numSteps; double refFrac = params.refFrac; double sigmaHSRef = params.sigmaHSRef; int[] seed = params.seed; boolean doHist = params.doHist; boolean dorefpref = params.dorefpref; boolean doChainRef = params.doChainRef; double BDtol = params.BDtol; final double BDAccFrac = 0.001; //if( chemForm.length == 0 ) throw new RuntimeException("chemForm length is wrong!"); /*for(int i=0; i<chemForm.length; i++){ if( chemForm[i]== null) throw new RuntimeException("chemForm["+i+"] is null!"); }*/ //if( nTypes.length == 0 ) throw new RuntimeException("nTypes length is wrong!"); /*for(int i=0; i<nTypes.length; i++){ if( nTypes[i]== 0) throw new RuntimeException("nTypes["+i+"] is 0!"); }*/ if( chemForm.length != nTypes.length ) throw new RuntimeException("chemFrom and nTypes lengths are unequal!"); if( chemForm.length > 1 && Arrays.stream(nTypes).sum() != nPoints ) throw new RuntimeException("nPoints and nTypes do not match!"); /* if(chemForm.length > 1) { for(int i=0; i<chemForm.length; i++){ for(int j=i+1; j<chemForm.length; j++){ if( chemForm[i] == chemForm[j] ) throw new RuntimeException("chemForm["+i+"] and chemForm["+j+"] are "+chemForm[i]+". No repetitions in chemForm allowed!"); } } }*/ //if( chemForm[0] == null || nTypes[0] == 0 ) throw new RuntimeException("Reformat input"); //if( chemForm.length > 1 && chemForm[1] == null && nTypes[1] != 0 ) throw new RuntimeException("chemForm 1 incomplete!"); boolean isMixture = ( nTypes.length > 1 ) ; if(isMixture){ for(int i=0; i<nTypes.length; i++){ if(nTypes[i]==nPoints) isMixture=false; } } double temperature = Kelvin.UNIT.toSim(temperatureK); long blockSize = 1000; int EqSubSteps = 1000; double vhs = (4.0 / 3.0) * Math.PI * sigmaHSRef * sigmaHSRef * sigmaHSRef; final double HSBn = doChainRef ? SpecialFunctions.factorial(nPoints) / 2 * Math.pow(vhs, nPoints - 1) : Standard.BHS(nPoints, sigmaHSRef); if(!isMixture) { ChemForm chemFormPure = chemForm[0]; if(nTypes.length>1) { for(int i=0; i<nTypes.length; i++){ if(nTypes[i]==nPoints) chemFormPure=chemForm[i]; } } System.out.println("Overlap sampling for TraPPE " + chemFormPure + " at " + temperatureK + " K " + "for B" + nPoints + " and " + nDer + " derivatives"); } else{ String nTstr="{"; for(int i=0; i<nTypes.length; i++){ if(nTypes[i]!=0) nTstr += ((nTstr=="{") ? "":",")+nTypes[i]; } nTstr+="}"; String CFstr=""; for(int i=0; i<chemForm.length; i++){ if(nTypes[i]!=0) CFstr += chemForm[i]+" "; } System.out.println("Overlap sampling for TraPPE " + CFstr + " " +nTstr + " Mixture at " + temperatureK + " K " + "for B" + nPoints + " and " + nDer + " derivatives"); } System.out.println("Reference diagram: B"+nPoints+" for hard spheres with diameter " + sigmaHSRef + " Angstroms"); System.out.println(" B"+nPoints+"HS: "+HSBn); System.out.println(steps + " steps (" + (steps / blockSize) + " blocks of " + blockSize + ")"); Space space = Space3D.getInstance(); MayerFunction fRefPos = new MayerFunction() { public void setBox(Box box) { } public IPotential getPotential() { return null; } public double f(IMoleculeList pair, double r2, double beta) { return r2 < sigmaHSRef * sigmaHSRef ? 1 : 0; } }; // Setting up reference cluster MayerHardSphere fRef = new MayerHardSphere(sigmaHSRef); ClusterAbstract refCluster = doChainRef ? new ClusterChainHS(nPoints, fRefPos) : new ClusterWheatleyHS(nPoints, fRef); refCluster.setTemperature(temperature); //Setting up target cluster Species species[] = null; ClusterAbstractMultivalue targetCluster = null; ClusterAbstractMultivalue targetClusterBD = null; boolean allPolar = true; MayerFunction[][] fAll = new MayerFunction[nTypes.length][nTypes.length]; species = new Species[chemForm.length]; TraPPEParams[] TPList = new TraPPEParams[chemForm.length]; for(int i=0; i<TPList.length; i++){ TPList[i] = new TraPPEParams(space, chemForm[i]); } for(int i=0; i<chemForm.length; i++){ TraPPEParams TPi = TPList[i]; PotentialGroup PGii = TPi.potentialGroup; Species speciesi = TPi.species; species[i] = speciesi; P2PotentialGroupBuilder.ModelParams MPi = new P2PotentialGroupBuilder.ModelParams(TPi.atomTypes,TPi.sigma,TPi.epsilon,TPi.charge); fAll[i][i] = new MayerGeneral(PGii); allPolar=(allPolar&&TPi.polar); for(int j=i+1; j<chemForm.length; j++){ TraPPEParams TPj = TPList[j]; P2PotentialGroupBuilder.ModelParams MPj = new P2PotentialGroupBuilder.ModelParams(TPj.atomTypes,TPj.sigma,TPj.epsilon,TPj.charge); PotentialGroup PGij = P2PotentialGroupBuilder.P2PotentialGroupBuilder(space,MPi,MPj); fAll[i][j] = fAll[j][i] = new MayerGeneral(PGij); } } targetCluster = new ClusterWheatleySoftDerivativesMix(nPoints, nTypes,fAll, BDtol, nDer); targetCluster.setTemperature(temperature); if(allPolar && nPoints==2) { System.out.println("Performing Flipping"); ((ClusterWheatleySoftDerivativesMix) targetCluster).setTolerance(0); final int precision = -3*(int)Math.log10(BDtol); targetClusterBD = new ClusterWheatleySoftDerivativesMixBD(nPoints,nTypes,fAll,precision,nDer); targetClusterBD.setTemperature(temperature); ((ClusterWheatleySoftDerivativesMix) targetCluster).setDoCaching(false); ((ClusterWheatleySoftDerivativesMixBD) targetClusterBD).setDoCaching(false); targetCluster = new ClusterCoupledFlippedMultivalue(targetCluster, targetClusterBD, space, 20, nDer, BDtol); } //System.exit(1); final SimulationVirialOverlap2 sim = new SimulationVirialOverlap2(space, species, nTypes, temperature,refCluster,targetCluster); if(seed!=null)sim.setRandom(new RandomMersenneTwister(seed)); System.out.println("random seeds: "+ Arrays.toString(seed==null?sim.getRandomSeeds():seed)); if(targetCluster instanceof ClusterCoupledFlippedMultivalue) { ((ClusterCoupledFlippedMultivalue) targetCluster).setBDAccFrac(BDAccFrac,sim.getRandom()); } else { ((ClusterWheatleySoftDerivativesMix) targetCluster).setBDAccFrac(BDAccFrac,sim.getRandom()); } ClusterMultiToSingle[] primes = new ClusterMultiToSingle[nDer]; for(int m=0;m<primes.length;m++){ primes[m]= new ClusterMultiToSingle(((ClusterAbstractMultivalue) targetCluster), m+1); } sim.setExtraTargetClusters(primes); sim.init(); sim.box[0].setPositionDefinition(new MoleculePositionCOM(space)); sim.box[1].setPositionDefinition(new MoleculePositionCOM(space)); if (doChainRef) { sim.integrators[0].getMoveManager().removeMCMove(sim.mcMoveTranslate[0]); MCMoveClusterMoleculeHSChain mcMoveHSC = new MCMoveClusterMoleculeHSChain(sim.getRandom(), space, sigmaHSRef); sim.integrators[0].getMoveManager().addMCMove(mcMoveHSC); sim.accumulators[0].setBlockSize(1); } sim.integratorOS.setNumSubSteps(EqSubSteps); sim.integratorOS.setAggressiveAdjustStepFraction(true); if (false) { sim.box[0].getBoundary().setBoxSize(space.makeVector(new double[]{10,10,10})); sim.box[1].getBoundary().setBoxSize(space.makeVector(new double[]{10,10,10})); SimulationGraphic simGraphic = new SimulationGraphic(sim, SimulationGraphic.TABBED_PANE); DisplayBox displayBox0 = simGraphic.getDisplayBox(sim.box[0]); DisplayBox displayBox1 = simGraphic.getDisplayBox(sim.box[1]); // displayBox0.setPixelUnit(new Pixel(300.0/size)); // displayBox1.setPixelUnit(new Pixel(300.0/size)); displayBox0.setShowBoundary(false); displayBox1.setShowBoundary(false); ((DisplayBoxCanvasG3DSys)displayBox0.canvas).setBackgroundColor(Color.WHITE); ((DisplayBoxCanvasG3DSys)displayBox1.canvas).setBackgroundColor(Color.WHITE); ColorSchemeRandomByMolecule colorScheme = new ColorSchemeRandomByMolecule(sim, sim.box[0], sim.getRandom()); displayBox0.setColorScheme(colorScheme); colorScheme = new ColorSchemeRandomByMolecule(sim, sim.box[1], sim.getRandom()); displayBox1.setColorScheme(colorScheme); simGraphic.makeAndDisplayFrame(); sim.integratorOS.setNumSubSteps(1000); sim.setAccumulatorBlockSize(1000); // if running interactively, set filename to null so that it doens't read // (or write) to a refpref file sim.getController().removeAction(sim.ai); sim.getController().addAction(new IAction() { public void actionPerformed() { sim.initRefPref(null, 10); sim.equilibrate(null, 20); sim.ai.setMaxSteps(Long.MAX_VALUE); } }); sim.getController().addAction(sim.ai); if ((Double.isNaN(sim.refPref) || Double.isInfinite(sim.refPref) || sim.refPref == 0)) { throw new RuntimeException("Oops"); } return; } long t1 = System.currentTimeMillis(); if (refFrac >= 0) { sim.integratorOS.setRefStepFraction(refFrac); sim.integratorOS.setAdjustStepFraction(false); } System.out.println(); String refFileName = null; if (isCommandline) { // if running interactively, don't use the file String tempString = ""+temperatureK; if (temperatureK == (int)temperatureK) { // temperature is an integer, use "200" instead of "200.0" tempString = ""+(int)temperatureK; } refFileName = "refpref_"+"_"+nPoints+"_"+tempString+"K"; } final HistogramSimple targHist = new HistogramSimple(200, new DoubleRange(-1, 4)); IntegratorListener histListenerTarget = new IntegratorListener() { public void integratorStepStarted(IntegratorEvent e) {} public void integratorStepFinished(IntegratorEvent e) { CoordinatePairSet cPairs = sim.box[1].getCPairSet(); for (int i=0; i<nPoints; i++) { for (int j=i+1; j<nPoints; j++) { double r2 = cPairs.getr2(i, j); double r = Math.sqrt(r2); if (r > 1) { r = Math.log(r); } else { r -= 1; } targHist.addValue(r); } } } public void integratorInitialized(IntegratorEvent e) {} }; if (doHist) { System.out.println("collecting histograms"); // only collect the histogram if we're forcing it to run the reference system sim.integrators[1].getEventManager().addListener(histListenerTarget); } sim.initRefPref(refFileName, (steps / EqSubSteps) / 20); sim.equilibrate(refFileName, (steps / EqSubSteps) / 10); System.out.println("equilibration finished"); if(dorefpref){ long t2 = System.currentTimeMillis(); System.out.println("time: "+(t2-t1)/1000.0); return; } IntegratorListener progressReport = new IntegratorListener() { public void integratorStepStarted(IntegratorEvent e) {} public void integratorStepFinished(IntegratorEvent e) { if (sim.integratorOS.getStepCount() % 100 != 0) return; System.out.print(sim.integratorOS.getStepCount()+" steps: "); double[] ratioAndError = sim.dvo.getAverageAndError(); System.out.println("abs average: "+ratioAndError[0]*HSBn+", error: "+ratioAndError[1]*HSBn); } public void integratorInitialized(IntegratorEvent e) {} }; if (false) { sim.integratorOS.getEventManager().addListener(progressReport); } sim.integratorOS.setNumSubSteps((int) blockSize); sim.setAccumulatorBlockSize(blockSize); if (doChainRef) sim.accumulators[0].setBlockSize(1); sim.ai.setMaxSteps(steps / blockSize); for (int i=0; i<2; i++) { if (i > 0 || !doChainRef) System.out.println("MC Move step sizes " + sim.mcMoveTranslate[i].getStepSize()); } sim.getController().actionPerformed(); if (doHist) { double[] xValues = targHist.xValues(); double[] h = targHist.getHistogram(); for (int i=0; i<xValues.length; i++) { if (!Double.isNaN(h[i])) { double r = xValues[i]; double y = h[i]; if (r < 0) r += 1; else { r = Math.exp(r); y /= r; } System.out.println(r+" "+y); } } } System.out.println("final reference step fraction "+sim.integratorOS.getIdealRefStepFraction()); System.out.println("actual reference step fraction "+sim.integratorOS.getRefStepFraction()); String[] extraNames = new String[nDer]; for (int i = 1; i <= nDer; i++) { extraNames[i - 1] = "derivative " + i; } sim.printResults(HSBn, extraNames); long t2 = System.currentTimeMillis(); System.out.println("time: "+(t2-t1)/1000.0); } enum ChemForm { N2, O2, CO2, NH3 } /** * Inner class for parameters */ public static class VirialParam extends ParameterBase { // don't change these public ChemForm[] chemForm = {ChemForm.N2}; public int nPoints = 2; public int[] nTypes = {0}; public int nDer = 3; public double temperature = 400; public long numSteps = 1000000; public double refFrac = -1; public double sigmaHSRef = 5; public int[] seed = null; public boolean doHist = false; public boolean dorefpref = false; public boolean doChainRef = true; public double BDtol = 1e-12; } public static class TraPPEParams{ protected AtomType[] atomTypes; protected double[] sigma; protected double[] epsilon; protected double[] charge; protected Species species; protected PotentialGroup potentialGroup; protected static Element elementM = new ElementSimple("M", 0.0); protected boolean polar; //Set up computing the boolean. It is hard coded for now. /* public TraPPEParams(Element[] elements, double[] sigma, double[] epsilon, double[] charge) { this.elements = elements; this.sigma = sigma; this.epsilon = epsilon; this.charge = charge; } public TraPPEParams N2params = new TraPPEParams(new Element[]{new ElementSimple("M",0),Nitrogen.INSTANCE}, new double[]{0,3.31}, new double[]{0,Kelvin.UNIT.toSim(36)}, new double[]{Electron.UNIT.toSim(0.964),Electron.UNIT.toSim(-0.482)}) */ public TraPPEParams(Space space, ChemForm chemForm){ if(chemForm == ChemForm.N2) { //Atoms in Compound AtomType typeM = new AtomType(elementM); AtomType typeN = new AtomType(Nitrogen.INSTANCE); atomTypes = new AtomType[]{typeM,typeN}; int[] atomCount = new int[]{1,2}; //TraPPE Parameters double bondLength = 1.10; // Angstrom double sigmaN = 3.31; // Angstrom double epsilonN = Kelvin.UNIT.toSim(36.0); double qN = Electron.UNIT.toSim(-0.482); double sigmaM = 0.0; // Angstrom double epsilonM = Kelvin.UNIT.toSim(0.0); double qM = Electron.UNIT.toSim(0.964); //Construct Arrays sigma = new double[] {sigmaM,sigmaN}; epsilon = new double[] {epsilonM,epsilonN}; charge = new double[] {qM,qN}; //Get Coordinates Vector3D posM = new Vector3D(new double[] {0,0,0}); Vector3D posN1 = new Vector3D(new double[] {-bondLength/2,0,0}); Vector3D posN2 = new Vector3D(new double[] {+bondLength/2,0,0}); Vector[] pos = new Vector[]{posM,posN1,posN2}; //Set Geometry species = SpeciesBuilder.SpeciesBuilder(space,atomTypes,atomCount,pos); //Set Potential P2PotentialGroupBuilder.ModelParams modelParams = new P2PotentialGroupBuilder.ModelParams(atomTypes,sigma,epsilon,charge); potentialGroup = P2PotentialGroupBuilder.P2PotentialGroupBuilder(space,modelParams,null); } else if (chemForm == ChemForm.O2) { //Atoms in Compound AtomType typeM = new AtomType(elementM); AtomType typeO = new AtomType(Oxygen.INSTANCE); atomTypes = new AtomType[]{typeM,typeO}; int[] atomCount = new int[]{1,2}; //TraPPE Parameters double bondLength = 1.210; // Angstrom double sigmaO = 3.020; // Angstrom double epsilonO = Kelvin.UNIT.toSim(49.0); double qO = Electron.UNIT.toSim(-0.113); double sigmaM = 0.0; // Angstrom double epsilonM = Kelvin.UNIT.toSim(0.0); double qM = Electron.UNIT.toSim(0.226); //Construct Arrays sigma = new double[] {sigmaM,sigmaO}; epsilon = new double[] {epsilonM,epsilonO}; charge = new double[] {qM,qO}; //Get Coordinates Vector3D posM = new Vector3D(new double[] {0,0,0}); Vector3D posO1 = new Vector3D(new double[] {-bondLength/2,0,0}); Vector3D posO2 = new Vector3D(new double[] {+bondLength/2,0,0}); Vector[] pos = new Vector[]{posM,posO1,posO2}; //Set Geometry species = SpeciesBuilder.SpeciesBuilder(space,atomTypes,atomCount,pos); //Set Potential P2PotentialGroupBuilder.ModelParams modelParams = new P2PotentialGroupBuilder.ModelParams(atomTypes,sigma,epsilon,charge); potentialGroup = P2PotentialGroupBuilder.P2PotentialGroupBuilder(space,modelParams,null); } else if (chemForm == ChemForm.CO2) { //Atoms in Compound AtomType typeC = new AtomType(Carbon.INSTANCE); AtomType typeO = new AtomType(Oxygen.INSTANCE); atomTypes = new AtomType[]{typeC,typeO}; int[] atomCount = new int[] {1,2}; //TraPPE Parameters double bondLengthCO = 1.160; // Angstrom double sigmaC = 2.800; // Angstrom double epsilonC = Kelvin.UNIT.toSim(27.0); double qC = Electron.UNIT.toSim(0.700); double sigmaO = 3.050; // Angstrom double epsilonO = Kelvin.UNIT.toSim(79.0); double qO = Electron.UNIT.toSim(-0.350); //Construct Arrays sigma = new double[] {sigmaC,sigmaO}; epsilon = new double[] {epsilonC,epsilonO}; charge = new double[] {qC,qO}; //Get Coordinates Vector3D posC = new Vector3D(new double[] {0,0,0}); Vector3D posO1 = new Vector3D(new double[] {-bondLengthCO,0,0}); Vector3D posO2 = new Vector3D(new double[] {+bondLengthCO,0,0}); Vector[] pos = new Vector[]{posC,posO1,posO2}; //Set Geometry species = SpeciesBuilder.SpeciesBuilder(space,atomTypes,atomCount,pos); //Set Potential P2PotentialGroupBuilder.ModelParams modelParams = new P2PotentialGroupBuilder.ModelParams(atomTypes,sigma,epsilon,charge); potentialGroup = P2PotentialGroupBuilder.P2PotentialGroupBuilder(space,modelParams,null); } else if (chemForm == ChemForm.NH3) { //Atom in Compound AtomType typeN = new AtomType(Nitrogen.INSTANCE); AtomType typeH = new AtomType(Hydrogen.INSTANCE); AtomType typeM = new AtomType(elementM); atomTypes = new AtomType[]{typeN,typeH,typeM}; int[] atomCount = new int[] {1,3,1}; polar = true; //TraPPE Parameters double bondLengthNH = 1.012; // Angstrom double bondLengthNM = 0.080; // Angstrom double thetaHNM = Degree.UNIT.toSim(67.9) ; double thetaHNH = Degree.UNIT.toSim(106.7); double thetaHNHxy = Degree.UNIT.toSim(60); double sigmaN = 3.420; // Angstrom double epsilonN = Kelvin.UNIT.toSim(185.0); double qN = Electron.UNIT.toSim(0.0); double sigmaH = 0.0; // Angstrom double epsilonH = Kelvin.UNIT.toSim(0.0); double qH = Electron.UNIT.toSim(0.410); double sigmaM = 0.0; // Angstrom double epsilonM = Kelvin.UNIT.toSim(0.0); double qM = Electron.UNIT.toSim(-1.230); //Construct Arrays sigma = new double[] {sigmaN,sigmaH,sigmaM}; epsilon = new double[] {epsilonN,epsilonH,epsilonM}; charge = new double[] {qN,qH,qM}; //Get Coordinates Vector3D posN = new Vector3D(new double[] {0,0,0}); Vector3D posH1 = new Vector3D(new double[] {bondLengthNH*Math.sin(thetaHNM),0,-bondLengthNH*Math.cos(thetaHNM)}); Vector3D posH2 = new Vector3D(new double[] {-bondLengthNH*Math.sin(thetaHNM)*Math.cos(thetaHNHxy),bondLengthNH*Math.sin(thetaHNM)*Math.sin(thetaHNHxy),-bondLengthNH*Math.cos(thetaHNM)}); Vector3D posH3 = new Vector3D(new double[] {-bondLengthNH*Math.sin(thetaHNM)*Math.cos(thetaHNHxy),-bondLengthNH*Math.sin(thetaHNM)*Math.sin(thetaHNHxy),-bondLengthNH*Math.cos(thetaHNM)}); Vector3D posM = new Vector3D(new double[] {0,0,-bondLengthNM}); Vector[] pos = new Vector[]{posN,posH1,posH2,posH3,posM}; //Set Geometry species = SpeciesBuilder.SpeciesBuilder(space,atomTypes,atomCount,pos); //Set Potential P2PotentialGroupBuilder.ModelParams modelParams = new P2PotentialGroupBuilder.ModelParams(atomTypes,sigma,epsilon,charge); potentialGroup = P2PotentialGroupBuilder.P2PotentialGroupBuilder(space,modelParams,null); } } } }
Fixed number of blocks to 1000 and with a variable block size.
etomica-apps/src/main/java/etomica/virial/simulations/VirialTraPPE.java
Fixed number of blocks to 1000 and with a variable block size.
Java
agpl-3.0
d79bdae61f225424d836240efca9e5baf72809e1
0
SensNet/SensNet,SensNet/SensNet,SensNet/SensNet
package net.sensnet.node.plugins; import net.sensnet.node.SensNetNodeConfiguration; import net.sensnet.node.pages.api.json.DatapointJSONApiPage; import org.cacert.gigi.output.template.Template; public abstract class VisualizerPlugin extends Plugin { private Template defaultTemplate; public VisualizerPlugin(SensNetNodeConfiguration configuration) { super(configuration); try { defaultTemplate = new Template(getClass().getResource( getClass().getSimpleName() + ".templ")); } catch (Exception e) { getLoger().info("No fitting template found."); } } public abstract String getSensorName(); public abstract int getSensorType(); public Template getTemplate() { return defaultTemplate; } public abstract DatapointJSONApiPage getDatapointJSONApiPage(); }
src/net/sensnet/node/plugins/VisualizerPlugin.java
package net.sensnet.node.plugins; import net.sensnet.node.SensNetNodeConfiguration; import net.sensnet.node.pages.api.json.DatapointJSONApiPage; import com.sun.org.apache.xalan.internal.xsltc.compiler.Template; public abstract class VisualizerPlugin extends Plugin { public VisualizerPlugin(SensNetNodeConfiguration configuration) { super(configuration); } public abstract String getSensorName(); public abstract int getSensorType(); public abstract Template getTemplate(); public abstract DatapointJSONApiPage getDatapointJSONApiPage(); }
assume a default template
src/net/sensnet/node/plugins/VisualizerPlugin.java
assume a default template
Java
agpl-3.0
c9543aa6d582a716a66d3e5cd911b0cba2ac7065
0
kalletlak/cbioportal,bihealth/cbioportal,pughlab/cbioportal,mandawilson/cbioportal,n1zea144/cbioportal,pughlab/cbioportal,shrumit/cbioportal-gsoc-final,n1zea144/cbioportal,shrumit/cbioportal-gsoc-final,IntersectAustralia/cbioportal,jjgao/cbioportal,cBioPortal/cbioportal,angelicaochoa/cbioportal,d3b-center/pedcbioportal,d3b-center/pedcbioportal,gsun83/cbioportal,inodb/cbioportal,cBioPortal/cbioportal,zhx828/cbioportal,sheridancbio/cbioportal,inodb/cbioportal,bihealth/cbioportal,zhx828/cbioportal,IntersectAustralia/cbioportal,d3b-center/pedcbioportal,jjgao/cbioportal,inodb/cbioportal,angelicaochoa/cbioportal,gsun83/cbioportal,sheridancbio/cbioportal,cBioPortal/cbioportal,adamabeshouse/cbioportal,kalletlak/cbioportal,shrumit/cbioportal-gsoc-final,zhx828/cbioportal,jjgao/cbioportal,inodb/cbioportal,jjgao/cbioportal,pughlab/cbioportal,inodb/cbioportal,angelicaochoa/cbioportal,onursumer/cbioportal,gsun83/cbioportal,mandawilson/cbioportal,zhx828/cbioportal,d3b-center/pedcbioportal,n1zea144/cbioportal,cBioPortal/cbioportal,cBioPortal/cbioportal,mandawilson/cbioportal,angelicaochoa/cbioportal,onursumer/cbioportal,onursumer/cbioportal,kalletlak/cbioportal,yichaoS/cbioportal,adamabeshouse/cbioportal,jjgao/cbioportal,kalletlak/cbioportal,sheridancbio/cbioportal,pughlab/cbioportal,pughlab/cbioportal,n1zea144/cbioportal,adamabeshouse/cbioportal,zhx828/cbioportal,adamabeshouse/cbioportal,pughlab/cbioportal,mandawilson/cbioportal,yichaoS/cbioportal,yichaoS/cbioportal,d3b-center/pedcbioportal,IntersectAustralia/cbioportal,kalletlak/cbioportal,gsun83/cbioportal,pughlab/cbioportal,shrumit/cbioportal-gsoc-final,yichaoS/cbioportal,IntersectAustralia/cbioportal,mandawilson/cbioportal,jjgao/cbioportal,n1zea144/cbioportal,kalletlak/cbioportal,mandawilson/cbioportal,yichaoS/cbioportal,angelicaochoa/cbioportal,zhx828/cbioportal,bihealth/cbioportal,n1zea144/cbioportal,adamabeshouse/cbioportal,shrumit/cbioportal-gsoc-final,gsun83/cbioportal,onursumer/cbioportal,adamabeshouse/cbioportal,jjgao/cbioportal,sheridancbio/cbioportal,yichaoS/cbioportal,gsun83/cbioportal,bihealth/cbioportal,zhx828/cbioportal,adamabeshouse/cbioportal,gsun83/cbioportal,angelicaochoa/cbioportal,kalletlak/cbioportal,mandawilson/cbioportal,inodb/cbioportal,IntersectAustralia/cbioportal,IntersectAustralia/cbioportal,onursumer/cbioportal,bihealth/cbioportal,shrumit/cbioportal-gsoc-final,onursumer/cbioportal,IntersectAustralia/cbioportal,cBioPortal/cbioportal,sheridancbio/cbioportal,n1zea144/cbioportal,yichaoS/cbioportal,shrumit/cbioportal-gsoc-final,angelicaochoa/cbioportal,d3b-center/pedcbioportal,bihealth/cbioportal,sheridancbio/cbioportal,d3b-center/pedcbioportal,bihealth/cbioportal,inodb/cbioportal
/* * Copyright (c) 2015 Memorial Sloan-Kettering Cancer Center. * * This library is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY, WITHOUT EVEN THE IMPLIED WARRANTY OF MERCHANTABILITY OR FITNESS * FOR A PARTICULAR PURPOSE. The software and documentation provided hereunder * is on an "as is" basis, and Memorial Sloan-Kettering Cancer Center has no * obligations to provide maintenance, support, updates, enhancements or * modifications. In no event shall Memorial Sloan-Kettering Cancer Center be * liable to any party for direct, indirect, special, incidental or * consequential damages, including lost profits, arising out of the use of this * software and its documentation, even if Memorial Sloan-Kettering Cancer * Center has been advised of the possibility of such damage. */ /* * This file is part of cBioPortal. * * cBioPortal is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.mskcc.cbio.portal.scripts; import org.mskcc.cbio.portal.dao.*; import org.mskcc.cbio.portal.model.*; import org.mskcc.cbio.portal.util.*; import org.apache.commons.lang.ArrayUtils; import org.apache.log4j.Logger; import java.io.*; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.lang.StringUtils; /** * Code to Import Copy Number Alteration or MRNA Expression Data. * * @author Ethan Cerami */ public class ImportTabDelimData { private HashSet<Long> importedGeneSet = new HashSet<Long>(); private static Logger logger = Logger.getLogger(ImportTabDelimData.class); /** * Barry Target Line: A constant currently used to indicate the RAE method. */ public static final String BARRY_TARGET = "Barry"; /** * Consensus Target Line: A constant currently used to indicate consensus of multiple * CNA calling algorithms. */ public static final String CONSENSUS_TARGET = "consensus"; private ProgressMonitor pMonitor; private File mutationFile; private String targetLine; private int geneticProfileId; private GeneticProfile geneticProfile; private HashSet<String> microRnaIdSet; /** * Constructor. * * @param dataFile Data File containing CNA data. * @param targetLine The line we want to import. * If null, all lines are imported. * @param geneticProfileId GeneticProfile ID. * @param pMonitor Progress Monitor Object. */ public ImportTabDelimData(File dataFile, String targetLine, int geneticProfileId, ProgressMonitor pMonitor) { this.mutationFile = dataFile; this.targetLine = targetLine; this.geneticProfileId = geneticProfileId; this.pMonitor = pMonitor; } /** * Constructor. * * @param dataFile Data File containing CNA data. * @param geneticProfileId GeneticProfile ID. * @param pMonitor Progress Monitor Object. */ public ImportTabDelimData(File dataFile, int geneticProfileId, ProgressMonitor pMonitor) { this.mutationFile = dataFile; this.geneticProfileId = geneticProfileId; this.pMonitor = pMonitor; } /** * Import the CNA Data. * * @throws IOException IO Error. * @throws DaoException Database Error. */ public void importData() throws IOException, DaoException { DaoMicroRna daoMicroRna = new DaoMicroRna(); microRnaIdSet = daoMicroRna.getEntireSet(); geneticProfile = DaoGeneticProfile.getGeneticProfileById(geneticProfileId); FileReader reader = new FileReader(mutationFile); BufferedReader buf = new BufferedReader(reader); String headerLine = buf.readLine(); String parts[] = headerLine.split("\t"); int sampleStartIndex = getStartIndex(parts); int hugoSymbolIndex = getHugoSymbolIndex(parts); int entrezGeneIdIndex = getEntrezGeneIdIndex(parts); String sampleIds[]; // Branch, depending on targetLine setting if (targetLine == null) { sampleIds = new String[parts.length - sampleStartIndex]; System.arraycopy(parts, sampleStartIndex, sampleIds, 0, parts.length - sampleStartIndex); } else { sampleIds = new String[parts.length - sampleStartIndex]; System.arraycopy(parts, sampleStartIndex, sampleIds, 0, parts.length - sampleStartIndex); } ImportDataUtil.addPatients(sampleIds, geneticProfileId); ImportDataUtil.addSamples(sampleIds, geneticProfileId); pMonitor.setCurrentMessage("Import tab delimited data for " + sampleIds.length + " samples."); // Add Samples to the Database ArrayList <Integer> orderedSampleList = new ArrayList<Integer>(); ArrayList <Integer> filteredSampleIndices = new ArrayList<Integer>(); for (int i = 0; i < sampleIds.length; i++) { Sample sample = DaoSample.getSampleByCancerStudyAndSampleId(geneticProfile.getCancerStudyId(), StableIdUtil.getSampleId(sampleIds[i])); if (sample == null) { assert StableIdUtil.isNormal(sampleIds[i]); filteredSampleIndices.add(i); continue; } if (!DaoSampleProfile.sampleExistsInGeneticProfile(sample.getInternalId(), geneticProfileId)) { DaoSampleProfile.addSampleProfile(sample.getInternalId(), geneticProfileId); } orderedSampleList.add(sample.getInternalId()); } DaoGeneticProfileSamples.addGeneticProfileSamples(geneticProfileId, orderedSampleList); String line = buf.readLine(); int numRecordsStored = 0; DaoGeneOptimized daoGene = DaoGeneOptimized.getInstance(); DaoGeneticAlteration daoGeneticAlteration = DaoGeneticAlteration.getInstance(); boolean discritizedCnaProfile = geneticProfile!=null && geneticProfile.getGeneticAlterationType() == GeneticAlterationType.COPY_NUMBER_ALTERATION && geneticProfile.showProfileInAnalysisTab(); boolean rppaProfile = geneticProfile!=null && geneticProfile.getGeneticAlterationType() == GeneticAlterationType.PROTEIN_LEVEL && "Composite.Element.Ref".equalsIgnoreCase(parts[0]); Map<CnaEvent.Event, CnaEvent.Event> existingCnaEvents = null; long cnaEventId = 0; if (discritizedCnaProfile) { existingCnaEvents = new HashMap<CnaEvent.Event, CnaEvent.Event>(); for (CnaEvent.Event event : DaoCnaEvent.getAllCnaEvents()) { existingCnaEvents.put(event, event); } cnaEventId = DaoCnaEvent.getLargestCnaEventId(); MySQLbulkLoader.bulkLoadOn(); } int lenParts = parts.length; while (line != null) { if (pMonitor != null) { pMonitor.incrementCurValue(); ConsoleUtil.showProgress(pMonitor); } // Ignore lines starting with # if (!line.startsWith("#") && line.trim().length() > 0) { parts = line.split("\t",-1); if (parts.length>lenParts) { if (line.split("\t").length>lenParts) { System.err.println("The following line has more fields (" + parts.length + ") than the headers(" + lenParts + "): \n"+parts[0]); } } String values[] = (String[]) ArrayUtils.subarray(parts, sampleStartIndex, parts.length>lenParts?lenParts:parts.length); values = filterOutNormalValues(filteredSampleIndices, values); String hugo = parts[hugoSymbolIndex]; if (hugo!=null && hugo.isEmpty()) { hugo = null; } String entrez = null; if (entrezGeneIdIndex!=-1) { entrez = parts[entrezGeneIdIndex]; } if (entrez!=null && !entrez.matches("-?[0-9]+")) { entrez = null; } if (hugo != null || entrez != null) { if (hugo != null && (hugo.contains("///") || hugo.contains("---"))) { // Ignore gene IDs separated by ///. This indicates that // the line contains information regarding multiple genes, and // we cannot currently handle this. // Also, ignore gene IDs that are specified as ---. This indicates // the line contains information regarding an unknown gene, and // we cannot currently handle this. logger.debug("Ignoring gene ID: " + hugo); } else { List<CanonicalGene> genes = null; if (entrez!=null) { CanonicalGene gene = daoGene.getGene(Long.parseLong(entrez)); if (gene!=null) { genes = Arrays.asList(gene); } } if (genes==null && hugo != null) { if (rppaProfile) { genes = parseRPPAGenes(hugo); } else { // deal with multiple symbols separate by |, use the first one int ix = hugo.indexOf("|"); if (ix>0) { hugo = hugo.substring(0, ix); } genes = daoGene.guessGene(hugo); } } if (genes == null || genes.isEmpty()) { genes = Collections.emptyList(); } // If no target line is specified or we match the target, process. if (targetLine == null || parts[0].equals(targetLine)) { if (genes.isEmpty()) { // if gene is null, we might be dealing with a micro RNA ID if (hugo != null && hugo.toLowerCase().contains("-mir-")) { // if (microRnaIdSet.contains(geneId)) { // storeMicroRnaAlterations(values, daoMicroRnaAlteration, geneId); // numRecordsStored++; // } else { pMonitor.logWarning("microRNA is not known to me: [" + hugo + "]. Ignoring it " + "and all tab-delimited data associated with it!"); // } } else { String gene = (hugo != null) ? hugo : entrez; pMonitor.logWarning("Gene not found: [" + gene + "]. Ignoring it " + "and all tab-delimited data associated with it!"); } } else if (genes.size()==1) { if (discritizedCnaProfile) { long entrezGeneId = genes.get(0).getEntrezGeneId(); int n = values.length; if (n==0) System.out.println(); int i = values[0].equals(""+entrezGeneId) ? 1:0; for (; i<n; i++) { // temporary solution -- change partial deletion back to full deletion. if (values[i].equals(GeneticAlterationType.PARTIAL_DELETION)) { values[i] = GeneticAlterationType.HOMOZYGOUS_DELETION; } if (values[i].equals(GeneticAlterationType.AMPLIFICATION) // || values[i].equals(GeneticAlterationType.GAIN) // || values[i].equals(GeneticAlterationType.ZERO) // || values[i].equals(GeneticAlterationType.HEMIZYGOUS_DELETION) || values[i].equals(GeneticAlterationType.HOMOZYGOUS_DELETION)) { CnaEvent cnaEvent = new CnaEvent(orderedSampleList.get(i), geneticProfileId, entrezGeneId, Short.parseShort(values[i])); if (existingCnaEvents.containsKey(cnaEvent.getEvent())) { cnaEvent.setEventId(existingCnaEvents.get(cnaEvent.getEvent()).getEventId()); DaoCnaEvent.addCaseCnaEvent(cnaEvent, false); } else { cnaEvent.setEventId(++cnaEventId); DaoCnaEvent.addCaseCnaEvent(cnaEvent, true); existingCnaEvents.put(cnaEvent.getEvent(), cnaEvent.getEvent()); } } } } storeGeneticAlterations(values, daoGeneticAlteration, genes.get(0)); numRecordsStored++; } else { for (CanonicalGene gene : genes) { if (gene.isMicroRNA() || rppaProfile) { // for micro rna or protein data, duplicate the data storeGeneticAlterations(values, daoGeneticAlteration, gene); } } } } } } } line = buf.readLine(); } if (MySQLbulkLoader.isBulkLoad()) { MySQLbulkLoader.flushAll(); } if (numRecordsStored == 0) { throw new DaoException ("Something has gone wrong! I did not save any records" + " to the database!"); } } private void storeGeneticAlterations(String[] values, DaoGeneticAlteration daoGeneticAlteration, CanonicalGene gene) throws DaoException { // Check that we have not already imported information regarding this gene. // This is an important check, because a GISTIC or RAE file may contain // multiple rows for the same gene, and we only want to import the first row. if (!importedGeneSet.contains(gene.getEntrezGeneId())) { daoGeneticAlteration.addGeneticAlterations(geneticProfileId, gene.getEntrezGeneId(), values); importedGeneSet.add(gene.getEntrezGeneId()); } } private List<CanonicalGene> parseRPPAGenes(String antibodyWithGene) throws DaoException { DaoGeneOptimized daoGene = DaoGeneOptimized.getInstance(); String[] parts = antibodyWithGene.split("\\|"); String[] symbols = parts[0].split(" "); String arrayId = parts[1]; List<CanonicalGene> genes = new ArrayList<CanonicalGene>(); for (String symbol : symbols) { CanonicalGene gene = daoGene.getNonAmbiguousGene(symbol); if (gene!=null) { genes.add(gene); } } Pattern p = Pattern.compile("(p[STY][0-9]+)"); Matcher m = p.matcher(arrayId); String type, residue; if (!m.find()) { type = "protein_level"; return genes; } else { type = "phosphorylation"; residue = m.group(1); return importPhosphoGene(genes, residue); } } private List<CanonicalGene> importPhosphoGene(List<CanonicalGene> genes, String residue) throws DaoException { DaoGeneOptimized daoGene = DaoGeneOptimized.getInstance(); List<CanonicalGene> phosphoGenes = new ArrayList<CanonicalGene>(); for (CanonicalGene gene : genes) { Set<String> aliases = new HashSet<String>(); aliases.add("rppa-phospho"); aliases.add("phosphoprotein"); aliases.add("phospho"+gene.getStandardSymbol()); String phosphoSymbol = gene.getStandardSymbol()+"_"+residue; CanonicalGene phosphoGene = daoGene.getGene(phosphoSymbol); if (phosphoGene==null) { phosphoGene = new CanonicalGene(phosphoSymbol, aliases); phosphoGene.setType(CanonicalGene.PHOSPHOPROTEIN_TYPE); phosphoGene.setCytoband(gene.getCytoband()); daoGene.addGene(phosphoGene); } phosphoGenes.add(phosphoGene); } return phosphoGenes; } private int getHugoSymbolIndex(String[] headers) { return targetLine==null ? 0 : 1; } private int getEntrezGeneIdIndex(String[] headers) { for (int i = 0; i<headers.length; i++) { if (headers[i].equalsIgnoreCase("Entrez_Gene_Id")) { return i; } } return -1; } private int getStartIndex(String[] headers) { int startIndex = targetLine==null ? 1 : 2; for (int i=startIndex; i<headers.length; i++) { String h = headers[i]; if (!h.equalsIgnoreCase("Gene Symbol") && !h.equalsIgnoreCase("Hugo_Symbol") && !h.equalsIgnoreCase("Entrez_Gene_Id") && !h.equalsIgnoreCase("Locus ID") && !h.equalsIgnoreCase("Cytoband") && !h.equalsIgnoreCase("Composite.Element.Ref")) { return i; } } return startIndex; } private String[] filterOutNormalValues(ArrayList <Integer> filteredSampleIndices, String[] values) { ArrayList<String> filteredValues = new ArrayList<String>(); for (int lc = 0; lc < values.length; lc++) { if (!filteredSampleIndices.contains(lc)) { filteredValues.add(values[lc]); } } return filteredValues.toArray(new String[filteredValues.size()]); } }
core/src/main/java/org/mskcc/cbio/portal/scripts/ImportTabDelimData.java
/* * Copyright (c) 2015 Memorial Sloan-Kettering Cancer Center. * * This library is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY, WITHOUT EVEN THE IMPLIED WARRANTY OF MERCHANTABILITY OR FITNESS * FOR A PARTICULAR PURPOSE. The software and documentation provided hereunder * is on an "as is" basis, and Memorial Sloan-Kettering Cancer Center has no * obligations to provide maintenance, support, updates, enhancements or * modifications. In no event shall Memorial Sloan-Kettering Cancer Center be * liable to any party for direct, indirect, special, incidental or * consequential damages, including lost profits, arising out of the use of this * software and its documentation, even if Memorial Sloan-Kettering Cancer * Center has been advised of the possibility of such damage. */ /* * This file is part of cBioPortal. * * cBioPortal is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.mskcc.cbio.portal.scripts; import org.mskcc.cbio.portal.dao.*; import org.mskcc.cbio.portal.model.*; import org.mskcc.cbio.portal.util.*; import org.apache.commons.lang.ArrayUtils; import org.apache.log4j.Logger; import java.io.*; import java.util.*; import java.util.regex.Matcher; import java.util.regex.Pattern; import org.apache.commons.lang.StringUtils; /** * Code to Import Copy Number Alteration or MRNA Expression Data. * * @author Ethan Cerami */ public class ImportTabDelimData { private HashSet<Long> importedGeneSet = new HashSet<Long>(); private static Logger logger = Logger.getLogger(ImportTabDelimData.class); /** * Barry Target Line: A constant currently used to indicate the RAE method. */ public static final String BARRY_TARGET = "Barry"; /** * Consensus Target Line: A constant currently used to indicate consensus of multiple * CNA calling algorithms. */ public static final String CONSENSUS_TARGET = "consensus"; private ProgressMonitor pMonitor; private File mutationFile; private String targetLine; private int geneticProfileId; private GeneticProfile geneticProfile; private HashSet<String> microRnaIdSet; /** * Constructor. * * @param dataFile Data File containing CNA data. * @param targetLine The line we want to import. * If null, all lines are imported. * @param geneticProfileId GeneticProfile ID. * @param pMonitor Progress Monitor Object. */ public ImportTabDelimData(File dataFile, String targetLine, int geneticProfileId, ProgressMonitor pMonitor) { this.mutationFile = dataFile; this.targetLine = targetLine; this.geneticProfileId = geneticProfileId; this.pMonitor = pMonitor; } /** * Constructor. * * @param dataFile Data File containing CNA data. * @param geneticProfileId GeneticProfile ID. * @param pMonitor Progress Monitor Object. */ public ImportTabDelimData(File dataFile, int geneticProfileId, ProgressMonitor pMonitor) { this.mutationFile = dataFile; this.geneticProfileId = geneticProfileId; this.pMonitor = pMonitor; } /** * Import the CNA Data. * * @throws IOException IO Error. * @throws DaoException Database Error. */ public void importData() throws IOException, DaoException { DaoMicroRna daoMicroRna = new DaoMicroRna(); microRnaIdSet = daoMicroRna.getEntireSet(); geneticProfile = DaoGeneticProfile.getGeneticProfileById(geneticProfileId); FileReader reader = new FileReader(mutationFile); BufferedReader buf = new BufferedReader(reader); String headerLine = buf.readLine(); String parts[] = headerLine.split("\t"); int sampleStartIndex = getStartIndex(parts); int hugoSymbolIndex = getHugoSymbolIndex(parts); int entrezGeneIdIndex = getEntrezGeneIdIndex(parts); String sampleIds[]; // Branch, depending on targetLine setting if (targetLine == null) { sampleIds = new String[parts.length - sampleStartIndex]; System.arraycopy(parts, sampleStartIndex, sampleIds, 0, parts.length - sampleStartIndex); } else { sampleIds = new String[parts.length - sampleStartIndex]; System.arraycopy(parts, sampleStartIndex, sampleIds, 0, parts.length - sampleStartIndex); } ImportDataUtil.addPatients(sampleIds, geneticProfileId); ImportDataUtil.addSamples(sampleIds, geneticProfileId); pMonitor.setCurrentMessage("Import tab delimited data for " + sampleIds.length + " samples."); // Add Samples to the Database ArrayList <Integer> orderedSampleList = new ArrayList<Integer>(); ArrayList <Integer> filteredSampleIndices = new ArrayList<Integer>(); for (int i = 0; i < sampleIds.length; i++) { Sample sample = DaoSample.getSampleByCancerStudyAndSampleId(geneticProfile.getCancerStudyId(), StableIdUtil.getSampleId(sampleIds[i])); if (sample == null) { assert StableIdUtil.isNormal(sampleIds[i]); filteredSampleIndices.add(i); continue; } if (!DaoSampleProfile.sampleExistsInGeneticProfile(sample.getInternalId(), geneticProfileId)) { DaoSampleProfile.addSampleProfile(sample.getInternalId(), geneticProfileId); } orderedSampleList.add(sample.getInternalId()); } DaoGeneticProfileSamples.addGeneticProfileSamples(geneticProfileId, orderedSampleList); String line = buf.readLine(); int numRecordsStored = 0; DaoGeneOptimized daoGene = DaoGeneOptimized.getInstance(); DaoGeneticAlteration daoGeneticAlteration = DaoGeneticAlteration.getInstance(); boolean discritizedCnaProfile = geneticProfile!=null && geneticProfile.getGeneticAlterationType() == GeneticAlterationType.COPY_NUMBER_ALTERATION && geneticProfile.showProfileInAnalysisTab(); boolean rppaProfile = geneticProfile!=null && geneticProfile.getGeneticAlterationType() == GeneticAlterationType.PROTEIN_LEVEL && "Composite.Element.Ref".equalsIgnoreCase(parts[0]); Map<CnaEvent.Event, CnaEvent.Event> existingCnaEvents = null; long cnaEventId = 0; if (discritizedCnaProfile) { existingCnaEvents = new HashMap<CnaEvent.Event, CnaEvent.Event>(); for (CnaEvent.Event event : DaoCnaEvent.getAllCnaEvents()) { existingCnaEvents.put(event, event); } cnaEventId = DaoCnaEvent.getLargestCnaEventId(); MySQLbulkLoader.bulkLoadOn(); } int lenParts = parts.length; while (line != null) { if (pMonitor != null) { pMonitor.incrementCurValue(); ConsoleUtil.showProgress(pMonitor); } // Ignore lines starting with # if (!line.startsWith("#") && line.trim().length() > 0) { parts = line.split("\t",-1); if (parts.length>lenParts) { if (line.split("\t").length>lenParts) { System.err.println("The following line has more fields (" + parts.length + ") than the headers(" + lenParts + "): \n"+parts[0]); } } String values[] = (String[]) ArrayUtils.subarray(parts, sampleStartIndex, parts.length>lenParts?lenParts:parts.length); values = filterOutNormalValues(filteredSampleIndices, values); String hugo = parts[hugoSymbolIndex]; if (hugo!=null && hugo.isEmpty()) { hugo = null; } String entrez = null; if (entrezGeneIdIndex!=-1) { entrez = parts[entrezGeneIdIndex]; } if (entrez!=null && !entrez.matches("-?[0-9]+")) { entrez = null; } if (hugo != null || entrez != null) { if (hugo != null && (hugo.contains("///") || hugo.contains("---"))) { // Ignore gene IDs separated by ///. This indicates that // the line contains information regarding multiple genes, and // we cannot currently handle this. // Also, ignore gene IDs that are specified as ---. This indicates // the line contains information regarding an unknown gene, and // we cannot currently handle this. logger.debug("Ignoring gene ID: " + hugo); } else { List<CanonicalGene> genes = null; if (entrez!=null) { CanonicalGene gene = daoGene.getGene(Long.parseLong(entrez)); if (gene!=null) { genes = Arrays.asList(gene); } } if (genes==null && hugo != null) { if (rppaProfile) { genes = parseRPPAGenes(hugo); } else { // deal with multiple symbols separate by |, use the first one int ix = hugo.indexOf("|"); if (ix>0) { hugo = hugo.substring(0, ix); } genes = daoGene.guessGene(hugo); } } if (genes == null || genes.isEmpty()) { genes = Collections.emptyList(); } // If no target line is specified or we match the target, process. if (targetLine == null || parts[0].equals(targetLine)) { if (genes.isEmpty()) { // if gene is null, we might be dealing with a micro RNA ID if (hugo != null && hugo.toLowerCase().contains("-mir-")) { // if (microRnaIdSet.contains(geneId)) { // storeMicroRnaAlterations(values, daoMicroRnaAlteration, geneId); // numRecordsStored++; // } else { pMonitor.logWarning("microRNA is not known to me: [" + hugo + "]. Ignoring it " + "and all tab-delimited data associated with it!"); // } } else { String gene = (hugo != null) ? hugo : entrez; pMonitor.logWarning("Gene not found: [" + gene + "]. Ignoring it " + "and all tab-delimited data associated with it!"); } } else if (genes.size()==1) { if (discritizedCnaProfile) { long entrezGeneId = genes.get(0).getEntrezGeneId(); int n = values.length; if (n==0) System.out.println(); int i = values[0].equals(""+entrezGeneId) ? 1:0; for (; i<n; i++) { // temporary solution -- change partial deletion back to full deletion. if (values[i].equals(GeneticAlterationType.PARTIAL_DELETION)) { values[i] = GeneticAlterationType.HOMOZYGOUS_DELETION; } if (values[i].equals(GeneticAlterationType.AMPLIFICATION) // || values[i].equals(GeneticAlterationType.GAIN) // || values[i].equals(GeneticAlterationType.ZERO) // || values[i].equals(GeneticAlterationType.HEMIZYGOUS_DELETION) || values[i].equals(GeneticAlterationType.HOMOZYGOUS_DELETION)) { CnaEvent cnaEvent = new CnaEvent(orderedSampleList.get(i), geneticProfileId, entrezGeneId, Short.parseShort(values[i])); if (existingCnaEvents.containsKey(cnaEvent.getEvent())) { cnaEvent.setEventId(existingCnaEvents.get(cnaEvent.getEvent()).getEventId()); DaoCnaEvent.addCaseCnaEvent(cnaEvent, false); } else { cnaEvent.setEventId(++cnaEventId); DaoCnaEvent.addCaseCnaEvent(cnaEvent, true); existingCnaEvents.put(cnaEvent.getEvent(), cnaEvent.getEvent()); } } } } storeGeneticAlterations(values, daoGeneticAlteration, genes.get(0)); numRecordsStored++; } else { for (CanonicalGene gene : genes) { if (gene.isMicroRNA() || rppaProfile) { // for micro rna or protein data, duplicate the data storeGeneticAlterations(values, daoGeneticAlteration, gene); } } } } } } } line = buf.readLine(); } if (MySQLbulkLoader.isBulkLoad()) { MySQLbulkLoader.flushAll(); } if (numRecordsStored == 0) { throw new DaoException ("Something has gone wrong! I did not save any records" + " to the database!"); } } private void storeGeneticAlterations(String[] values, DaoGeneticAlteration daoGeneticAlteration, CanonicalGene gene) throws DaoException { // Check that we have not already imported information regarding this gene. // This is an important check, because a GISTIC or RAE file may contain // multiple rows for the same gene, and we only want to import the first row. if (!importedGeneSet.contains(gene.getEntrezGeneId())) { daoGeneticAlteration.addGeneticAlterations(geneticProfileId, gene.getEntrezGeneId(), values); importedGeneSet.add(gene.getEntrezGeneId()); } } private List<CanonicalGene> parseRPPAGenes(String antibodyWithGene) throws DaoException { DaoGeneOptimized daoGene = DaoGeneOptimized.getInstance(); String[] parts = antibodyWithGene.split("\\|"); String[] symbols = parts[0].split(" "); String arrayId = parts[1]; List<CanonicalGene> genes = new ArrayList<CanonicalGene>(); for (String symbol : symbols) { CanonicalGene gene = daoGene.getNonAmbiguousGene(symbol); if (gene!=null) { genes.add(gene); } } Pattern p = Pattern.compile("(p[STY][0-9]+)"); Matcher m = p.matcher(arrayId); String type, residue; if (!m.find()) { type = "protein_level"; return genes; } else { type = "phosphorylation"; residue = m.group(1); return importPhosphoGene(genes, residue); } } private List<CanonicalGene> importPhosphoGene(List<CanonicalGene> genes, String residue) throws DaoException { DaoGeneOptimized daoGene = DaoGeneOptimized.getInstance(); List<CanonicalGene> phosphoGenes = new ArrayList<CanonicalGene>(); for (CanonicalGene gene : genes) { Set<String> aliases = new HashSet<String>(); aliases.add("rppa-phospho"); aliases.add("phosphoprotein"); aliases.add("phospho"+gene.getStandardSymbol()); String phosphoSymbol = gene.getStandardSymbol()+"_"+residue; CanonicalGene phosphoGene = daoGene.getGene(phosphoSymbol); if (phosphoGene==null) { phosphoGene = new CanonicalGene(phosphoSymbol, aliases); phosphoGene.setType(CanonicalGene.PHOSPHOPROTEIN_TYPE); daoGene.addGene(phosphoGene); } phosphoGenes.add(phosphoGene); } return phosphoGenes; } private int getHugoSymbolIndex(String[] headers) { return targetLine==null ? 0 : 1; } private int getEntrezGeneIdIndex(String[] headers) { for (int i = 0; i<headers.length; i++) { if (headers[i].equalsIgnoreCase("Entrez_Gene_Id")) { return i; } } return -1; } private int getStartIndex(String[] headers) { int startIndex = targetLine==null ? 1 : 2; for (int i=startIndex; i<headers.length; i++) { String h = headers[i]; if (!h.equalsIgnoreCase("Gene Symbol") && !h.equalsIgnoreCase("Hugo_Symbol") && !h.equalsIgnoreCase("Entrez_Gene_Id") && !h.equalsIgnoreCase("Locus ID") && !h.equalsIgnoreCase("Cytoband") && !h.equalsIgnoreCase("Composite.Element.Ref")) { return i; } } return startIndex; } private String[] filterOutNormalValues(ArrayList <Integer> filteredSampleIndices, String[] values) { ArrayList<String> filteredValues = new ArrayList<String>(); for (int lc = 0; lc < values.length; lc++) { if (!filteredSampleIndices.contains(lc)) { filteredValues.add(values[lc]); } } return filteredValues.toArray(new String[filteredValues.size()]); } }
add cytoband for phosphogenes
core/src/main/java/org/mskcc/cbio/portal/scripts/ImportTabDelimData.java
add cytoband for phosphogenes
Java
agpl-3.0
a534ca90f7dc37b2e0b06f101de46f2644c43e64
0
CeON/CoAnSys,kuraju/CoAnSys,CeON/CoAnSys,acz-icm/coansys,acz-icm/coansys,kuraju/CoAnSys,pdendek/CoAnSys,kuraju/CoAnSys,CeON/CoAnSys,CeON/CoAnSys,acz-icm/coansys,pdendek/CoAnSys,CeON/CoAnSys,kuraju/CoAnSys,pdendek/CoAnSys,pdendek/CoAnSys,pdendek/CoAnSys,acz-icm/coansys,kuraju/CoAnSys,acz-icm/coansys
/* * (C) 2010-2012 ICM UW. All rights reserved. */ package pl.edu.icm.coansys.classification.documents.pig.extractors; import java.io.IOException; import java.util.ArrayList; import java.util.Map; import org.apache.pig.EvalFunc; import org.apache.pig.data.DataBag; import org.apache.pig.data.DefaultDataBag; import org.apache.pig.data.Tuple; import org.apache.pig.data.TupleFactory; /** * * @author pdendek */ public class EXTRACT_BAG_FROM_MAP extends EvalFunc<DataBag> { // @Override // public Schema outputSchema(Schema p_input){ // try{ // return Schema.generateNestedSchema(DataType.BAG, // DataType.CHARARRAY); // }catch(FrontendException e){ // throw new IllegalStateException(e); // } // } @Override public DataBag exec(Tuple input) throws IOException { try { Map<String, Object> map; String key; Object raw; try { map = (Map<String, Object>) input.get(0); key = (String) input.get(1); raw = map.get(key); } catch (Exception e) { System.out.println("No map or key/The key does not occure in the given map"); return null; } if (raw != null) { DataBag ret = new DefaultDataBag(); // System.out.println("-------------1------------"); String vals = raw.toString(); if (vals.length() <= 2) { return null; } String[] valsA = vals.substring(1, vals.length() - 1).split(","); for (final String v : valsA) { if (v.length() <= 2) { continue; } ret.add(TupleFactory.getInstance().newTuple(new ArrayList<String>() { { add(v.substring(1, v.length() - 1)); } })); } return ret; } return null; } catch (Exception e) { // Throwing an exception will cause the task to fail. throw new RuntimeException("Error while parsing DocumentMetadata" + e); } } }
document-classification/src/main/java/pl/edu/icm/coansys/classification/documents/pig/extractors/EXTRACT_BAG_FROM_MAP.java
/* * (C) 2010-2012 ICM UW. All rights reserved. */ package pl.edu.icm.coansys.classification.documents.pig.extractors; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Map; import javax.ws.rs.PUT; import org.apache.commons.collections.Bag; import org.apache.pig.EvalFunc; import org.apache.pig.data.DataBag; import org.apache.pig.data.DataType; import org.apache.pig.data.DefaultDataBag; import org.apache.pig.data.Tuple; import org.apache.pig.data.TupleFactory; import org.apache.pig.impl.logicalLayer.FrontendException; import org.apache.pig.impl.logicalLayer.schema.Schema; /** * * @author pdendek */ public class EXTRACT_BAG_FROM_MAP extends EvalFunc<DataBag> { // @Override // public Schema outputSchema(Schema p_input){ // try{ // return Schema.generateNestedSchema(DataType.BAG, // DataType.CHARARRAY); // }catch(FrontendException e){ // throw new IllegalStateException(e); // } // } static String vv; @Override public DataBag exec(Tuple input) throws IOException { try { Map<String, Object> map; String key; Object raw; try{ map = (Map<String, Object>) input.get(0); key = (String) input.get(1); raw = map.get(key); }catch(Exception e){ System.out.println("No map or key/The key does not occure in the given map"); return null; } if(raw!=null){ DataBag ret = new DefaultDataBag(); // System.out.println("-------------1------------"); String vals = raw.toString(); if(vals.length()<=2){ return null; } String[] valsA = vals.substring(1,vals.length()-1).split(","); for(String v : valsA){ vv=v; if(vv.length()<=2){ continue; } ret.add(TupleFactory.getInstance().newTuple(new ArrayList<String>(){{ add(vv.substring(1,vv.length()-1)); }})); } return ret; } return null; } catch (Exception e) { // Throwing an exception will cause the task to fail. throw new RuntimeException("Error while parsing DocumentMetadata"+e); } } }
- Changed static field to final local variable. - Organized imports.
document-classification/src/main/java/pl/edu/icm/coansys/classification/documents/pig/extractors/EXTRACT_BAG_FROM_MAP.java
- Changed static field to final local variable. - Organized imports.
Java
agpl-3.0
be81dd7d891eb913e340a6b864e4426dd71cfba4
0
rdkgit/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,tdefilip/opennms,rdkgit/opennms,tdefilip/opennms,aihua/opennms,roskens/opennms-pre-github,aihua/opennms,tdefilip/opennms,aihua/opennms,aihua/opennms,roskens/opennms-pre-github,aihua/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,rdkgit/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,rdkgit/opennms,rdkgit/opennms,tdefilip/opennms,tdefilip/opennms,aihua/opennms,tdefilip/opennms,aihua/opennms,tdefilip/opennms,aihua/opennms,aihua/opennms,roskens/opennms-pre-github,roskens/opennms-pre-github,rdkgit/opennms,tdefilip/opennms,rdkgit/opennms,rdkgit/opennms,roskens/opennms-pre-github,rdkgit/opennms,tdefilip/opennms,rdkgit/opennms
// // This file is part of the OpenNMS(R) Application. // // OpenNMS(R) is Copyright (C) 2006 The OpenNMS Group, Inc. All rights reserved. // OpenNMS(R) is a derivative work, containing both original code, included code and modified // code that was published under the GNU General Public License. Copyrights for modified // and included code are below. // // OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc. // // Modifications: // // 2006 Aug 15: Use generics for collections, add a log message. - [email protected] // // Original code base Copyright (C) 1999-2001 Oculan Corp. All rights reserved. // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation; either version 2 of the License, or // (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. // // For more information contact: // OpenNMS Licensing <[email protected]> // http://www.opennms.org/ // http://www.opennms.com/ // package org.opennms.netmgt.collectd; import java.util.Collection; import java.util.Map; import java.util.Set; import java.util.TreeMap; import org.opennms.netmgt.snmp.SnmpInstId; public class IfResourceType extends ResourceType { private TreeMap<Integer, IfInfo> m_ifMap; public IfResourceType(CollectionAgent agent, OnmsSnmpCollection snmpCollection) { super(agent, snmpCollection); m_ifMap = new TreeMap<Integer, IfInfo>(); addKnownIfResources(); } private Map<Integer, IfInfo> getIfMap() { return m_ifMap; } void addIfInfo(IfInfo ifInfo) { getIfMap().put(new Integer(ifInfo.getIndex()), ifInfo); } void addKnownIfResources() { Set<IfInfo> ifInfos = getAgent().getSnmpInterfaceInfo(this); for(IfInfo ifInfo : ifInfos) { addIfInfo(ifInfo); } } IfInfo getIfInfo(int ifIndex) { return getIfMap().get(new Integer(ifIndex)); } public Collection<IfInfo> getIfInfos() { return getIfMap().values(); } public CollectionResource findResource(SnmpInstId inst) { return getIfInfo(inst.toInt()); } public CollectionResource findAliasedResource(SnmpInstId inst, String ifAlias) { // This is here for completeness but it should not get called from here. // findResource should be called instead log().debug("findAliasedResource: Should not get called from IfResourceType"); return null; } public Collection<IfInfo> getResources() { return m_ifMap.values(); } @Override protected Collection<AttributeType> loadAttributeTypes() { return getCollection().getIndexedAttributeTypesForResourceType(getAgent(), this); } }
opennms-services/src/main/java/org/opennms/netmgt/collectd/IfResourceType.java
// // This file is part of the OpenNMS(R) Application. // // OpenNMS(R) is Copyright (C) 2006 The OpenNMS Group, Inc. All rights reserved. // OpenNMS(R) is a derivative work, containing both original code, included code and modified // code that was published under the GNU General Public License. Copyrights for modified // and included code are below. // // OpenNMS(R) is a registered trademark of The OpenNMS Group, Inc. // // Modifications: // // 2006 Aug 15: Use generics for collections, add a log message. - [email protected] // // Original code base Copyright (C) 1999-2001 Oculan Corp. All rights reserved. // // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation; either version 2 of the License, or // (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. // // For more information contact: // OpenNMS Licensing <[email protected]> // http://www.opennms.org/ // http://www.opennms.com/ // package org.opennms.netmgt.collectd; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedHashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TreeMap; import org.opennms.netmgt.config.DataCollectionConfig; import org.opennms.netmgt.snmp.SnmpInstId; public class IfResourceType extends ResourceType { private TreeMap<Integer, IfInfo> m_ifMap; public IfResourceType(CollectionAgent agent, OnmsSnmpCollection snmpCollection) { super(agent, snmpCollection); System.err.println("Creating ifResourceType for agent"+agent); m_ifMap = new TreeMap<Integer, IfInfo>(); addKnownIfResources(); } private Map<Integer, IfInfo> getIfMap() { return m_ifMap; } void addIfInfo(IfInfo ifInfo) { getIfMap().put(new Integer(ifInfo.getIndex()), ifInfo); } void addKnownIfResources() { Set<IfInfo> ifInfos = getAgent().getSnmpInterfaceInfo(this); for(IfInfo ifInfo : ifInfos) { addIfInfo(ifInfo); } } IfInfo getIfInfo(int ifIndex) { return getIfMap().get(new Integer(ifIndex)); } public Collection<IfInfo> getIfInfos() { return getIfMap().values(); } public CollectionResource findResource(SnmpInstId inst) { return getIfInfo(inst.toInt()); } public CollectionResource findAliasedResource(SnmpInstId inst, String ifAlias) { // This is here for completeness but it should not get called from here. // findResource should be called instead log().debug("findAliasedResource: Should not get called from IfResourceType"); return null; } public Collection<IfInfo> getResources() { return m_ifMap.values(); } @Override protected Collection<AttributeType> loadAttributeTypes() { return getCollection().getIndexedAttributeTypesForResourceType(getAgent(), this); } }
oops... remove extraneous debug stmt
opennms-services/src/main/java/org/opennms/netmgt/collectd/IfResourceType.java
oops... remove extraneous debug stmt
Java
lgpl-2.1
55de827974ed26d93d1c15cee78cc1d4b3b40957
0
spotbugs/spotbugs,spotbugs/spotbugs,KengoTODA/spotbugs,KengoTODA/spotbugs,KengoTODA/spotbugs,sewe/spotbugs,spotbugs/spotbugs,KengoTODA/spotbugs,spotbugs/spotbugs,johnscancella/spotbugs,spotbugs/spotbugs,sewe/spotbugs,sewe/spotbugs,johnscancella/spotbugs,johnscancella/spotbugs,sewe/spotbugs,johnscancella/spotbugs
/* * FindBugs - Find bugs in Java programs * Copyright (C) 2003,2004 University of Maryland * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package edu.umd.cs.findbugs.detect; import java.text.DateFormat; import java.util.Calendar; import java.util.Collection; import org.apache.bcel.classfile.Code; import org.apache.bcel.classfile.Constant; import org.apache.bcel.classfile.ConstantClass; import org.apache.bcel.classfile.ConstantPool; import org.apache.bcel.classfile.Field; import org.apache.bcel.classfile.JavaClass; import org.apache.bcel.classfile.Method; import org.apache.bcel.generic.ObjectType; import edu.umd.cs.findbugs.BugAccumulator; import edu.umd.cs.findbugs.BugInstance; import edu.umd.cs.findbugs.BugReporter; import edu.umd.cs.findbugs.OpcodeStack; import edu.umd.cs.findbugs.SystemProperties; import edu.umd.cs.findbugs.ba.AnalysisContext; import edu.umd.cs.findbugs.ba.CFG; import edu.umd.cs.findbugs.ba.CFGBuilderException; import edu.umd.cs.findbugs.ba.DataflowAnalysisException; import edu.umd.cs.findbugs.ba.Location; import edu.umd.cs.findbugs.ba.LockDataflow; import edu.umd.cs.findbugs.ba.LockSet; import edu.umd.cs.findbugs.ba.XField; import edu.umd.cs.findbugs.ba.ch.Subtypes2; import edu.umd.cs.findbugs.bcel.OpcodeStackDetector; import edu.umd.cs.findbugs.classfile.ClassDescriptor; import edu.umd.cs.findbugs.classfile.DescriptorFactory; /** * Detector for static fields of type {@link java.util.Calendar} or * {@link java.text.DateFormat} and their subclasses. Because {@link Calendar} * is unsafe for multithreaded use, static fields look suspicous. To work * correctly, all access would need to be synchronized by the client which * cannot be guaranteed. * * @author Daniel Schneller */ public class StaticCalendarDetector extends OpcodeStackDetector { /** External Debug flag set? */ private static final boolean DEBUG = Boolean.getBoolean("debug.staticcal"); /** * External flag to determine whether to skip the test for synchronized * blocks (default: if a call on a static Calendar or DateFormat is detected * inside a synchronizationb block, it will not be reported). Setting this * to <code>true</code> will report method calls on static fields if they * are in a synchronized block. As the check currently does not take into * account the lock's mutex it may be useful to switch allow */ private static final String PROP_SKIP_SYNCHRONIZED_CHECK = "staticcal.skipsynccheck"; /** The reporter to report to */ final private BugReporter reporter; final private BugAccumulator bugAccumulator; /** Name of the class being inspected */ private String currentClass; /** * {@link ObjectType} for {@link java.util.Calendar} */ private final ClassDescriptor calendarType = DescriptorFactory.createClassDescriptor("java/util/Calendar"); /** * {@link ObjectType} for {@link java.text.DateFormat} */ private final ClassDescriptor dateFormatType = DescriptorFactory.createClassDescriptor("java/text/DateFormat"); /** Stores the current method */ private Method currentMethod = null; /** Stores current Control Flow Graph */ private CFG currentCFG; /** Stores current LDF */ private LockDataflow currentLockDataFlow; /** * Creates a new instance of this Detector. * * @param aReporter * {@link BugReporter} instance to report found problems to. */ public StaticCalendarDetector(BugReporter aReporter) { reporter = aReporter; bugAccumulator = new BugAccumulator(reporter); } Subtypes2 subtypes2 = AnalysisContext.currentAnalysisContext().getSubtypes2(); private boolean sawDateClass; /** * Remembers the class name and resets temporary fields. */ @Override public void visit(JavaClass someObj) { currentClass = someObj.getClassName(); currentMethod = null; currentCFG = null; currentLockDataFlow = null; sawDateClass = false; } @Override public void visit(ConstantPool pool) { for(Constant constant : pool.getConstantPool()) { if (constant instanceof ConstantClass) { ConstantClass cc = (ConstantClass) constant; String className = cc.getBytes(pool); if (className.equals("java/util/Calendar") || className.equals("java/text/DateFormat")) sawDateClass = true; } } } /** * Checks if the visited field is of type {@link Calendar} or * {@link DateFormat} or a subclass of either one. If so and the field is * static it is suspicious and will be reported. */ @Override public void visit(Field aField) { if (!aField.isStatic()) return; if (!aField.isPublic() && !aField.isProtected()) return; ClassDescriptor classOfField = DescriptorFactory.createClassDescriptorFromFieldSignature(aField.getSignature()); String tBugType = null; int priority = aField.isPublic() && getThisClass().isPublic() ? HIGH_PRIORITY : NORMAL_PRIORITY; if (classOfField != null) try { if (subtypes2.isSubtype(classOfField, calendarType)) { tBugType = "STCAL_STATIC_CALENDAR_INSTANCE"; priority++; } else if (subtypes2.isSubtype(classOfField,dateFormatType)) { tBugType = "STCAL_STATIC_SIMPLE_DATE_FORMAT_INSTANCE"; } if (tBugType != null) { reporter.reportBug(new BugInstance(this, tBugType, priority).addClass(currentClass).addField(this)); } } catch (ClassNotFoundException e) { AnalysisContext.reportMissingClass(e); } } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.visitclass.BetterVisitor#visitMethod(org.apache.bcel.classfile.Method) */ @Override public void visitMethod(Method obj) { if (sawDateClass) try { super.visitMethod(obj); currentMethod = obj; currentLockDataFlow = getClassContext().getLockDataflow(currentMethod); currentCFG = getClassContext().getCFG(currentMethod); } catch (CFGBuilderException e) { reporter.logError("Synchronization check in Static Calendar Detector caught an error.", e); } catch (DataflowAnalysisException e) { reporter.logError("Synchronization check in Static Calendar Detector caught an error.", e); } } @Override public void visit(Code obj) { if (sawDateClass) { super.visit(obj); bugAccumulator.reportAccumulatedBugs(); } } /** * Checks for method invocations ({@link org.apache.bcel.generic.INVOKEVIRTUAL}) * call on a static {@link Calendar} or {@link DateFormat} fields. The * {@link OpcodeStack} is used to determine if an invocation is done on such * a static field. * * @param seen * An opcode to be analyzed * @see edu.umd.cs.findbugs.visitclass.DismantleBytecode#sawOpcode(int) */ @Override public void sawOpcode(int seen) { // we are only interested in method calls if (seen != INVOKEVIRTUAL) { return; } try { String className = getClassConstantOperand(); if (className.startsWith("[")) { // Ignore array classes return; } ClassDescriptor cDesc = DescriptorFactory.createClassDescriptor(className); // if it is not compatible with Calendar or DateFormat, we are not // interested anymore boolean isCalendar = subtypes2.isSubtype(cDesc, calendarType); boolean isDateFormat = subtypes2.isSubtype(cDesc, dateFormatType); if (!isCalendar && !isDateFormat) { return; } // determine the number of arguments the method expects int numArguments = getNumberArguments(getSigConstantOperand()); // go back on the stack to find what the receiver of the method is OpcodeStack.Item invokedOn = stack.getStackItem(numArguments); XField field = invokedOn.getXField(); // find out, if the field is static. if not, we are not interested // anymore if (field == null || !field.isStatic()) { return; } if (getMethodName().equals("<clinit>") && field.getClassName().equals(getDottedClassName())) return; String invokedName = getNameConstantOperand(); if (invokedName.startsWith("get")) return; if (invokedName.equals("equals") && numArguments == 1) { OpcodeStack.Item passedAsArgument = stack.getStackItem(0); field = passedAsArgument.getXField(); if (field == null || !field.isStatic()) { return; } } if (!SystemProperties.getBoolean(PROP_SKIP_SYNCHRONIZED_CHECK)) { // check synchronization try { if (currentMethod != null && currentLockDataFlow != null && currentCFG != null) { Collection<Location> tLocations = currentCFG.getLocationsContainingInstructionWithOffset(getPC()); for (Location tLoc : tLocations) { LockSet lockSet = currentLockDataFlow.getFactAtLocation(tLoc); if (lockSet.getNumLockedObjects() > 0) { // within a synchronized block return; } } } } catch (DataflowAnalysisException e) { reporter.logError("Synchronization check in Static Calendar Detector caught an error.", e); } } // if we get here, we want to generate a report, depending on the // type String tBugType; if (isCalendar) { tBugType = "STCAL_INVOKE_ON_STATIC_CALENDAR_INSTANCE"; } else if (isDateFormat) { tBugType = "STCAL_INVOKE_ON_STATIC_DATE_FORMAT_INSTANCE"; } else throw new IllegalStateException("Not possible"); int priority = NORMAL_PRIORITY; if (invokedName.startsWith("set") || invokedName.equals("format") || invokedName.equals("add") || invokedName.equals("clear") || invokedName.equals("parse")) priority--; bugAccumulator.accumulateBug(new BugInstance(this, tBugType, priority).addClassAndMethod(this).addCalledMethod(this) .addOptionalField(field), this); } catch (ClassNotFoundException e) { AnalysisContext.reportMissingClass(e); } } }
findbugs/src/java/edu/umd/cs/findbugs/detect/StaticCalendarDetector.java
/* * FindBugs - Find bugs in Java programs * Copyright (C) 2003,2004 University of Maryland * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ package edu.umd.cs.findbugs.detect; import java.text.DateFormat; import java.util.Calendar; import java.util.Collection; import org.apache.bcel.classfile.Code; import org.apache.bcel.classfile.Constant; import org.apache.bcel.classfile.ConstantClass; import org.apache.bcel.classfile.ConstantPool; import org.apache.bcel.classfile.Field; import org.apache.bcel.classfile.JavaClass; import org.apache.bcel.classfile.Method; import org.apache.bcel.generic.ObjectType; import edu.umd.cs.findbugs.BugAccumulator; import edu.umd.cs.findbugs.BugInstance; import edu.umd.cs.findbugs.BugReporter; import edu.umd.cs.findbugs.OpcodeStack; import edu.umd.cs.findbugs.SystemProperties; import edu.umd.cs.findbugs.ba.AnalysisContext; import edu.umd.cs.findbugs.ba.CFG; import edu.umd.cs.findbugs.ba.CFGBuilderException; import edu.umd.cs.findbugs.ba.DataflowAnalysisException; import edu.umd.cs.findbugs.ba.Location; import edu.umd.cs.findbugs.ba.LockDataflow; import edu.umd.cs.findbugs.ba.LockSet; import edu.umd.cs.findbugs.ba.XField; import edu.umd.cs.findbugs.ba.ch.Subtypes2; import edu.umd.cs.findbugs.bcel.OpcodeStackDetector; import edu.umd.cs.findbugs.classfile.ClassDescriptor; import edu.umd.cs.findbugs.classfile.DescriptorFactory; /** * Detector for static fields of type {@link java.util.Calendar} or * {@link java.text.DateFormat} and their subclasses. Because {@link Calendar} * is unsafe for multithreaded use, static fields look suspicous. To work * correctly, all access would need to be synchronized by the client which * cannot be guaranteed. * * @author Daniel Schneller */ public class StaticCalendarDetector extends OpcodeStackDetector { /** External Debug flag set? */ private static final boolean DEBUG = Boolean.getBoolean("debug.staticcal"); /** * External flag to determine whether to skip the test for synchronized * blocks (default: if a call on a static Calendar or DateFormat is detected * inside a synchronizationb block, it will not be reported). Setting this * to <code>true</code> will report method calls on static fields if they * are in a synchronized block. As the check currently does not take into * account the lock's mutex it may be useful to switch allow */ private static final String PROP_SKIP_SYNCHRONIZED_CHECK = "staticcal.skipsynccheck"; /** The reporter to report to */ final private BugReporter reporter; final private BugAccumulator bugAccumulator; /** Name of the class being inspected */ private String currentClass; /** * {@link ObjectType} for {@link java.util.Calendar} */ private final ClassDescriptor calendarType = DescriptorFactory.createClassDescriptor("java/util/Calendar"); /** * {@link ObjectType} for {@link java.text.DateFormat} */ private final ClassDescriptor dateFormatType = DescriptorFactory.createClassDescriptor("java/text/DateFormat"); /** Stores the current method */ private Method currentMethod = null; /** Stores current Control Flow Graph */ private CFG currentCFG; /** Stores current LDF */ private LockDataflow currentLockDataFlow; /** * Creates a new instance of this Detector. * * @param aReporter * {@link BugReporter} instance to report found problems to. */ public StaticCalendarDetector(BugReporter aReporter) { reporter = aReporter; bugAccumulator = new BugAccumulator(reporter); } Subtypes2 subtypes2 = AnalysisContext.currentAnalysisContext().getSubtypes2(); private boolean sawDateClass; /** * Remembers the class name and resets temporary fields. */ @Override public void visit(JavaClass someObj) { currentClass = someObj.getClassName(); currentMethod = null; currentCFG = null; currentLockDataFlow = null; sawDateClass = false; } @Override public void visit(ConstantPool pool) { for(Constant constant : pool.getConstantPool()) { if (constant instanceof ConstantClass) { ConstantClass cc = (ConstantClass) constant; String className = cc.getBytes(pool); if (className.equals("java/util/Calendar") || className.equals("java/text/DateFormat")) sawDateClass = true; } } } /** * Checks if the visited field is of type {@link Calendar} or * {@link DateFormat} or a subclass of either one. If so and the field is * static it is suspicious and will be reported. */ @Override public void visit(Field aField) { if (!aField.isStatic()) return; if (!aField.isPublic() && !aField.isProtected()) return; ClassDescriptor classOfField = DescriptorFactory.createClassDescriptorFromFieldSignature(aField.getSignature()); String tBugType = null; if (classOfField != null) try { if (subtypes2.isSubtype(classOfField, calendarType)) { tBugType = "STCAL_STATIC_CALENDAR_INSTANCE"; } else if (subtypes2.isSubtype(classOfField,dateFormatType)) { tBugType = "STCAL_STATIC_SIMPLE_DATE_FORMAT_INSTANCE"; } if (tBugType != null) { reporter.reportBug(new BugInstance(this, tBugType, aField.isPublic() ? HIGH_PRIORITY : NORMAL_PRIORITY).addClass(currentClass).addField(this)); } } catch (ClassNotFoundException e) { AnalysisContext.reportMissingClass(e); } } /* * (non-Javadoc) * * @see edu.umd.cs.findbugs.visitclass.BetterVisitor#visitMethod(org.apache.bcel.classfile.Method) */ @Override public void visitMethod(Method obj) { if (sawDateClass) try { super.visitMethod(obj); currentMethod = obj; currentLockDataFlow = getClassContext().getLockDataflow(currentMethod); currentCFG = getClassContext().getCFG(currentMethod); } catch (CFGBuilderException e) { reporter.logError("Synchronization check in Static Calendar Detector caught an error.", e); } catch (DataflowAnalysisException e) { reporter.logError("Synchronization check in Static Calendar Detector caught an error.", e); } } @Override public void visit(Code obj) { if (sawDateClass) { super.visit(obj); bugAccumulator.reportAccumulatedBugs(); } } /** * Checks for method invocations ({@link org.apache.bcel.generic.INVOKEVIRTUAL}) * call on a static {@link Calendar} or {@link DateFormat} fields. The * {@link OpcodeStack} is used to determine if an invocation is done on such * a static field. * * @param seen * An opcode to be analyzed * @see edu.umd.cs.findbugs.visitclass.DismantleBytecode#sawOpcode(int) */ @Override public void sawOpcode(int seen) { // we are only interested in method calls if (seen != INVOKEVIRTUAL) { return; } try { String className = getClassConstantOperand(); if (className.startsWith("[")) { // Ignore array classes return; } ClassDescriptor cDesc = DescriptorFactory.createClassDescriptor(className); // if it is not compatible with Calendar or DateFormat, we are not // interested anymore boolean isCalendar = subtypes2.isSubtype(cDesc, calendarType); boolean isDateFormat = subtypes2.isSubtype(cDesc, dateFormatType); if (!isCalendar && !isDateFormat) { return; } // determine the number of arguments the method expects int numArguments = getNumberArguments(getSigConstantOperand()); // go back on the stack to find what the receiver of the method is OpcodeStack.Item invokedOn = stack.getStackItem(numArguments); XField field = invokedOn.getXField(); // find out, if the field is static. if not, we are not interested // anymore if (field == null || !field.isStatic()) { return; } if (getMethodName().equals("<clinit>") && field.getClassName().equals(getDottedClassName())) return; if (getNameConstantOperand().startsWith("get")) return; if (getNameConstantOperand().equals("equals") && numArguments == 1) { OpcodeStack.Item passedAsArgument = stack.getStackItem(0); field = passedAsArgument.getXField(); if (field == null || !field.isStatic()) { return; } } if (!SystemProperties.getBoolean(PROP_SKIP_SYNCHRONIZED_CHECK)) { // check synchronization try { if (currentMethod != null && currentLockDataFlow != null && currentCFG != null) { Collection<Location> tLocations = currentCFG.getLocationsContainingInstructionWithOffset(getPC()); for (Location tLoc : tLocations) { LockSet lockSet = currentLockDataFlow.getFactAtLocation(tLoc); if (lockSet.getNumLockedObjects() > 0) { // within a synchronized block return; } } } } catch (DataflowAnalysisException e) { reporter.logError("Synchronization check in Static Calendar Detector caught an error.", e); } } // if we get here, we want to generate a report, depending on the // type String tBugType; if (isCalendar) { tBugType = "STCAL_INVOKE_ON_STATIC_CALENDAR_INSTANCE"; } else if (isDateFormat) { tBugType = "STCAL_INVOKE_ON_STATIC_DATE_FORMAT_INSTANCE"; } else throw new IllegalStateException("Not possible"); bugAccumulator.accumulateBug(new BugInstance(this, tBugType, NORMAL_PRIORITY).addClassAndMethod(this).addCalledMethod(this) .addOptionalField(field), this); } catch (ClassNotFoundException e) { AnalysisContext.reportMissingClass(e); } } }
priority tweaking git-svn-id: e7d6bde23f017c9ff4efd468d79d66def666766b@9485 eae3c2d3-9b19-0410-a86e-396b6ccb6ab3
findbugs/src/java/edu/umd/cs/findbugs/detect/StaticCalendarDetector.java
priority tweaking
Java
apache-2.0
c3ad37f94bfa778b4f93a0a67c19cc69f99ec97e
0
speedment/speedment,speedment/speedment
/** * Common traits used in the implementations of the typed * {@link com.speedment.ui.config.DocumentProperty} instances are located in * this package. */ package com.speedment.internal.ui.config.trait;
src/main/java/com/speedment/internal/ui/config/trait/package-info.java
/** * Common traits used in the implementations of the typed * {@link com.speedment.ui.config.DocumentProperty} instances are located in * this package. * <p> * This package is part of the API. Modifications to classes here should only * (if ever) be done in major releases. */ package com.speedment.internal.ui.config.trait;
Trait is no longer part of the API
src/main/java/com/speedment/internal/ui/config/trait/package-info.java
Trait is no longer part of the API
Java
apache-2.0
26e09e13ebb6e32452a9c5afa96c4afc44ef21b0
0
mnki/camel,tdiesler/camel,salikjan/camel,davidwilliams1978/camel,coderczp/camel,isururanawaka/camel,neoramon/camel,pplatek/camel,dsimansk/camel,jmandawg/camel,lburgazzoli/apache-camel,mnki/camel,trohovsky/camel,yuruki/camel,w4tson/camel,satishgummadelli/camel,joakibj/camel,partis/camel,royopa/camel,bgaudaen/camel,neoramon/camel,bhaveshdt/camel,lowwool/camel,qst-jdc-labs/camel,royopa/camel,nboukhed/camel,objectiser/camel,coderczp/camel,dvankleef/camel,joakibj/camel,lasombra/camel,CodeSmell/camel,w4tson/camel,sverkera/camel,veithen/camel,snadakuduru/camel,skinzer/camel,lburgazzoli/camel,nikvaessen/camel,drsquidop/camel,sirlatrom/camel,nboukhed/camel,coderczp/camel,grange74/camel,lasombra/camel,jollygeorge/camel,anoordover/camel,jamesnetherton/camel,jlpedrosa/camel,maschmid/camel,rparree/camel,tdiesler/camel,stalet/camel,neoramon/camel,bgaudaen/camel,jameszkw/camel,nikhilvibhav/camel,sverkera/camel,eformat/camel,jarst/camel,acartapanis/camel,partis/camel,askannon/camel,dkhanolkar/camel,ullgren/camel,sabre1041/camel,gilfernandes/camel,adessaigne/camel,pplatek/camel,w4tson/camel,bgaudaen/camel,chanakaudaya/camel,w4tson/camel,onders86/camel,skinzer/camel,haku/camel,partis/camel,CodeSmell/camel,YMartsynkevych/camel,gilfernandes/camel,YMartsynkevych/camel,christophd/camel,snurmine/camel,mcollovati/camel,veithen/camel,dpocock/camel,dmvolod/camel,tdiesler/camel,anoordover/camel,jameszkw/camel,pplatek/camel,bfitzpat/camel,tdiesler/camel,hqstevenson/camel,MohammedHammam/camel,askannon/camel,ekprayas/camel,prashant2402/camel,dmvolod/camel,tkopczynski/camel,mzapletal/camel,dvankleef/camel,ullgren/camel,manuelh9r/camel,manuelh9r/camel,apache/camel,JYBESSON/camel,snurmine/camel,anton-k11/camel,noelo/camel,erwelch/camel,davidwilliams1978/camel,jonmcewen/camel,tarilabs/camel,stalet/camel,davidwilliams1978/camel,akhettar/camel,sabre1041/camel,grange74/camel,tlehoux/camel,bfitzpat/camel,stravag/camel,jlpedrosa/camel,jmandawg/camel,yuruki/camel,ekprayas/camel,driseley/camel,NickCis/camel,akhettar/camel,qst-jdc-labs/camel,neoramon/camel,jameszkw/camel,CandleCandle/camel,edigrid/camel,mzapletal/camel,christophd/camel,lowwool/camel,oalles/camel,driseley/camel,bhaveshdt/camel,kevinearls/camel,jollygeorge/camel,maschmid/camel,dvankleef/camel,stravag/camel,jkorab/camel,erwelch/camel,anton-k11/camel,stalet/camel,snadakuduru/camel,ge0ffrey/camel,sverkera/camel,iweiss/camel,NetNow/camel,mcollovati/camel,snadakuduru/camel,allancth/camel,chirino/camel,jkorab/camel,iweiss/camel,lowwool/camel,hqstevenson/camel,hqstevenson/camel,Thopap/camel,royopa/camel,chirino/camel,jonmcewen/camel,MohammedHammam/camel,isavin/camel,bdecoste/camel,drsquidop/camel,dpocock/camel,yury-vashchyla/camel,noelo/camel,haku/camel,yuruki/camel,allancth/camel,noelo/camel,YMartsynkevych/camel,johnpoth/camel,driseley/camel,nicolaferraro/camel,apache/camel,jarst/camel,objectiser/camel,acartapanis/camel,atoulme/camel,mzapletal/camel,dkhanolkar/camel,stravag/camel,tdiesler/camel,NetNow/camel,yogamaha/camel,ssharma/camel,jamesnetherton/camel,mgyongyosi/camel,lburgazzoli/camel,ekprayas/camel,FingolfinTEK/camel,gnodet/camel,davidwilliams1978/camel,dkhanolkar/camel,NickCis/camel,ssharma/camel,duro1/camel,pmoerenhout/camel,pkletsko/camel,NickCis/camel,eformat/camel,eformat/camel,mike-kukla/camel,trohovsky/camel,arnaud-deprez/camel,lowwool/camel,pkletsko/camel,neoramon/camel,stalet/camel,mcollovati/camel,grgrzybek/camel,pplatek/camel,punkhorn/camel-upstream,jpav/camel,MrCoder/camel,qst-jdc-labs/camel,bgaudaen/camel,gilfernandes/camel,maschmid/camel,pax95/camel,anton-k11/camel,brreitme/camel,ekprayas/camel,jollygeorge/camel,rmarting/camel,bdecoste/camel,nikvaessen/camel,nicolaferraro/camel,gautric/camel,jkorab/camel,scranton/camel,edigrid/camel,DariusX/camel,bfitzpat/camel,woj-i/camel,bfitzpat/camel,sebi-hgdata/camel,gautric/camel,MohammedHammam/camel,yogamaha/camel,erwelch/camel,jarst/camel,driseley/camel,stravag/camel,jmandawg/camel,stravag/camel,DariusX/camel,MohammedHammam/camel,manuelh9r/camel,zregvart/camel,trohovsky/camel,noelo/camel,josefkarasek/camel,prashant2402/camel,partis/camel,mgyongyosi/camel,duro1/camel,edigrid/camel,gnodet/camel,gilfernandes/camel,lburgazzoli/apache-camel,pkletsko/camel,JYBESSON/camel,joakibj/camel,ge0ffrey/camel,grgrzybek/camel,tkopczynski/camel,bdecoste/camel,dsimansk/camel,isavin/camel,qst-jdc-labs/camel,CandleCandle/camel,lowwool/camel,alvinkwekel/camel,joakibj/camel,tdiesler/camel,mike-kukla/camel,nikhilvibhav/camel,drsquidop/camel,gautric/camel,anoordover/camel,isururanawaka/camel,drsquidop/camel,borcsokj/camel,pax95/camel,apache/camel,dkhanolkar/camel,mzapletal/camel,w4tson/camel,sabre1041/camel,dkhanolkar/camel,isururanawaka/camel,haku/camel,jamesnetherton/camel,tlehoux/camel,onders86/camel,sebi-hgdata/camel,jpav/camel,isururanawaka/camel,hqstevenson/camel,jarst/camel,scranton/camel,brreitme/camel,ge0ffrey/camel,CodeSmell/camel,brreitme/camel,mohanaraosv/camel,tlehoux/camel,pplatek/camel,mohanaraosv/camel,cunningt/camel,joakibj/camel,gnodet/camel,Fabryprog/camel,arnaud-deprez/camel,w4tson/camel,lburgazzoli/apache-camel,ullgren/camel,pmoerenhout/camel,objectiser/camel,nikvaessen/camel,tarilabs/camel,iweiss/camel,nikhilvibhav/camel,satishgummadelli/camel,edigrid/camel,jlpedrosa/camel,gautric/camel,kevinearls/camel,RohanHart/camel,nikvaessen/camel,satishgummadelli/camel,yury-vashchyla/camel,jollygeorge/camel,mohanaraosv/camel,YoshikiHigo/camel,onders86/camel,scranton/camel,lburgazzoli/camel,yury-vashchyla/camel,maschmid/camel,yogamaha/camel,kevinearls/camel,bdecoste/camel,bhaveshdt/camel,edigrid/camel,mgyongyosi/camel,mohanaraosv/camel,stalet/camel,dpocock/camel,noelo/camel,gyc567/camel,tarilabs/camel,josefkarasek/camel,jpav/camel,trohovsky/camel,lburgazzoli/apache-camel,dmvolod/camel,jarst/camel,sverkera/camel,pplatek/camel,punkhorn/camel-upstream,atoulme/camel,pplatek/camel,pmoerenhout/camel,borcsokj/camel,CodeSmell/camel,lburgazzoli/camel,tkopczynski/camel,royopa/camel,satishgummadelli/camel,nicolaferraro/camel,atoulme/camel,nikhilvibhav/camel,dpocock/camel,snurmine/camel,veithen/camel,YoshikiHigo/camel,josefkarasek/camel,erwelch/camel,hqstevenson/camel,scranton/camel,nboukhed/camel,johnpoth/camel,Fabryprog/camel,snadakuduru/camel,nicolaferraro/camel,isururanawaka/camel,trohovsky/camel,coderczp/camel,YoshikiHigo/camel,drsquidop/camel,chanakaudaya/camel,acartapanis/camel,Fabryprog/camel,dsimansk/camel,sirlatrom/camel,jameszkw/camel,CandleCandle/camel,joakibj/camel,ssharma/camel,lburgazzoli/apache-camel,askannon/camel,kevinearls/camel,isavin/camel,acartapanis/camel,davidkarlsen/camel,sabre1041/camel,akhettar/camel,curso007/camel,sirlatrom/camel,bhaveshdt/camel,sebi-hgdata/camel,christophd/camel,pkletsko/camel,anoordover/camel,Thopap/camel,NetNow/camel,rparree/camel,FingolfinTEK/camel,stravag/camel,stalet/camel,JYBESSON/camel,snadakuduru/camel,chirino/camel,skinzer/camel,pmoerenhout/camel,gautric/camel,curso007/camel,scranton/camel,royopa/camel,snadakuduru/camel,gyc567/camel,chanakaudaya/camel,johnpoth/camel,pax95/camel,dpocock/camel,bdecoste/camel,tarilabs/camel,neoramon/camel,tadayosi/camel,NickCis/camel,jamesnetherton/camel,yury-vashchyla/camel,askannon/camel,jmandawg/camel,FingolfinTEK/camel,bgaudaen/camel,CandleCandle/camel,mike-kukla/camel,tadayosi/camel,yury-vashchyla/camel,pkletsko/camel,grgrzybek/camel,qst-jdc-labs/camel,cunningt/camel,mike-kukla/camel,RohanHart/camel,mgyongyosi/camel,apache/camel,pkletsko/camel,jamesnetherton/camel,jonmcewen/camel,allancth/camel,onders86/camel,chanakaudaya/camel,yogamaha/camel,jmandawg/camel,jpav/camel,christophd/camel,grange74/camel,borcsokj/camel,hqstevenson/camel,pax95/camel,nboukhed/camel,bgaudaen/camel,gnodet/camel,atoulme/camel,salikjan/camel,yuruki/camel,tlehoux/camel,akhettar/camel,chirino/camel,lowwool/camel,johnpoth/camel,mohanaraosv/camel,sirlatrom/camel,woj-i/camel,veithen/camel,cunningt/camel,brreitme/camel,FingolfinTEK/camel,manuelh9r/camel,davidkarlsen/camel,arnaud-deprez/camel,cunningt/camel,Thopap/camel,cunningt/camel,iweiss/camel,nboukhed/camel,mohanaraosv/camel,sebi-hgdata/camel,JYBESSON/camel,adessaigne/camel,jkorab/camel,isavin/camel,prashant2402/camel,drsquidop/camel,atoulme/camel,royopa/camel,rparree/camel,chanakaudaya/camel,apache/camel,jonmcewen/camel,nikvaessen/camel,sebi-hgdata/camel,jlpedrosa/camel,CandleCandle/camel,kevinearls/camel,ssharma/camel,ge0ffrey/camel,DariusX/camel,snurmine/camel,cunningt/camel,jkorab/camel,FingolfinTEK/camel,skinzer/camel,oalles/camel,YMartsynkevych/camel,NetNow/camel,gilfernandes/camel,mnki/camel,sverkera/camel,rparree/camel,veithen/camel,noelo/camel,josefkarasek/camel,punkhorn/camel-upstream,jkorab/camel,chirino/camel,snurmine/camel,eformat/camel,satishgummadelli/camel,rparree/camel,lasombra/camel,rmarting/camel,tkopczynski/camel,haku/camel,ssharma/camel,sabre1041/camel,jlpedrosa/camel,haku/camel,dvankleef/camel,jameszkw/camel,mike-kukla/camel,rmarting/camel,Thopap/camel,MohammedHammam/camel,jarst/camel,tarilabs/camel,RohanHart/camel,mzapletal/camel,josefkarasek/camel,curso007/camel,tadayosi/camel,zregvart/camel,iweiss/camel,mgyongyosi/camel,allancth/camel,nboukhed/camel,prashant2402/camel,sirlatrom/camel,borcsokj/camel,oalles/camel,tadayosi/camel,eformat/camel,snurmine/camel,duro1/camel,jollygeorge/camel,adessaigne/camel,jonmcewen/camel,RohanHart/camel,borcsokj/camel,manuelh9r/camel,MrCoder/camel,yogamaha/camel,pmoerenhout/camel,punkhorn/camel-upstream,woj-i/camel,arnaud-deprez/camel,zregvart/camel,zregvart/camel,anton-k11/camel,erwelch/camel,chanakaudaya/camel,atoulme/camel,YoshikiHigo/camel,onders86/camel,anton-k11/camel,christophd/camel,chirino/camel,yury-vashchyla/camel,yuruki/camel,NetNow/camel,brreitme/camel,johnpoth/camel,skinzer/camel,allancth/camel,anoordover/camel,satishgummadelli/camel,NickCis/camel,scranton/camel,NickCis/camel,pax95/camel,coderczp/camel,onders86/camel,manuelh9r/camel,driseley/camel,alvinkwekel/camel,curso007/camel,Thopap/camel,oalles/camel,acartapanis/camel,YoshikiHigo/camel,ullgren/camel,dkhanolkar/camel,mcollovati/camel,gilfernandes/camel,borcsokj/camel,grange74/camel,tlehoux/camel,mike-kukla/camel,tkopczynski/camel,jameszkw/camel,maschmid/camel,woj-i/camel,trohovsky/camel,grange74/camel,davidkarlsen/camel,sirlatrom/camel,oalles/camel,pax95/camel,driseley/camel,rmarting/camel,christophd/camel,erwelch/camel,bhaveshdt/camel,CandleCandle/camel,sabre1041/camel,eformat/camel,lburgazzoli/apache-camel,sebi-hgdata/camel,RohanHart/camel,josefkarasek/camel,kevinearls/camel,ekprayas/camel,duro1/camel,johnpoth/camel,jmandawg/camel,pmoerenhout/camel,mnki/camel,coderczp/camel,askannon/camel,grgrzybek/camel,mzapletal/camel,mgyongyosi/camel,bhaveshdt/camel,nikvaessen/camel,rparree/camel,yogamaha/camel,gyc567/camel,MrCoder/camel,isururanawaka/camel,dpocock/camel,oalles/camel,dmvolod/camel,dmvolod/camel,alvinkwekel/camel,Thopap/camel,lasombra/camel,lasombra/camel,iweiss/camel,edigrid/camel,duro1/camel,Fabryprog/camel,veithen/camel,skinzer/camel,dvankleef/camel,MrCoder/camel,askannon/camel,arnaud-deprez/camel,davidwilliams1978/camel,adessaigne/camel,dsimansk/camel,akhettar/camel,gyc567/camel,dvankleef/camel,acartapanis/camel,maschmid/camel,lburgazzoli/camel,lburgazzoli/camel,bfitzpat/camel,partis/camel,isavin/camel,JYBESSON/camel,bdecoste/camel,RohanHart/camel,YoshikiHigo/camel,ge0ffrey/camel,yuruki/camel,gautric/camel,jlpedrosa/camel,grgrzybek/camel,YMartsynkevych/camel,MrCoder/camel,MohammedHammam/camel,woj-i/camel,akhettar/camel,dsimansk/camel,grgrzybek/camel,tkopczynski/camel,ekprayas/camel,mnki/camel,tlehoux/camel,duro1/camel,YMartsynkevych/camel,anton-k11/camel,jpav/camel,adessaigne/camel,allancth/camel,apache/camel,MrCoder/camel,ge0ffrey/camel,haku/camel,qst-jdc-labs/camel,anoordover/camel,alvinkwekel/camel,objectiser/camel,grange74/camel,dmvolod/camel,isavin/camel,prashant2402/camel,dsimansk/camel,partis/camel,jpav/camel,FingolfinTEK/camel,prashant2402/camel,adessaigne/camel,NetNow/camel,woj-i/camel,davidwilliams1978/camel,curso007/camel,jollygeorge/camel,lasombra/camel,sverkera/camel,gyc567/camel,gyc567/camel,bfitzpat/camel,brreitme/camel,jamesnetherton/camel,curso007/camel,tarilabs/camel,gnodet/camel,rmarting/camel,tadayosi/camel,davidkarlsen/camel,tadayosi/camel,rmarting/camel,arnaud-deprez/camel,jonmcewen/camel,DariusX/camel,mnki/camel,ssharma/camel,JYBESSON/camel
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.hazelcast; import java.util.Date; import java.util.HashMap; import java.util.Map; import org.apache.camel.Exchange; public final class HazelcastComponentHelper { private final HashMap<String, Integer> mapping = new HashMap<String, Integer>(); public HazelcastComponentHelper() { this.init(); } public static void copyHeaders(Exchange ex) { // get in headers Map<String, Object> headers = ex.getIn().getHeaders(); // delete item id if (headers.containsKey(HazelcastConstants.OBJECT_ID)) { headers.remove(HazelcastConstants.OBJECT_ID); } if (headers.containsKey(HazelcastConstants.OPERATION)) { headers.remove(HazelcastConstants.OPERATION); } // propagate headers if OUT message created if (ex.hasOut()) { ex.getOut().setHeaders(headers); } } public static void setListenerHeaders(Exchange ex, String listenerType, String listenerAction, String cacheName) { ex.getIn().setHeader(HazelcastConstants.CACHE_NAME, cacheName); HazelcastComponentHelper.setListenerHeaders(ex, listenerType, listenerAction); } public static void setListenerHeaders(Exchange ex, String listenerType, String listenerAction) { ex.getIn().setHeader(HazelcastConstants.LISTENER_ACTION, listenerAction); ex.getIn().setHeader(HazelcastConstants.LISTENER_TYPE, listenerType); ex.getIn().setHeader(HazelcastConstants.LISTENER_TIME, new Date().getTime()); } public int lookupOperationNumber(Exchange exchange, int defaultOperation) { return extractOperationNumber(exchange.getIn().getHeader(HazelcastConstants.OPERATION), defaultOperation); } public int extractOperationNumber(Object value, int defaultOperation) { int operation = defaultOperation; if (value instanceof String) { operation = mapToOperationNumber((String) value); } else if (value instanceof Integer) { operation = (Integer)value; } return operation; } /** * Allows the use of speaking operation names (e.g. for usage in Spring DSL) */ private int mapToOperationNumber(String operationName) { if (this.mapping.containsKey(operationName)) { return this.mapping.get(operationName); } else { throw new IllegalArgumentException(String.format("Operation '%s' is not supported by this component.", operationName)); } } private void init() { // fill map with values addMapping("put", HazelcastConstants.PUT_OPERATION); addMapping("delete", HazelcastConstants.DELETE_OPERATION); addMapping("get", HazelcastConstants.GET_OPERATION); addMapping("update", HazelcastConstants.UPDATE_OPERATION); addMapping("query", HazelcastConstants.QUERY_OPERATION); addMapping("getAll", HazelcastConstants.GET_ALL_OPERATION); addMapping("clear", HazelcastConstants.CLEAR_OPERATION); addMapping("evict", HazelcastConstants.EVICT_OPERATION); addMapping("evictAll", HazelcastConstants.EVICT_ALL_OPERATION); addMapping("putIfAbsent", HazelcastConstants.PUT_IF_ABSENT_OPERATION); addMapping("addAll", HazelcastConstants.ADD_ALL_OPERATION); addMapping("removeAll", HazelcastConstants.REMOVE_ALL_OPERATION); addMapping("retainAll", HazelcastConstants.RETAIN_ALL_OPERATION); // multimap addMapping("removevalue", HazelcastConstants.REMOVEVALUE_OPERATION); // atomic numbers addMapping("increment", HazelcastConstants.INCREMENT_OPERATION); addMapping("decrement", HazelcastConstants.DECREMENT_OPERATION); addMapping("setvalue", HazelcastConstants.SETVALUE_OPERATION); addMapping("destroy", HazelcastConstants.DESTROY_OPERATION); // queue addMapping("add", HazelcastConstants.ADD_OPERATION); addMapping("offer", HazelcastConstants.OFFER_OPERATION); addMapping("peek", HazelcastConstants.PEEK_OPERATION); addMapping("poll", HazelcastConstants.POLL_OPERATION); // topic addMapping("publish", HazelcastConstants.PUBLISH_OPERATION); } private void addMapping(String operationName, int operationNumber) { this.mapping.put(operationName, operationNumber); this.mapping.put(String.valueOf(operationNumber), operationNumber); } }
components/camel-hazelcast/src/main/java/org/apache/camel/component/hazelcast/HazelcastComponentHelper.java
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.hazelcast; import java.util.Date; import java.util.HashMap; import java.util.Map; import org.apache.camel.Exchange; public final class HazelcastComponentHelper { private final HashMap<String, Integer> mapping = new HashMap<String, Integer>(); public HazelcastComponentHelper() { this.init(); } public static void copyHeaders(Exchange ex) { // get in headers Map<String, Object> headers = ex.getIn().getHeaders(); // delete item id if (headers.containsKey(HazelcastConstants.OBJECT_ID)) { headers.remove(HazelcastConstants.OBJECT_ID); } if (headers.containsKey(HazelcastConstants.OPERATION)) { headers.remove(HazelcastConstants.OPERATION); } // propagate headers if OUT message created if (ex.hasOut()) { ex.getOut().setHeaders(headers); } } public static void setListenerHeaders(Exchange ex, String listenerType, String listenerAction, String cacheName) { ex.getIn().setHeader(HazelcastConstants.CACHE_NAME, cacheName); HazelcastComponentHelper.setListenerHeaders(ex, listenerType, listenerAction); } public static void setListenerHeaders(Exchange ex, String listenerType, String listenerAction) { ex.getIn().setHeader(HazelcastConstants.LISTENER_ACTION, listenerAction); ex.getIn().setHeader(HazelcastConstants.LISTENER_TYPE, listenerType); ex.getIn().setHeader(HazelcastConstants.LISTENER_TIME, new Date().getTime()); } public int lookupOperationNumber(Exchange exchange, int defaultOperation) { return extractOperationNumber(exchange.getIn().getHeader(HazelcastConstants.OPERATION), defaultOperation); } public int extractOperationNumber(Object value, int defaultOperation) { int operation = defaultOperation; if (value instanceof String) { operation = mapToOperationNumber((String) value); } else if (value instanceof Integer) { operation = (Integer)value; } return operation; } /** * Allows the use of speaking operation names (e.g. for usage in Spring DSL) */ private int mapToOperationNumber(String operationName) { if (this.mapping.containsKey(operationName)) { return this.mapping.get(operationName); } else { throw new IllegalArgumentException(String.format("Operation '%s' is not supported by this component.", operationName)); } } private void init() { // fill map with values addMapping("put", HazelcastConstants.PUT_OPERATION); addMapping("delete", HazelcastConstants.DELETE_OPERATION); addMapping("get", HazelcastConstants.GET_OPERATION); addMapping("update", HazelcastConstants.UPDATE_OPERATION); addMapping("query", HazelcastConstants.QUERY_OPERATION); addMapping("getAll", HazelcastConstants.GET_ALL_OPERATION); addMapping("clear", HazelcastConstants.CLEAR_OPERATION); // multimap addMapping("removevalue", HazelcastConstants.REMOVEVALUE_OPERATION); // atomic numbers addMapping("increment", HazelcastConstants.INCREMENT_OPERATION); addMapping("decrement", HazelcastConstants.DECREMENT_OPERATION); addMapping("setvalue", HazelcastConstants.SETVALUE_OPERATION); addMapping("destroy", HazelcastConstants.DESTROY_OPERATION); // queue addMapping("add", HazelcastConstants.ADD_OPERATION); addMapping("offer", HazelcastConstants.OFFER_OPERATION); addMapping("peek", HazelcastConstants.PEEK_OPERATION); addMapping("poll", HazelcastConstants.POLL_OPERATION); // topic addMapping("publish", HazelcastConstants.PUBLISH_OPERATION); } private void addMapping(String operationName, int operationNumber) { this.mapping.put(operationName, operationNumber); this.mapping.put(String.valueOf(operationNumber), operationNumber); } }
CAMEL-8858 Camel-Hazelcast: HazelcastComponentHelper miss some operations in mapping
components/camel-hazelcast/src/main/java/org/apache/camel/component/hazelcast/HazelcastComponentHelper.java
CAMEL-8858 Camel-Hazelcast: HazelcastComponentHelper miss some operations in mapping