lang (stringclasses, 1 value) | license (stringclasses, 13 values) | stderr (stringlengths, 0-350) | commit (stringlengths, 40-40) | returncode (int64, values 0-128) | repos (stringlengths, 7-45.1k) | new_contents (stringlengths, 0-1.87M) | new_file (stringlengths, 6-292) | old_contents (stringlengths, 0-1.87M) | message (stringlengths, 6-9.26k) | old_file (stringlengths, 6-292) | subject (stringlengths, 0-4.45k)
---|---|---|---|---|---|---|---|---|---|---|---|
Java | lgpl-2.1 | 3dbe6ccdeda092f3c32cb0c81bbe8c5a1af4cccb | 0 | Ghostlyr/MinecraftForge,bonii-xx/MinecraftForge,dmf444/MinecraftForge,brubo1/MinecraftForge,simon816/MinecraftForge,karlthepagan/MinecraftForge,shadekiller666/MinecraftForge,mickkay/MinecraftForge,jdpadrnos/MinecraftForge,Mathe172/MinecraftForge,CrafterKina/MinecraftForge,blay09/MinecraftForge,fcjailybo/MinecraftForge,Theerapak/MinecraftForge,RainWarrior/MinecraftForge,Zaggy1024/MinecraftForge,Vorquel/MinecraftForge,ThiagoGarciaAlves/MinecraftForge,luacs1998/MinecraftForge | package cpw.mods.fml.relauncher;
import java.security.Permission;
/**
* A custom security manager stopping certain events from happening
* unexpectedly.
*
* @author cpw
*
*/
public class FMLSecurityManager extends SecurityManager {
@Override
public void checkPermission(Permission perm)
{
String permName = perm.getName() != null ? perm.getName() : "missing";
if (permName.startsWith("exitVM"))
{
Class<?>[] classContexts = getClassContext();
String callingClass = classContexts.length > 3 ? classContexts[4].getName() : "none";
String callingParent = classContexts.length > 4 ? classContexts[5].getName() : "none";
// FML is allowed to call System.exit, as are the Minecraft client (from the quit button) and the dedicated server
if (!(callingClass.startsWith("cpw.mods.fml.") || ( "net.minecraft.client.Minecraft".equals(callingClass) && "net.minecraft.client.Minecraft".equals(callingParent)) || ("net.minecraft.server.dedicated.DedicatedServer".equals(callingClass) && "net.minecraft.server.MinecraftServer".equals(callingParent))))
{
throw new ExitTrappedException();
}
}
else if ("setSecurityManager".equals(permName))
{
throw new SecurityException("Cannot replace the FML security manager");
}
return;
}
public static class ExitTrappedException extends SecurityException {
private static final long serialVersionUID = 1L;
}
}
| fml/src/main/java/cpw/mods/fml/relauncher/FMLSecurityManager.java | package cpw.mods.fml.relauncher;
import java.security.Permission;
/**
* A custom security manager stopping certain events from happening
* unexpectedly.
*
* @author cpw
*
*/
public class FMLSecurityManager extends SecurityManager {
@Override
public void checkPermission(Permission perm)
{
String permName = perm.getName() != null ? perm.getName() : "missing";
if (permName.startsWith("exitVM"))
{
String callingClass = getClassContext()[4].getName();
// FML is allowed to call system exit
if (!callingClass.startsWith("cpw.mods.fml."))
{
throw new ExitTrappedException();
}
}
else if ("setSecurityManager".equals(permName))
{
throw new SecurityException("Cannot replace the FML security manager");
}
return;
}
public static class ExitTrappedException extends SecurityException {
private static final long serialVersionUID = 1L;
}
}
| Fix up other exit points. Should stop process hangs for clean exits.
| fml/src/main/java/cpw/mods/fml/relauncher/FMLSecurityManager.java | Fix up other exit points. Should stop process hangs for clean exits. |
|
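Side note on the row above: the commit widens the exit whitelist so that the Minecraft client (quit button) and the dedicated server can call System.exit cleanly instead of hanging on ExitTrappedException. Below is a minimal, self-contained sketch of how such an exit-trapping SecurityManager is installed and what happens when code calls System.exit; the class and names are hypothetical demo code, not FML's actual bootstrap, and SecurityManager is deprecated since JDK 17, so this targets older JVMs.

import java.security.Permission;

public class ExitTrapDemo {
    static class ExitTrappedException extends SecurityException {}

    public static void main(String[] args) {
        // Install a manager that vetoes VM exit but permits everything else.
        System.setSecurityManager(new SecurityManager() {
            @Override
            public void checkPermission(Permission perm) {
                String name = perm.getName() != null ? perm.getName() : "missing";
                // Runtime.exit checks a RuntimePermission named "exitVM.<status>"
                if (name.startsWith("exitVM")) {
                    throw new ExitTrappedException();
                }
                // returning without throwing grants all other permissions
            }
        });
        try {
            System.exit(1); // trapped: the VM keeps running
        } catch (ExitTrappedException e) {
            System.out.println("exit was trapped");
        }
    }
}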
Java | lgpl-2.1 | 4cc343b53fc3b055a18be3d9e47a8002bb8db74f | 0 | xwiki/xwiki-platform,pbondoer/xwiki-platform,xwiki/xwiki-platform,pbondoer/xwiki-platform,xwiki/xwiki-platform,xwiki/xwiki-platform,xwiki/xwiki-platform,pbondoer/xwiki-platform,pbondoer/xwiki-platform,pbondoer/xwiki-platform | /*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.xwiki.rendering.internal.parser.pygments;
import java.io.IOException;
import java.io.Reader;
import java.net.URL;
import java.net.URLDecoder;
import java.text.MessageFormat;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.apache.commons.io.IOUtils;
import org.python.core.Py;
import org.python.core.PyObject;
import org.python.core.PyUnicode;
import org.python.util.PythonInterpreter;
import org.xwiki.component.phase.Initializable;
import org.xwiki.component.phase.InitializationException;
import org.xwiki.rendering.block.Block;
import org.xwiki.rendering.block.VerbatimInlineBlock;
import org.xwiki.rendering.parser.AbstractHighlightParser;
import org.xwiki.rendering.parser.ParseException;
import org.xwiki.rendering.parser.Syntax;
import org.xwiki.rendering.parser.SyntaxType;
/**
* Highlight provided source using Pygments.
*
* @version $Id$
* @since 1.7RC1
*/
public class PygmentsParser extends AbstractHighlightParser implements Initializable
{
/**
* A Pygments .py file to search for the location of the jar.
*/
private static final String LEXER_PY = "Lib/pygments/lexer.py";
/**
* A Pygments .py file to search for the location of the jar.
*/
private static final String XDOMFORMATTER_PY = "Lib/pygments/formatters/xdom.py";
/**
* The name of the lexer variable in Python code.
*/
private static final String PY_LEXER_VARNAME = "lexer";
/**
* The name of the formatter variable in Python code.
*/
private static final String PY_FORMATTER_VARNAME = "formatter";
/**
* The name of the listener variable in Python code.
*/
private static final String PY_LISTENER_VARNAME = "listener";
/**
* The name of the variable containing the source code to highlight in Python code.
*/
private static final String PY_CODE_VARNAME = "code";
/**
* Python code to create the lexer.
*/
private static final String PY_LEXER_CREATE =
PY_LEXER_VARNAME + " = pygments.lexers.get_lexer_by_name(\"{0}\", stripall=True)";
/**
* Python code to find the lexer from source.
*/
private static final String PY_LEXER_FIND =
PY_LEXER_VARNAME + " = None\n" + "try:\n" + " " + PY_LEXER_VARNAME + " = guess_lexer(code, stripall=True)\n"
+ "except ClassNotFound:\n" + " pass";
/**
* The Java jar URL prefix.
*/
private static final String JAR_URL_PREFIX = "jar:file:";
/**
* Jar path separator.
*/
private static final String JAR_SEPARATOR = "!";
/**
* The character used to separate URL parts.
*/
private static final String URL_SEPARATOR = "/";
/**
* The syntax identifier.
*/
private Syntax syntax;
/**
* The Python interpreter used to execute Pygments.
*/
private PythonInterpreter pythonInterpreter;
/**
* List all allowed languages.
* <p>
* This is only needed since Jython 2.5 is too slow for other languages to be parsed in good conditions.
*/
private Set<String> allowedLanguages = new HashSet<String>(Arrays.asList("html", "xml", "php", "html+php"));
/**
* {@inheritDoc}
*
* @see org.xwiki.component.phase.Initializable#initialize()
*/
public void initialize() throws InitializationException
{
this.syntax = new Syntax(SyntaxType.getSyntaxType(getSyntaxId() + "-highlight"), "1.0");
System.setProperty("python.home", findPygmentsPath());
this.pythonInterpreter = new PythonInterpreter();
// imports Pygments
this.pythonInterpreter.exec("import pygments");
this.pythonInterpreter.execfile(getClass().getClassLoader().getResourceAsStream(XDOMFORMATTER_PY));
this.pythonInterpreter.exec("from pygments.lexers import guess_lexer");
this.pythonInterpreter.exec("from pygments.util import ClassNotFound");
}
/**
* {@inheritDoc}
*
* @see org.xwiki.rendering.parser.Parser#getSyntax()
*/
public Syntax getSyntax()
{
return this.syntax;
}
/**
* {@inheritDoc}
*
* @see org.xwiki.rendering.parser.HighlightParser#highlight(java.lang.String, java.io.Reader)
*/
public List<Block> highlight(String syntaxId, Reader source) throws ParseException
{
PythonInterpreter interpreter = getPythonInterpreter();
BlocksGeneratorPygmentsListener listener = new BlocksGeneratorPygmentsListener();
String code;
try {
code = IOUtils.toString(source);
} catch (IOException e) {
throw new ParseException("Failed to read source", e);
}
interpreter.set(PY_LISTENER_VARNAME, listener);
interpreter.set(PY_CODE_VARNAME, new PyUnicode(code));
if (syntaxId != null && syntaxId.length() > 0 && allowedLanguages.contains(syntaxId.toLowerCase())) {
interpreter.exec(MessageFormat.format(PY_LEXER_CREATE, syntaxId));
}
// TODO: restore the following code when the Jython speed problem is fixed
// else {
// interpreter.exec(PY_LEXER_FIND);
// }
PyObject lexer = interpreter.get(PY_LEXER_VARNAME);
if (lexer == null || lexer == Py.None) {
// No lexer found
if (getLogger().isDebugEnabled()) {
getLogger().debug("no lexer found");
}
return Collections.<Block> singletonList(new VerbatimInlineBlock(code));
}
interpreter.exec(MessageFormat.format("{0} = XDOMFormatter({1})", PY_FORMATTER_VARNAME, PY_LISTENER_VARNAME));
interpreter.exec(MessageFormat.format("pygments.highlight({0}, {1}, {2})", PY_CODE_VARNAME, PY_LEXER_VARNAME,
PY_FORMATTER_VARNAME));
List<String> vars = Arrays.asList(PY_LISTENER_VARNAME, PY_CODE_VARNAME, PY_LEXER_VARNAME, PY_FORMATTER_VARNAME);
for (String var : vars) {
interpreter.exec("del " + var);
}
return listener.getBlocks();
}
/**
* @return the python interpreter.
*/
protected PythonInterpreter getPythonInterpreter()
{
return this.pythonInterpreter;
}
/**
* Get the full URL root path of provided Python file.
*
* @param fileToFind the Python file to find in the classpath.
* @return the root URL path.
*/
private String findPath(String fileToFind)
{
URL url = getClass().getResource(URL_SEPARATOR + fileToFind);
String urlString = URLDecoder.decode(url.toString());
// we expect a URL like
// jar:file:/jar_dir/jython-lib.jar!/Lib/pygments/lexer.py
int jarSeparatorIndex = urlString.indexOf(JAR_SEPARATOR);
if (urlString.startsWith(JAR_URL_PREFIX) && jarSeparatorIndex > 0) {
urlString = urlString.substring(JAR_URL_PREFIX.length(), jarSeparatorIndex);
} else {
// Just in case we don't get a jar URL
int begin = urlString.indexOf(URL_SEPARATOR);
int lexerPyIndex = urlString.lastIndexOf(fileToFind);
urlString = urlString.substring(begin, lexerPyIndex);
if (urlString.endsWith(URL_SEPARATOR)) {
urlString = urlString.substring(0, urlString.length() - 1);
}
if (urlString.endsWith(JAR_SEPARATOR)) {
urlString = urlString.substring(0, urlString.length() - 1);
}
}
return urlString;
}
/**
* Determine and register the home of the Pygments Python files.
*
* @return the root path of Pygments Python files.
*/
private String findPygmentsPath()
{
return findPath(LEXER_PY);
}
}
| xwiki-macros/xwiki-macro-code/src/main/java/org/xwiki/rendering/internal/parser/pygments/PygmentsParser.java | /*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this software; if not, write to the Free
* Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
* 02110-1301 USA, or see the FSF site: http://www.fsf.org.
*/
package org.xwiki.rendering.internal.parser.pygments;
import java.io.IOException;
import java.io.Reader;
import java.net.URL;
import java.net.URLDecoder;
import java.text.MessageFormat;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.python.core.Py;
import org.python.core.PyObject;
import org.python.core.PyUnicode;
import org.python.util.PythonInterpreter;
import org.xwiki.component.phase.Initializable;
import org.xwiki.component.phase.InitializationException;
import org.xwiki.rendering.block.Block;
import org.xwiki.rendering.block.VerbatimInlineBlock;
import org.xwiki.rendering.parser.AbstractHighlightParser;
import org.xwiki.rendering.parser.ParseException;
import org.xwiki.rendering.parser.Syntax;
import org.xwiki.rendering.parser.SyntaxType;
/**
* Highlight provided source using Pygments.
*
* @version $Id$
* @since 1.7RC1
*/
public class PygmentsParser extends AbstractHighlightParser implements Initializable
{
/**
* A Pygments .py file to search for the location of the jar.
*/
private static final String LEXER_PY = "Lib/pygments/lexer.py";
/**
* A Pygments .py file to search for the location of the jar.
*/
private static final String XDOMFORMATTER_PY = "Lib/pygments/formatters/xdom.py";
/**
* The name of the lexer variable in Python code.
*/
private static final String PY_LEXER_VARNAME = "lexer";
/**
* The name of the formatter variable in Python code.
*/
private static final String PY_FORMATTER_VARNAME = "formatter";
/**
* The name of the listener variable in Python code.
*/
private static final String PY_LISTENER_VARNAME = "listener";
/**
* The name of the variable containing the source code to highlight in Python code.
*/
private static final String PY_CODE_VARNAME = "code";
/**
* Python code to create the lexer.
*/
private static final String PY_LEXER_CREATE =
PY_LEXER_VARNAME + " = pygments.lexers.get_lexer_by_name(\"{0}\", stripall=True)";
/**
* Python code to find the lexer from source.
*/
private static final String PY_LEXER_FIND =
PY_LEXER_VARNAME + " = None\n" + "try:\n" + " " + PY_LEXER_VARNAME + " = guess_lexer(code, stripall=True)\n"
+ "except ClassNotFound:\n" + " pass";
/**
* The Java jar URL prefix.
*/
private static final String JAR_URL_PREFIX = "jar:file:";
/**
* Jar path separator.
*/
private static final String JAR_SEPARATOR = "!";
/**
* The character used to separate URL parts.
*/
private static final String URL_SEPARATOR = "/";
/**
* The syntax identifier.
*/
private Syntax syntax;
/**
* The Python interpreter used to execute Pygments.
*/
private PythonInterpreter pythonInterpreter;
/**
* List all allowed languages.
* <p>
* This is only needed since Jython 2.5 is too slow for other languages to be parsed in good conditions.
*/
private Set<String> allowedLanguages = new HashSet<String>(Arrays.asList("html", "xml", "php", "html+php"));
/**
* {@inheritDoc}
*
* @see org.xwiki.component.phase.Initializable#initialize()
*/
public void initialize() throws InitializationException
{
this.syntax = new Syntax(SyntaxType.getSyntaxType(getSyntaxId() + "-highlight"), "1.0");
System.setProperty("python.home", findPygmentsPath());
this.pythonInterpreter = new PythonInterpreter();
// imports Pygments
this.pythonInterpreter.exec("import pygments");
this.pythonInterpreter.execfile(getClass().getClassLoader().getResourceAsStream(XDOMFORMATTER_PY));
this.pythonInterpreter.exec("from pygments.lexers import guess_lexer");
this.pythonInterpreter.exec("from pygments.util import ClassNotFound");
}
/**
* {@inheritDoc}
*
* @see org.xwiki.rendering.parser.Parser#getSyntax()
*/
public Syntax getSyntax()
{
return this.syntax;
}
/**
* {@inheritDoc}
*
* @see org.xwiki.rendering.parser.HighlightParser#highlight(java.lang.String, java.io.Reader)
*/
public List<Block> highlight(String syntaxId, Reader source) throws ParseException
{
PythonInterpreter interpreter = getPythonInterpreter();
BlocksGeneratorPygmentsListener listener = new BlocksGeneratorPygmentsListener();
StringBuffer sb = new StringBuffer();
try {
for (char[] buffer = new char[4096]; source.read(buffer, 0, 4096) > 0;) {
sb.append(buffer);
}
} catch (IOException e) {
throw new ParseException("Failed to read source", e);
}
String code = sb.toString();
interpreter.set(PY_LISTENER_VARNAME, listener);
interpreter.set(PY_CODE_VARNAME, new PyUnicode(code));
if (syntaxId != null && syntaxId.length() > 0 && allowedLanguages.contains(syntaxId.toLowerCase())) {
interpreter.exec(MessageFormat.format(PY_LEXER_CREATE, syntaxId));
}
// TODO: restore the following code when the Jython speed problem is fixed
// else {
// interpreter.exec(PY_LEXER_FIND);
// }
PyObject lexer = interpreter.get(PY_LEXER_VARNAME);
if (lexer == null || lexer == Py.None) {
// No lexer found
if (getLogger().isDebugEnabled()) {
getLogger().debug("no lexer found");
}
return Collections.<Block> singletonList(new VerbatimInlineBlock(code));
}
interpreter.exec(MessageFormat.format("{0} = XDOMFormatter({1})", PY_FORMATTER_VARNAME, PY_LISTENER_VARNAME));
interpreter.exec(MessageFormat.format("pygments.highlight({0}, {1}, {2})", PY_CODE_VARNAME, PY_LEXER_VARNAME,
PY_FORMATTER_VARNAME));
List<String> vars = Arrays.asList(PY_LISTENER_VARNAME, PY_CODE_VARNAME, PY_LEXER_VARNAME, PY_FORMATTER_VARNAME);
for (String var : vars) {
interpreter.exec("del " + var);
}
return listener.getBlocks();
}
/**
* @return the python interpreter.
*/
protected PythonInterpreter getPythonInterpreter()
{
return this.pythonInterpreter;
}
/**
* Get the full URL root path of provided Python file.
*
* @param fileToFind the Python file to find in the classpath.
* @return the root URL path.
*/
private String findPath(String fileToFind)
{
URL url = getClass().getResource(URL_SEPARATOR + fileToFind);
String urlString = URLDecoder.decode(url.toString());
// we expect a URL like
// jar:file:/jar_dir/jython-lib.jar!/Lib/pygments/lexer.py
int jarSeparatorIndex = urlString.indexOf(JAR_SEPARATOR);
if (urlString.startsWith(JAR_URL_PREFIX) && jarSeparatorIndex > 0) {
urlString = urlString.substring(JAR_URL_PREFIX.length(), jarSeparatorIndex);
} else {
// Just in case we don't get a jar URL
int begin = urlString.indexOf(URL_SEPARATOR);
int lexerPyIndex = urlString.lastIndexOf(fileToFind);
urlString = urlString.substring(begin, lexerPyIndex);
if (urlString.endsWith(URL_SEPARATOR)) {
urlString = urlString.substring(0, urlString.length() - 1);
}
if (urlString.endsWith(JAR_SEPARATOR)) {
urlString = urlString.substring(0, urlString.length() - 1);
}
}
return urlString;
}
/**
* Determine and register the home of the Pygments Python files.
*
* @return the root path of Pygments Python files.
*/
private String findPygmentsPath()
{
return findPath(LEXER_PY);
}
}
| [cleanup] Improve codestyle
git-svn-id: d23d7a6431d93e1bdd218a46658458610974b053@14913 f329d543-caf0-0310-9063-dda96c69346f
| xwiki-macros/xwiki-macro-code/src/main/java/org/xwiki/rendering/internal/parser/pygments/PygmentsParser.java | [cleanup] Improve codestyle |
|
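A note on the row above: the cleanup replaces a hand-rolled Reader drain with IOUtils.toString. The old loop appended the full 4096-char buffer on every pass, even when read() returned fewer characters, so stale buffer contents could leak into the highlighted source. A minimal sketch of the correct manual equivalent next to the one-liner (illustrative class and method names, assuming commons-io on the classpath):

import java.io.IOException;
import java.io.Reader;
import java.io.StringReader;
import org.apache.commons.io.IOUtils;

public class ReaderDrain {
    // Correct manual drain: append only the characters actually read.
    static String drain(Reader source) throws IOException {
        StringBuilder sb = new StringBuilder();
        char[] buffer = new char[4096];
        for (int n; (n = source.read(buffer, 0, buffer.length)) > 0; ) {
            sb.append(buffer, 0, n);
        }
        return sb.toString();
    }

    public static void main(String[] args) throws IOException {
        String manual = drain(new StringReader("print('hi')"));
        // Equivalent one-liner used by the updated PygmentsParser:
        String viaCommonsIo = IOUtils.toString(new StringReader("print('hi')"));
        System.out.println(manual.equals(viaCommonsIo)); // true
    }
}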
Java | apache-2.0 | 76e0e5238f46dce24373a9f1fab4142028ea1e28 | 0 | treasure-data/presto,dain/presto,electrum/presto,smartnews/presto,erichwang/presto,smartnews/presto,ebyhr/presto,ebyhr/presto,11xor6/presto,erichwang/presto,Praveen2112/presto,smartnews/presto,losipiuk/presto,Praveen2112/presto,treasure-data/presto,electrum/presto,treasure-data/presto,hgschmie/presto,erichwang/presto,erichwang/presto,dain/presto,hgschmie/presto,electrum/presto,Praveen2112/presto,losipiuk/presto,hgschmie/presto,erichwang/presto,martint/presto,martint/presto,losipiuk/presto,treasure-data/presto,Praveen2112/presto,dain/presto,electrum/presto,ebyhr/presto,electrum/presto,dain/presto,martint/presto,11xor6/presto,smartnews/presto,11xor6/presto,Praveen2112/presto,losipiuk/presto,martint/presto,martint/presto,11xor6/presto,treasure-data/presto,hgschmie/presto,smartnews/presto,dain/presto,ebyhr/presto,losipiuk/presto,11xor6/presto,ebyhr/presto,hgschmie/presto,treasure-data/presto | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.plugin.hive.metastore.thrift;
import com.google.common.net.HostAndPort;
import io.airlift.configuration.Config;
import io.airlift.configuration.ConfigDescription;
import io.airlift.configuration.LegacyConfig;
import io.airlift.units.Duration;
import io.prestosql.plugin.hive.util.RetryDriver;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;
import java.util.concurrent.TimeUnit;
public class ThriftMetastoreConfig
{
private Duration metastoreTimeout = new Duration(10, TimeUnit.SECONDS);
private HostAndPort socksProxy;
private int maxRetries = RetryDriver.DEFAULT_MAX_ATTEMPTS - 1;
private double backoffScaleFactor = RetryDriver.DEFAULT_SCALE_FACTOR;
private Duration minBackoffDelay = RetryDriver.DEFAULT_SLEEP_TIME;
private Duration maxBackoffDelay = RetryDriver.DEFAULT_SLEEP_TIME;
private Duration maxRetryTime = RetryDriver.DEFAULT_MAX_RETRY_TIME;
private boolean impersonationEnabled;
@NotNull
public Duration getMetastoreTimeout()
{
return metastoreTimeout;
}
@Config("hive.metastore-timeout")
public ThriftMetastoreConfig setMetastoreTimeout(Duration metastoreTimeout)
{
this.metastoreTimeout = metastoreTimeout;
return this;
}
public HostAndPort getSocksProxy()
{
return socksProxy;
}
@Config("hive.metastore.thrift.client.socks-proxy")
public ThriftMetastoreConfig setSocksProxy(HostAndPort socksProxy)
{
this.socksProxy = socksProxy;
return this;
}
@Min(0)
public int getMaxRetries()
{
return maxRetries;
}
@Config("hive.metastore.thrift.client.max-retries")
@ConfigDescription("Maximum number of retry attempts for metastore requests")
public ThriftMetastoreConfig setMaxRetries(int maxRetries)
{
this.maxRetries = maxRetries;
return this;
}
public double getBackoffScaleFactor()
{
return backoffScaleFactor;
}
@Config("hive.metastore.thrift.client.backoff-scale-factor")
@ConfigDescription("Scale factor for metastore request retry delay")
public ThriftMetastoreConfig setBackoffScaleFactor(double backoffScaleFactor)
{
this.backoffScaleFactor = backoffScaleFactor;
return this;
}
@NotNull
public Duration getMaxRetryTime()
{
return maxRetryTime;
}
@Config("hive.metastore.thrift.client.max-retry-time")
@ConfigDescription("Total time limit for a metastore request to be retried")
public ThriftMetastoreConfig setMaxRetryTime(Duration maxRetryTime)
{
this.maxRetryTime = maxRetryTime;
return this;
}
public Duration getMinBackoffDelay()
{
return minBackoffDelay;
}
@Config("hive.metastore.thrift.client.min-backoff-delay")
@ConfigDescription("Minimum delay between metastore request retries")
public ThriftMetastoreConfig setMinBackoffDelay(Duration minBackoffDelay)
{
this.minBackoffDelay = minBackoffDelay;
return this;
}
public Duration getMaxBackoffDelay()
{
return maxBackoffDelay;
}
@Config("hive.metastore.thrift.client.max-backoff-delay")
@ConfigDescription("Maximum delay between metastore request retries")
public ThriftMetastoreConfig setMaxBackoffDelay(Duration maxBackoffDelay)
{
this.maxBackoffDelay = maxBackoffDelay;
return this;
}
public boolean isImpersonationEnabled()
{
return impersonationEnabled;
}
@Config("hive.metastore.thrift.impersonation.enabled")
@LegacyConfig("hive.metastore.impersonation-enabled")
@ConfigDescription("Should end user be impersonated when communicating with metastore")
public ThriftMetastoreConfig setImpersonationEnabled(boolean impersonationEnabled)
{
this.impersonationEnabled = impersonationEnabled;
return this;
}
}
| presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/thrift/ThriftMetastoreConfig.java | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.plugin.hive.metastore.thrift;
import com.google.common.net.HostAndPort;
import io.airlift.configuration.Config;
import io.airlift.configuration.ConfigDescription;
import io.airlift.units.Duration;
import io.prestosql.plugin.hive.util.RetryDriver;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;
import java.util.concurrent.TimeUnit;
public class ThriftMetastoreConfig
{
private Duration metastoreTimeout = new Duration(10, TimeUnit.SECONDS);
private HostAndPort socksProxy;
private int maxRetries = RetryDriver.DEFAULT_MAX_ATTEMPTS - 1;
private double backoffScaleFactor = RetryDriver.DEFAULT_SCALE_FACTOR;
private Duration minBackoffDelay = RetryDriver.DEFAULT_SLEEP_TIME;
private Duration maxBackoffDelay = RetryDriver.DEFAULT_SLEEP_TIME;
private Duration maxRetryTime = RetryDriver.DEFAULT_MAX_RETRY_TIME;
private boolean impersonationEnabled;
@NotNull
public Duration getMetastoreTimeout()
{
return metastoreTimeout;
}
@Config("hive.metastore-timeout")
public ThriftMetastoreConfig setMetastoreTimeout(Duration metastoreTimeout)
{
this.metastoreTimeout = metastoreTimeout;
return this;
}
public HostAndPort getSocksProxy()
{
return socksProxy;
}
@Config("hive.metastore.thrift.client.socks-proxy")
public ThriftMetastoreConfig setSocksProxy(HostAndPort socksProxy)
{
this.socksProxy = socksProxy;
return this;
}
@Min(0)
public int getMaxRetries()
{
return maxRetries;
}
@Config("hive.metastore.thrift.client.max-retries")
@ConfigDescription("Maximum number of retry attempts for metastore requests")
public ThriftMetastoreConfig setMaxRetries(int maxRetries)
{
this.maxRetries = maxRetries;
return this;
}
public double getBackoffScaleFactor()
{
return backoffScaleFactor;
}
@Config("hive.metastore.thrift.client.backoff-scale-factor")
@ConfigDescription("Scale factor for metastore request retry delay")
public ThriftMetastoreConfig setBackoffScaleFactor(double backoffScaleFactor)
{
this.backoffScaleFactor = backoffScaleFactor;
return this;
}
@NotNull
public Duration getMaxRetryTime()
{
return maxRetryTime;
}
@Config("hive.metastore.thrift.client.max-retry-time")
@ConfigDescription("Total time limit for a metastore request to be retried")
public ThriftMetastoreConfig setMaxRetryTime(Duration maxRetryTime)
{
this.maxRetryTime = maxRetryTime;
return this;
}
public Duration getMinBackoffDelay()
{
return minBackoffDelay;
}
@Config("hive.metastore.thrift.client.min-backoff-delay")
@ConfigDescription("Minimum delay between metastore request retries")
public ThriftMetastoreConfig setMinBackoffDelay(Duration minBackoffDelay)
{
this.minBackoffDelay = minBackoffDelay;
return this;
}
public Duration getMaxBackoffDelay()
{
return maxBackoffDelay;
}
@Config("hive.metastore.thrift.client.max-backoff-delay")
@ConfigDescription("Maximum delay between metastore request retries")
public ThriftMetastoreConfig setMaxBackoffDelay(Duration maxBackoffDelay)
{
this.maxBackoffDelay = maxBackoffDelay;
return this;
}
public boolean isImpersonationEnabled()
{
return impersonationEnabled;
}
@Config("hive.metastore.thrift.impersonation.enabled")
@ConfigDescription("Should end user be impersonated when communicating with metastore")
public ThriftMetastoreConfig setImpersonationEnabled(boolean impersonationEnabled)
{
this.impersonationEnabled = impersonationEnabled;
return this;
}
}
| Use backward compatible feature toggle name
| presto-hive/src/main/java/io/prestosql/plugin/hive/metastore/thrift/ThriftMetastoreConfig.java | Use backward compatible feature toggle name |
|
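The row above shows the pattern for renaming a configuration key without breaking existing deployments: @Config declares the current property name and @LegacyConfig maps the old name to the same setter, so configs that still set hive.metastore.impersonation-enabled keep working. A minimal sketch of the pattern (hypothetical class and property names, assuming Airlift's configuration module as imported in the class above):

import io.airlift.configuration.Config;
import io.airlift.configuration.LegacyConfig;

public class ExampleConfig {
    private boolean featureEnabled;

    public boolean isFeatureEnabled() {
        return featureEnabled;
    }

    @Config("example.feature.enabled")       // current, preferred key
    @LegacyConfig("example.feature-enabled") // old key, still accepted
    public ExampleConfig setFeatureEnabled(boolean featureEnabled) {
        this.featureEnabled = featureEnabled;
        return this;
    }
}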
Java | apache-2.0 | a0e01ef76d4ab334ad75fd52a7829245bada85d6 | 0 | OHDSI/WebAPI,OHDSI/WebAPI,OHDSI/WebAPI | /*
* Copyright 2015 Observational Health Data Sciences and Informatics [OHDSI.org].
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.service;
import com.cosium.spring.data.jpa.entity.graph.domain.EntityGraph;
import com.cosium.spring.data.jpa.entity.graph.domain.EntityGraphUtils;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.opencsv.CSVWriter;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.IterableUtils;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.analysis.Utils;
import org.ohdsi.circe.helper.ResourceHelper;
import org.ohdsi.sql.SqlRender;
import org.ohdsi.sql.SqlTranslate;
import org.ohdsi.webapi.GenerationStatus;
import org.ohdsi.webapi.cohortdefinition.CohortDefinition;
import org.ohdsi.webapi.common.generation.GenerateSqlResult;
import org.ohdsi.webapi.common.generation.GenerationUtils;
import org.ohdsi.webapi.ircalc.*;
import org.ohdsi.webapi.job.GeneratesNotification;
import org.ohdsi.webapi.job.JobExecutionResource;
import org.ohdsi.webapi.service.dto.AnalysisInfoDTO;
import org.ohdsi.webapi.service.dto.IRAnalysisDTO;
import org.ohdsi.webapi.service.dto.IRAnalysisShortDTO;
import org.ohdsi.webapi.shiro.Entities.UserEntity;
import org.ohdsi.webapi.shiro.Entities.UserRepository;
import org.ohdsi.webapi.shiro.annotations.DataSourceAccess;
import org.ohdsi.webapi.shiro.annotations.SourceKey;
import org.ohdsi.webapi.shiro.management.Security;
import org.ohdsi.webapi.shiro.management.datasource.SourceAccessor;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceDaimon;
import org.ohdsi.webapi.util.CopyUtils;
import org.ohdsi.webapi.util.ExceptionUtils;
import org.ohdsi.webapi.util.PreparedStatementRenderer;
import org.ohdsi.webapi.util.SessionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.job.builder.SimpleJobBuilder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.convert.ConversionService;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.jdbc.support.rowset.SqlRowSet;
import org.springframework.stereotype.Component;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.DefaultTransactionDefinition;
import javax.annotation.PostConstruct;
import javax.servlet.ServletContext;
import javax.ws.rs.InternalServerErrorException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.ByteArrayOutputStream;
import java.io.StringWriter;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import static org.ohdsi.webapi.Constants.GENERATE_IR_ANALYSIS;
import static org.ohdsi.webapi.Constants.Params.*;
import static org.ohdsi.webapi.util.SecurityUtils.whitelist;
/**
*
* @author Chris Knoll <[email protected]>
*/
@Component
public class IRAnalysisService extends AbstractDaoService implements GeneratesNotification, IRAnalysisResource {
private static final Logger log = LoggerFactory.getLogger(IRAnalysisService.class);
private final static String STRATA_STATS_QUERY_TEMPLATE = ResourceHelper.GetResourceAsString("/resources/incidencerate/sql/strata_stats.sql");
private static final String NAME = "irAnalysis";
private static final String NO_INCIDENCE_RATE_ANALYSIS_MESSAGE = "There is no incidence rate analysis with id = %d.";
private static final EntityGraph ANALYSIS_WITH_EXECUTION_INFO = EntityGraphUtils.fromName("IncidenceRateAnalysis.withExecutionInfoList");
private final IRAnalysisQueryBuilder queryBuilder;
@Autowired
private IncidenceRateAnalysisRepository irAnalysisRepository;
@Autowired
private IRExecutionInfoRepository irExecutionInfoRepository;
@Autowired
private UserRepository userRepository;
@Autowired
private JobService jobService;
@Autowired
private Security security;
@Autowired
private SourceService sourceService;
@Autowired
private GenerationUtils generationUtils;
@Autowired
ConversionService conversionService;
@Autowired
private ObjectMapper objectMapper;
//Directly wired since IRAnalysisService is directly called by Jersey and @DataSourceAccess wouldn't work in this case
@Autowired
private SourceAccessor sourceAccessor;
@Context
ServletContext context;
public IRAnalysisService(final ObjectMapper objectMapper) {
this.queryBuilder = new IRAnalysisQueryBuilder(objectMapper);
}
private ExecutionInfo findExecutionInfoBySourceId(Collection<ExecutionInfo> infoList, Integer sourceId) {
for (ExecutionInfo info : infoList) {
if (sourceId.equals(info.getId().getSourceId())) {
return info;
}
}
return null;
}
public static class StratifyReportItem {
public long bits;
public long totalPersons;
public long timeAtRisk;
public long cases;
}
public static class GenerateSqlRequest {
public GenerateSqlRequest() {
}
@JsonProperty("analysisId")
public Integer analysisId;
@JsonProperty("expression")
public IncidenceRateAnalysisExpression expression;
@JsonProperty("options")
public IRAnalysisQueryBuilder.BuildExpressionQueryOptions options;
}
private final RowMapper<AnalysisReport.Summary> summaryMapper = (rs, rowNum) -> {
AnalysisReport.Summary summary = new AnalysisReport.Summary();
summary.targetId = rs.getInt("target_id");
summary.outcomeId = rs.getInt("outcome_id");
summary.totalPersons = rs.getLong("person_count");
summary.timeAtRisk = rs.getLong("time_at_risk");
summary.cases = rs.getLong("cases");
return summary;
};
private List<AnalysisReport.Summary> getAnalysisSummaryList(int id, Source source) {
String tqName = "tableQualifier";
String tqValue = source.getTableQualifier(SourceDaimon.DaimonType.Results);
String sql = "select target_id, outcome_id, sum(person_count) as person_count, sum(time_at_risk) as time_at_risk," +
" sum(cases) as cases from @tableQualifier.ir_analysis_result where analysis_id = @id GROUP BY target_id, outcome_id";
PreparedStatementRenderer psr = new PreparedStatementRenderer(source, sql, tqName, tqValue, "id", whitelist(id));
return getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), summaryMapper);
}
private final RowMapper<AnalysisReport.StrataStatistic> strataRuleStatisticMapper = (rs, rowNum) -> {
AnalysisReport.StrataStatistic statistic = new AnalysisReport.StrataStatistic();
statistic.id = rs.getInt("strata_sequence");
statistic.name = rs.getString("name");
statistic.targetId = rs.getInt("target_id");
statistic.outcomeId = rs.getInt("outcome_id");
statistic.totalPersons = rs.getLong("person_count");
statistic.timeAtRisk = rs.getLong("time_at_risk");
statistic.cases = rs.getLong("cases");
return statistic;
};
private List<AnalysisReport.StrataStatistic> getStrataStatistics(int id, Source source) {
String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);
PreparedStatementRenderer psr = new PreparedStatementRenderer(source, STRATA_STATS_QUERY_TEMPLATE, "results_database_schema", resultsTableQualifier, "analysis_id", whitelist(id));
return getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), strataRuleStatisticMapper);
}
private int countSetBits(long n) {
int count = 0;
while (n > 0) {
n &= (n - 1);
count++;
}
return count;
}
private String formatBitMask(Long n, int size) {
return StringUtils.reverse(StringUtils.leftPad(Long.toBinaryString(n), size, "0"));
}
private final RowMapper<StratifyReportItem> stratifyResultsMapper = (rs, rowNum) -> {
StratifyReportItem resultItem = new StratifyReportItem();
resultItem.bits = rs.getLong("strata_mask");
resultItem.totalPersons = rs.getLong("person_count");
resultItem.timeAtRisk = rs.getLong("time_at_risk");
resultItem.cases = rs.getLong("cases");
return resultItem;
};
private String getStrataTreemapData(int analysisId, int targetId, int outcomeId, int inclusionRuleCount, Source source) {
String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);
String query = "select strata_mask, person_count, time_at_risk, cases from @resultsTableQualifier.ir_analysis_result where analysis_id = @analysis_id and target_id = @target_id and outcome_id = @outcome_id";
Object[] paramValues = {analysisId, targetId, outcomeId};
String[] params = {"analysis_id", "target_id", "outcome_id"};
PreparedStatementRenderer psr = new PreparedStatementRenderer(source, query, "resultsTableQualifier", resultsTableQualifier, params, paramValues, SessionUtils.sessionId());
// [0] is the inclusion rule bitmask, [1] is the count of the match
List<StratifyReportItem> items = getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), stratifyResultsMapper);
Map<Integer, List<StratifyReportItem>> groups = new HashMap<>();
for (StratifyReportItem item : items) {
int bitsSet = countSetBits(item.bits);
if (!groups.containsKey(bitsSet)) {
groups.put(bitsSet, new ArrayList<>());
}
groups.get(bitsSet).add(item);
}
StringBuilder treemapData = new StringBuilder("{\"name\" : \"Everyone\", \"children\" : [");
List<Integer> groupKeys = new ArrayList<>(groups.keySet());
Collections.sort(groupKeys);
Collections.reverse(groupKeys);
int groupCount = 0;
// create nested treemap data where more matches (more bits set in the bitmask) appear higher in the hierarchy
for (Integer groupKey : groupKeys) {
if (groupCount > 0) {
treemapData.append(",");
}
treemapData.append(String.format("{\"name\" : \"Group %d\", \"children\" : [", groupKey));
int groupItemCount = 0;
for (StratifyReportItem groupItem : groups.get(groupKey)) {
if (groupItemCount > 0) {
treemapData.append(",");
}
//sb_treemap.Append("{\"name\": \"" + cohort_identifer + "\", \"size\": " + cohorts[cohort_identifer].ToString() + "}");
treemapData.append(String.format("{\"name\": \"%s\", \"size\": %d, \"cases\": %d, \"timeAtRisk\": %d }", formatBitMask(groupItem.bits, inclusionRuleCount), groupItem.totalPersons, groupItem.cases, groupItem.timeAtRisk));
groupItemCount++;
}
groupCount++;
}
treemapData.append(StringUtils.repeat("]}", groupCount + 1));
return treemapData.toString();
}
@Override
public List<IRAnalysisShortDTO> getIRAnalysisList() {
return getTransactionTemplate().execute(transactionStatus -> {
Iterable<IncidenceRateAnalysis> analysisList = this.irAnalysisRepository.findAll();
return StreamSupport.stream(analysisList.spliterator(), false)
.map(analysis -> conversionService.convert(analysis, IRAnalysisShortDTO.class))
.collect(Collectors.toList());
});
}
@Override
public int getCountIRWithSameName(final int id, String name) {
return irAnalysisRepository.getCountIRWithSameName(id, name);
}
@Override
public IRAnalysisDTO createAnalysis(IRAnalysisDTO analysis) {
Date currentTime = Calendar.getInstance().getTime();
UserEntity user = userRepository.findByLogin(security.getSubject());
// it might be possible to leverage saveAnalysis() but not sure how to pull the auto ID from
// the DB to pass it into saveAnalysis (since saveAnalysis does a findOne() at the start).
// If there's a way to get the Entity into the persistence manager so findOne() returns this newly created entity
// then we could create the entity here (without persist) and then call saveAnalysis within the same Tx.
IncidenceRateAnalysis newAnalysis = new IncidenceRateAnalysis();
newAnalysis.setName(analysis.getName())
.setDescription(analysis.getDescription());
newAnalysis.setCreatedBy(user);
newAnalysis.setCreatedDate(currentTime);
if (analysis.getExpression() != null) {
IncidenceRateAnalysisDetails details = new IncidenceRateAnalysisDetails(newAnalysis);
newAnalysis.setDetails(details);
details.setExpression(analysis.getExpression());
}
else {
newAnalysis.setDetails(null);
}
IncidenceRateAnalysis createdAnalysis = this.irAnalysisRepository.save(newAnalysis);
return conversionService.convert(createdAnalysis, IRAnalysisDTO.class);
}
@Override
public IRAnalysisDTO getAnalysis(final int id) {
return getTransactionTemplate().execute(transactionStatus -> {
IncidenceRateAnalysis a = this.irAnalysisRepository.findOne(id);
ExceptionUtils.throwNotFoundExceptionIfNull(a, String.format(NO_INCIDENCE_RATE_ANALYSIS_MESSAGE, id));
return conversionService.convert(a, IRAnalysisDTO.class);
});
}
@Override
public IRAnalysisDTO saveAnalysis(final int id, IRAnalysisDTO analysis) {
Date currentTime = Calendar.getInstance().getTime();
UserEntity user = userRepository.findByLogin(security.getSubject());
IncidenceRateAnalysis updatedAnalysis = this.irAnalysisRepository.findOne(id);
updatedAnalysis.setName(analysis.getName())
.setDescription(analysis.getDescription());
updatedAnalysis.setModifiedBy(user);
updatedAnalysis.setModifiedDate(currentTime);
if (analysis.getExpression() != null) {
IncidenceRateAnalysisDetails details = updatedAnalysis.getDetails();
if (details == null) {
details = new IncidenceRateAnalysisDetails(updatedAnalysis);
updatedAnalysis.setDetails(details);
}
details.setExpression(analysis.getExpression());
}
else
updatedAnalysis.setDetails(null);
this.irAnalysisRepository.save(updatedAnalysis);
return getAnalysis(id);
}
@Override
@DataSourceAccess
public JobExecutionResource performAnalysis(final int analysisId, final @SourceKey String sourceKey) {
Date startTime = Calendar.getInstance().getTime();
Source source = this.getSourceRepository().findBySourceKey(sourceKey);
ExceptionUtils.throwNotFoundExceptionIfNull(source, String.format("There is no source with sourceKey = %s", sourceKey));
sourceAccessor.checkAccess(source);
DefaultTransactionDefinition requiresNewTx = new DefaultTransactionDefinition();
requiresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
TransactionStatus initStatus = this.getTransactionTemplate().getTransactionManager().getTransaction(requiresNewTx);
IncidenceRateAnalysis analysis = this.irAnalysisRepository.findOneWithExecutionsOnExistingSources(analysisId, ANALYSIS_WITH_EXECUTION_INFO);
ExecutionInfo analysisInfo = findExecutionInfoBySourceId(analysis.getExecutionInfoList(), source.getSourceId());
if (analysisInfo != null) {
if (analysisInfo.getStatus() != GenerationStatus.COMPLETE)
return null; // Exit execution, another process has started it.
}
else {
analysisInfo = new ExecutionInfo(analysis, source);
analysis.getExecutionInfoList().add(analysisInfo);
}
analysisInfo.setStatus(GenerationStatus.PENDING)
.setStartTime(startTime)
.setExecutionDuration(null);
this.irAnalysisRepository.save(analysis);
this.getTransactionTemplate().getTransactionManager().commit(initStatus);
JobParametersBuilder builder = new JobParametersBuilder();
builder.addString(JOB_NAME, String.format("IR Analysis: %d: %s (%s)", analysis.getId(), source.getSourceName(), source.getSourceKey()));
builder.addString(ANALYSIS_ID, String.valueOf(analysisId));
builder.addString(SOURCE_ID, String.valueOf(source.getSourceId()));
SimpleJobBuilder generateIrJob = generationUtils.buildJobForCohortBasedAnalysisTasklet(
GENERATE_IR_ANALYSIS,
source,
builder,
getSourceJdbcTemplate(source),
chunkContext -> {
Integer irId = Integer.valueOf(chunkContext.getStepContext().getJobParameters().get(ANALYSIS_ID).toString());
IncidenceRateAnalysis ir = this.irAnalysisRepository.findOne(irId);
IncidenceRateAnalysisExpression expression = Utils.deserialize(ir.getDetails().getExpression(), IncidenceRateAnalysisExpression.class);
return Stream.concat(
expression.targetIds.stream(),
expression.outcomeIds.stream()
).map(id -> {
CohortDefinition cd = new CohortDefinition();
cd.setId(id);
return cd;
})
.collect(Collectors.toList());
},
new IRAnalysisTasklet(getSourceJdbcTemplate(source), getTransactionTemplate(), irAnalysisRepository, sourceService, queryBuilder, objectMapper)
);
generateIrJob.listener(new IRAnalysisInfoListener(getTransactionTemplate(), irAnalysisRepository));
final JobParameters jobParameters = builder.toJobParameters();
return jobService.runJob(generateIrJob.build(), jobParameters);
}
@Override
public void cancelAnalysis(int analysisId, String sourceKey) {
Source source = getSourceRepository().findBySourceKey(sourceKey);
jobService.cancelJobExecution(NAME, j -> {
JobParameters jobParameters = j.getJobParameters();
return Objects.equals(jobParameters.getString(ANALYSIS_ID), String.valueOf(analysisId))
&& Objects.equals(jobParameters.getString(SOURCE_ID), String.valueOf(source.getSourceId()));
});
}
@Override
public List<AnalysisInfoDTO> getAnalysisInfo(final int id) {
List<ExecutionInfo> executionInfoList = irExecutionInfoRepository.findByAnalysisId(id);
return executionInfoList.stream().map(ei -> {
AnalysisInfoDTO info = new AnalysisInfoDTO();
info.setExecutionInfo(ei);
return info;
}).collect(Collectors.toList());
}
@Override
@DataSourceAccess
public AnalysisInfoDTO getAnalysisInfo(int id, @SourceKey String sourceKey) {
Source source = sourceService.findBySourceKey(sourceKey);
ExceptionUtils.throwNotFoundExceptionIfNull(source, String.format("There is no source with sourceKey = %s", sourceKey));
sourceAccessor.checkAccess(source);
AnalysisInfoDTO info = new AnalysisInfoDTO();
List<ExecutionInfo> executionInfoList = irExecutionInfoRepository.findByAnalysisId(id);
info.setExecutionInfo(executionInfoList.stream().filter(i -> Objects.equals(i.getSource(), source))
.findFirst().orElse(null));
try{
if (Objects.nonNull(info.getExecutionInfo()) && Objects.equals(info.getExecutionInfo().getStatus(), GenerationStatus.COMPLETE)
&& info.getExecutionInfo().getIsValid()) {
info.setSummaryList(getAnalysisSummaryList(id, source));
}
}catch (Exception e) {
log.error("Error getting IR Analysis summary list", e);
throw new InternalServerErrorException();
}
return info;
}
@Override
public AnalysisReport getAnalysisReport(final int id, final String sourceKey, final int targetId, final int outcomeId ) {
Source source = this.getSourceRepository().findBySourceKey(sourceKey);
AnalysisReport.Summary summary = IterableUtils.find(getAnalysisSummaryList(id, source), summary12 -> ((summary12.targetId == targetId) && (summary12.outcomeId == outcomeId)));
Collection<AnalysisReport.StrataStatistic> strataStats = CollectionUtils.select(getStrataStatistics(id, source),
summary1 -> ((summary1.targetId == targetId) && (summary1.outcomeId == outcomeId)));
String treemapData = getStrataTreemapData(id, targetId, outcomeId, strataStats.size(), source);
AnalysisReport report = new AnalysisReport();
report.summary = summary;
report.stratifyStats = new ArrayList<>(strataStats);
report.treemapData = treemapData;
return report;
}
@Override
public GenerateSqlResult generateSql(GenerateSqlRequest request) {
IRAnalysisQueryBuilder.BuildExpressionQueryOptions options = request.options;
GenerateSqlResult result = new GenerateSqlResult();
if (options == null) {
options = new IRAnalysisQueryBuilder.BuildExpressionQueryOptions();
}
String expressionSql = queryBuilder.buildAnalysisQuery(request.expression, request.analysisId, options);
result.templateSql = SqlRender.renderSql(expressionSql, null, null);
return result;
}
@Override
public IRAnalysisDTO copy(final int id) {
IRAnalysisDTO analysis = getAnalysis(id);
analysis.setId(null); // clear the ID
analysis.setName(getNameForCopy(analysis.getName()));
return createAnalysis(analysis);
}
@Override
public Response export(final int id) {
Response response = null;
HashMap<String, String> fileList = new HashMap<>();
HashMap<Integer, String> distTypeLookup = new HashMap<>();
distTypeLookup.put(1, "TAR");
distTypeLookup.put(2, "TTO");
try {
IncidenceRateAnalysis analysis = this.irAnalysisRepository.findOne(id);
Set<ExecutionInfo> executions = analysis.getExecutionInfoList();
fileList.put("analysisDefinition.json", analysis.getDetails().getExpression());
// sequentially return results of IR calculation. In Spring 1.4.2, we can utilize @Async operations to do this in parallel.
// store results in a single CSV file
ArrayList<String[]> summaryLines = new ArrayList<>();
ArrayList<String[]> strataLines = new ArrayList<>();
ArrayList<String[]> distLines = new ArrayList<>();
for (ExecutionInfo execution : executions)
{
Source source = execution.getSource();
String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);
// perform this query to CDM in an isolated transaction to avoid expensive JDBC transaction synchronization
DefaultTransactionDefinition requiresNewTx = new DefaultTransactionDefinition();
requiresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
TransactionStatus initStatus = this.getTransactionTemplateRequiresNew().getTransactionManager().getTransaction(requiresNewTx);
// get the summary data
List<AnalysisReport.Summary> summaryList = getAnalysisSummaryList(id, source);
if (summaryLines.isEmpty())
{
summaryLines.add("db_id#targetId#outcomeId#total#timeAtRisk#cases".split("#"));
}
for (AnalysisReport.Summary summary : summaryList)
{
summaryLines.add(new String[] {source.getSourceKey(),String.valueOf(summary.targetId), String.valueOf(summary.outcomeId), String.valueOf(summary.totalPersons), String.valueOf(summary.timeAtRisk), String.valueOf(summary.cases)});
}
// get the strata results
List<AnalysisReport.StrataStatistic> strataList = getStrataStatistics(id, source);
if (strataLines.isEmpty())
{
strataLines.add("db_id#targetId#outcomeId#strata_id#strata_name#total#timeAtRisk#cases".split("#"));
}
for (AnalysisReport.StrataStatistic strata : strataList)
{
strataLines.add(new String[] {source.getSourceKey(),String.valueOf(strata.targetId), String.valueOf(strata.outcomeId),String.valueOf(strata.id), String.valueOf(strata.name), String.valueOf(strata.totalPersons), String.valueOf(strata.timeAtRisk), String.valueOf(strata.cases)});
}
// get the distribution data
String distQuery = String.format("select '%s' as db_id, target_id, outcome_id, strata_sequence, dist_type, total, avg_value, std_dev, min_value, p10_value, p25_value, median_value, p75_value, p90_value, max_value from %s.ir_analysis_dist where analysis_id = %d", source.getSourceKey(), resultsTableQualifier, id);
String translatedSql = SqlTranslate.translateSql(distQuery, source.getSourceDialect(), SessionUtils.sessionId(), resultsTableQualifier);
SqlRowSet rs = this.getSourceJdbcTemplate(source).queryForRowSet(translatedSql);
this.getTransactionTemplateRequiresNew().getTransactionManager().commit(initStatus);
if (distLines.isEmpty())
{
distLines.add(rs.getMetaData().getColumnNames());
}
while (rs.next())
{
ArrayList<String> columns = new ArrayList<>();
for(int i = 1; i <= rs.getMetaData().getColumnNames().length; i++)
{
switch (rs.getMetaData().getColumnName(i)) {
case "dist_type":
columns.add(distTypeLookup.get(rs.getInt(i)));
break;
default:
columns.add(rs.getString(i));
break;
}
}
distLines.add(columns.toArray(new String[0]));
}
}
// Write report lines to CSV
StringWriter sw = null;
CSVWriter csvWriter = null;
sw = new StringWriter();
csvWriter = new CSVWriter(sw);
csvWriter.writeAll(summaryLines);
csvWriter.flush();
fileList.put("ir_summary.csv", sw.getBuffer().toString());
sw = new StringWriter();
csvWriter = new CSVWriter(sw);
csvWriter.writeAll(strataLines);
csvWriter.flush();
fileList.put("ir_strata.csv", sw.getBuffer().toString());
sw = new StringWriter();
csvWriter = new CSVWriter(sw);
csvWriter.writeAll(distLines);
csvWriter.flush();
fileList.put("ir_dist.csv", sw.getBuffer().toString());
// build zip output
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ZipOutputStream zos = new ZipOutputStream(baos);
for(String fileName : fileList.keySet())
{
ZipEntry resultsEntry = new ZipEntry(fileName);
zos.putNextEntry(resultsEntry);
zos.write(fileList.get(fileName).getBytes());
}
zos.closeEntry();
zos.close();
baos.flush();
baos.close();
response = Response
.ok(baos)
.type(MediaType.APPLICATION_OCTET_STREAM)
.header("Content-Disposition", String.format("attachment; filename=\"%s\"", "ir_analysis_" + id + ".zip"))
.build();
} catch (Exception ex) {
throw new RuntimeException(ex);
}
return response;
}
@Override
public void delete(final int id) {
irAnalysisRepository.delete(id);
}
@Override
public void deleteInfo(final int id, final String sourceKey) {
IncidenceRateAnalysis analysis = irAnalysisRepository.findOne(id);
ExecutionInfo itemToRemove = null;
for (ExecutionInfo info : analysis.getExecutionInfoList())
{
if (info.getSource().getSourceKey().equals(sourceKey))
itemToRemove = info;
}
if (itemToRemove != null)
analysis.getExecutionInfoList().remove(itemToRemove);
irAnalysisRepository.save(analysis);
}
@PostConstruct
public void init() {
invalidateIRExecutions();
}
@Override
public String getJobName() {
return NAME;
}
@Override
public String getExecutionFoldingKey() {
return ANALYSIS_ID;
}
private void invalidateIRExecutions() {
getTransactionTemplateRequiresNew().execute(status -> {
List<ExecutionInfo> executions = irExecutionInfoRepository.findByStatusIn(INVALIDATE_STATUSES);
invalidateExecutions(executions);
irExecutionInfoRepository.save(executions);
return null;
});
}
private String getNameForCopy(String dtoName) {
return CopyUtils.getNameForCopy(dtoName, this::countLikeName, irAnalysisRepository.findByName(dtoName));
}
private int countLikeName(String name) {
return irAnalysisRepository.countByNameStartsWith(name);
}
}
| src/main/java/org/ohdsi/webapi/service/IRAnalysisService.java | /*
* Copyright 2015 Observational Health Data Sciences and Informatics [OHDSI.org].
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ohdsi.webapi.service;
import com.cosium.spring.data.jpa.entity.graph.domain.EntityGraph;
import com.cosium.spring.data.jpa.entity.graph.domain.EntityGraphUtils;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.opencsv.CSVWriter;
import org.apache.commons.collections4.CollectionUtils;
import org.apache.commons.collections4.IterableUtils;
import org.apache.commons.lang3.StringUtils;
import org.ohdsi.analysis.Utils;
import org.ohdsi.circe.helper.ResourceHelper;
import org.ohdsi.sql.SqlRender;
import org.ohdsi.sql.SqlTranslate;
import org.ohdsi.webapi.GenerationStatus;
import org.ohdsi.webapi.cohortdefinition.CohortDefinition;
import org.ohdsi.webapi.common.generation.GenerateSqlResult;
import org.ohdsi.webapi.common.generation.GenerationUtils;
import org.ohdsi.webapi.ircalc.*;
import org.ohdsi.webapi.job.GeneratesNotification;
import org.ohdsi.webapi.job.JobExecutionResource;
import org.ohdsi.webapi.service.dto.AnalysisInfoDTO;
import org.ohdsi.webapi.service.dto.IRAnalysisDTO;
import org.ohdsi.webapi.service.dto.IRAnalysisShortDTO;
import org.ohdsi.webapi.shiro.Entities.UserEntity;
import org.ohdsi.webapi.shiro.Entities.UserRepository;
import org.ohdsi.webapi.shiro.annotations.DataSourceAccess;
import org.ohdsi.webapi.shiro.annotations.SourceKey;
import org.ohdsi.webapi.shiro.management.Security;
import org.ohdsi.webapi.source.Source;
import org.ohdsi.webapi.source.SourceDaimon;
import org.ohdsi.webapi.util.CopyUtils;
import org.ohdsi.webapi.util.ExceptionUtils;
import org.ohdsi.webapi.util.PreparedStatementRenderer;
import org.ohdsi.webapi.util.SessionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.JobParametersBuilder;
import org.springframework.batch.core.job.builder.SimpleJobBuilder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.convert.ConversionService;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.jdbc.support.rowset.SqlRowSet;
import org.springframework.stereotype.Component;
import org.springframework.transaction.TransactionDefinition;
import org.springframework.transaction.TransactionStatus;
import org.springframework.transaction.support.DefaultTransactionDefinition;
import javax.annotation.PostConstruct;
import javax.servlet.ServletContext;
import javax.ws.rs.InternalServerErrorException;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import java.io.ByteArrayOutputStream;
import java.io.StringWriter;
import java.util.*;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
import static org.ohdsi.webapi.Constants.GENERATE_IR_ANALYSIS;
import static org.ohdsi.webapi.Constants.Params.*;
import static org.ohdsi.webapi.util.SecurityUtils.whitelist;
/**
*
* @author Chris Knoll <[email protected]>
*/
@Component
public class IRAnalysisService extends AbstractDaoService implements GeneratesNotification, IRAnalysisResource {
private static final Logger log = LoggerFactory.getLogger(IRAnalysisService.class);
private final static String STRATA_STATS_QUERY_TEMPLATE = ResourceHelper.GetResourceAsString("/resources/incidencerate/sql/strata_stats.sql");
private static final String NAME = "irAnalysis";
private static final String NO_INCIDENCE_RATE_ANALYSIS_MESSAGE = "There is no incidence rate analysis with id = %d.";
private static final EntityGraph ANALYSIS_WITH_EXECUTION_INFO = EntityGraphUtils.fromName("IncidenceRateAnalysis.withExecutionInfoList");
private final IRAnalysisQueryBuilder queryBuilder;
@Autowired
private IncidenceRateAnalysisRepository irAnalysisRepository;
@Autowired
private IRExecutionInfoRepository irExecutionInfoRepository;
@Autowired
private UserRepository userRepository;
@Autowired
private JobService jobService;
@Autowired
private Security security;
@Autowired
private SourceService sourceService;
@Autowired
private GenerationUtils generationUtils;
@Autowired
ConversionService conversionService;
@Autowired
private ObjectMapper objectMapper;
@Context
ServletContext context;
public IRAnalysisService(final ObjectMapper objectMapper) {
this.queryBuilder = new IRAnalysisQueryBuilder(objectMapper);
}
private ExecutionInfo findExecutionInfoBySourceId(Collection<ExecutionInfo> infoList, Integer sourceId) {
for (ExecutionInfo info : infoList) {
if (sourceId.equals(info.getId().getSourceId())) {
return info;
}
}
return null;
}
public static class StratifyReportItem {
public long bits;
public long totalPersons;
public long timeAtRisk;
public long cases;
}
public static class GenerateSqlRequest {
public GenerateSqlRequest() {
}
@JsonProperty("analysisId")
public Integer analysisId;
@JsonProperty("expression")
public IncidenceRateAnalysisExpression expression;
@JsonProperty("options")
public IRAnalysisQueryBuilder.BuildExpressionQueryOptions options;
}
private final RowMapper<AnalysisReport.Summary> summaryMapper = (rs, rowNum) -> {
AnalysisReport.Summary summary = new AnalysisReport.Summary();
summary.targetId = rs.getInt("target_id");
summary.outcomeId = rs.getInt("outcome_id");
summary.totalPersons = rs.getLong("person_count");
summary.timeAtRisk = rs.getLong("time_at_risk");
summary.cases = rs.getLong("cases");
return summary;
};
private List<AnalysisReport.Summary> getAnalysisSummaryList(int id, Source source) {
String tqName = "tableQualifier";
String tqValue = source.getTableQualifier(SourceDaimon.DaimonType.Results);
String sql = "select target_id, outcome_id, sum(person_count) as person_count, sum(time_at_risk) as time_at_risk," +
" sum(cases) as cases from @tableQualifier.ir_analysis_result where analysis_id = @id GROUP BY target_id, outcome_id";
PreparedStatementRenderer psr = new PreparedStatementRenderer(source, sql, tqName, tqValue, "id", whitelist(id));
return getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), summaryMapper);
}
private final RowMapper<AnalysisReport.StrataStatistic> strataRuleStatisticMapper = (rs, rowNum) -> {
AnalysisReport.StrataStatistic statistic = new AnalysisReport.StrataStatistic();
statistic.id = rs.getInt("strata_sequence");
statistic.name = rs.getString("name");
statistic.targetId = rs.getInt("target_id");
statistic.outcomeId = rs.getInt("outcome_id");
statistic.totalPersons = rs.getLong("person_count");
statistic.timeAtRisk = rs.getLong("time_at_risk");
statistic.cases = rs.getLong("cases");
return statistic;
};
private List<AnalysisReport.StrataStatistic> getStrataStatistics(int id, Source source) {
String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);
PreparedStatementRenderer psr = new PreparedStatementRenderer(source, STRATA_STATS_QUERY_TEMPLATE, "results_database_schema", resultsTableQualifier, "analysis_id", whitelist(id));
return getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), strataRuleStatisticMapper);
}
private int countSetBits(long n) {
int count = 0;
while (n > 0) {
n &= (n - 1);
count++;
}
return count;
}
private String formatBitMask(Long n, int size) {
return StringUtils.reverse(StringUtils.leftPad(Long.toBinaryString(n), size, "0"));
}
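// Editor's illustrative sketch (not part of the original file): how a strata bitmask is read.
// With 3 inclusion rules, a mask of 6 (binary 110) means rules 1 and 2 matched.
private static void bitMaskExample() {
int matchedRules = Long.bitCount(6L); // 2 — the same result countSetBits(6L) computes
// formatBitMask(6L, 3) pads to "110" and reverses it to "011", so the character at
// position i answers "did rule i match?": rule 0 -> '0', rule 1 -> '1', rule 2 -> '1'.
String display = new StringBuilder(String.format("%3s", Long.toBinaryString(6L)).replace(' ', '0')).reverse().toString(); // "011"
}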
private final RowMapper<StratifyReportItem> stratifyResultsMapper = (rs, rowNum) -> {
StratifyReportItem resultItem = new StratifyReportItem();
resultItem.bits = rs.getLong("strata_mask");
resultItem.totalPersons = rs.getLong("person_count");
resultItem.timeAtRisk = rs.getLong("time_at_risk");
resultItem.cases = rs.getLong("cases");
return resultItem;
};
private String getStrataTreemapData(int analysisId, int targetId, int outcomeId, int inclusionRuleCount, Source source) {
String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);
String query = "select strata_mask, person_count, time_at_risk, cases from @resultsTableQualifier.ir_analysis_result where analysis_id = @analysis_id and target_id = @target_id and outcome_id = @outcome_id";
Object[] paramValues = {analysisId, targetId, outcomeId};
String[] params = {"analysis_id", "target_id", "outcome_id"};
PreparedStatementRenderer psr = new PreparedStatementRenderer(source, query, "resultsTableQualifier", resultsTableQualifier, params, paramValues, SessionUtils.sessionId());
// each result row carries the inclusion-rule bitmask together with its person, time-at-risk and case counts
List<StratifyReportItem> items = getSourceJdbcTemplate(source).query(psr.getSql(), psr.getSetter(), stratifyResultsMapper);
Map<Integer, List<StratifyReportItem>> groups = new HashMap<>();
for (StratifyReportItem item : items) {
int bitsSet = countSetBits(item.bits);
if (!groups.containsKey(bitsSet)) {
groups.put(bitsSet, new ArrayList<>());
}
groups.get(bitsSet).add(item);
}
StringBuilder treemapData = new StringBuilder("{\"name\" : \"Everyone\", \"children\" : [");
List<Integer> groupKeys = new ArrayList<>(groups.keySet());
Collections.sort(groupKeys);
Collections.reverse(groupKeys);
int groupCount = 0;
// create nested treemap data where more matches (more bits set in the mask) appear higher in the hierarchy
for (Integer groupKey : groupKeys) {
if (groupCount > 0) {
treemapData.append(",");
}
treemapData.append(String.format("{\"name\" : \"Group %d\", \"children\" : [", groupKey));
int groupItemCount = 0;
for (StratifyReportItem groupItem : groups.get(groupKey)) {
if (groupItemCount > 0) {
treemapData.append(",");
}
//sb_treemap.Append("{\"name\": \"" + cohort_identifer + "\", \"size\": " + cohorts[cohort_identifer].ToString() + "}");
treemapData.append(String.format("{\"name\": \"%s\", \"size\": %d, \"cases\": %d, \"timeAtRisk\": %d }", formatBitMask(groupItem.bits, inclusionRuleCount), groupItem.totalPersons, groupItem.cases, groupItem.timeAtRisk));
groupItemCount++;
}
groupCount++;
}
treemapData.append(StringUtils.repeat("]}", groupCount + 1));
return treemapData.toString();
}
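// Editor's note (shape inferred from the StringBuilder logic above; the numbers are hypothetical):
// the method emits nested JSON like
// {"name" : "Everyone", "children" : [{"name" : "Group 2", "children" : [
//   {"name": "011", "size": 120, "cases": 7, "timeAtRisk": 950 }]}]}
// with groups ordered by descending count of matched inclusion rules.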
@Override
public List<IRAnalysisShortDTO> getIRAnalysisList() {
return getTransactionTemplate().execute(transactionStatus -> {
Iterable<IncidenceRateAnalysis> analysisList = this.irAnalysisRepository.findAll();
return StreamSupport.stream(analysisList.spliterator(), false)
.map(analysis -> conversionService.convert(analysis, IRAnalysisShortDTO.class))
.collect(Collectors.toList());
});
}
@Override
public int getCountIRWithSameName(final int id, String name) {
return irAnalysisRepository.getCountIRWithSameName(id, name);
}
@Override
public IRAnalysisDTO createAnalysis(IRAnalysisDTO analysis) {
Date currentTime = Calendar.getInstance().getTime();
UserEntity user = userRepository.findByLogin(security.getSubject());
// It might be possible to leverage saveAnalysis() here, but it is not clear how to pull the auto ID from
// the DB to pass into saveAnalysis (since saveAnalysis does a findOne() at the start).
// If there were a way to get the entity into the persistence manager so findOne() returned this newly
// created entity, we could create the entity here (without persisting) and then call saveAnalysis within the same Tx.
IncidenceRateAnalysis newAnalysis = new IncidenceRateAnalysis();
newAnalysis.setName(analysis.getName())
.setDescription(analysis.getDescription());
newAnalysis.setCreatedBy(user);
newAnalysis.setCreatedDate(currentTime);
if (analysis.getExpression() != null) {
IncidenceRateAnalysisDetails details = new IncidenceRateAnalysisDetails(newAnalysis);
newAnalysis.setDetails(details);
details.setExpression(analysis.getExpression());
}
else {
newAnalysis.setDetails(null);
}
IncidenceRateAnalysis createdAnalysis = this.irAnalysisRepository.save(newAnalysis);
return conversionService.convert(createdAnalysis, IRAnalysisDTO.class);
}
@Override
public IRAnalysisDTO getAnalysis(final int id) {
return getTransactionTemplate().execute(transactionStatus -> {
IncidenceRateAnalysis a = this.irAnalysisRepository.findOne(id);
ExceptionUtils.throwNotFoundExceptionIfNull(a, String.format(NO_INCIDENCE_RATE_ANALYSIS_MESSAGE, id));
return conversionService.convert(a, IRAnalysisDTO.class);
});
}
@Override
public IRAnalysisDTO saveAnalysis(final int id, IRAnalysisDTO analysis) {
Date currentTime = Calendar.getInstance().getTime();
UserEntity user = userRepository.findByLogin(security.getSubject());
IncidenceRateAnalysis updatedAnalysis = this.irAnalysisRepository.findOne(id);
updatedAnalysis.setName(analysis.getName())
.setDescription(analysis.getDescription());
updatedAnalysis.setModifiedBy(user);
updatedAnalysis.setModifiedDate(currentTime);
if (analysis.getExpression() != null) {
IncidenceRateAnalysisDetails details = updatedAnalysis.getDetails();
if (details == null) {
details = new IncidenceRateAnalysisDetails(updatedAnalysis);
updatedAnalysis.setDetails(details);
}
details.setExpression(analysis.getExpression());
}
else
updatedAnalysis.setDetails(null);
this.irAnalysisRepository.save(updatedAnalysis);
return getAnalysis(id);
}
@Override
public JobExecutionResource performAnalysis(final int analysisId, final String sourceKey) {
Date startTime = Calendar.getInstance().getTime();
Source source = this.getSourceRepository().findBySourceKey(sourceKey);
DefaultTransactionDefinition requiresNewTx = new DefaultTransactionDefinition();
requiresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
TransactionStatus initStatus = this.getTransactionTemplate().getTransactionManager().getTransaction(requiresNewTx);
IncidenceRateAnalysis analysis = this.irAnalysisRepository.findOneWithExecutionsOnExistingSources(analysisId, ANALYSIS_WITH_EXECUTION_INFO);
ExecutionInfo analysisInfo = findExecutionInfoBySourceId(analysis.getExecutionInfoList(), source.getSourceId());
if (analysisInfo != null) {
if (analysisInfo.getStatus() != GenerationStatus.COMPLETE)
return null; // Exit execution, another process has started it.
}
else {
analysisInfo = new ExecutionInfo(analysis, source);
analysis.getExecutionInfoList().add(analysisInfo);
}
analysisInfo.setStatus(GenerationStatus.PENDING)
.setStartTime(startTime)
.setExecutionDuration(null);
this.irAnalysisRepository.save(analysis);
this.getTransactionTemplate().getTransactionManager().commit(initStatus);
JobParametersBuilder builder = new JobParametersBuilder();
builder.addString(JOB_NAME, String.format("IR Analysis: %d: %s (%s)", analysis.getId(), source.getSourceName(), source.getSourceKey()));
builder.addString(ANALYSIS_ID, String.valueOf(analysisId));
builder.addString(SOURCE_ID, String.valueOf(source.getSourceId()));
SimpleJobBuilder generateIrJob = generationUtils.buildJobForCohortBasedAnalysisTasklet(
GENERATE_IR_ANALYSIS,
source,
builder,
getSourceJdbcTemplate(source),
chunkContext -> {
Integer irId = Integer.valueOf(chunkContext.getStepContext().getJobParameters().get(ANALYSIS_ID).toString());
IncidenceRateAnalysis ir = this.irAnalysisRepository.findOne(irId);
IncidenceRateAnalysisExpression expression = Utils.deserialize(ir.getDetails().getExpression(), IncidenceRateAnalysisExpression.class);
return Stream.concat(
expression.targetIds.stream(),
expression.outcomeIds.stream()
).map(id -> {
CohortDefinition cd = new CohortDefinition();
cd.setId(id);
return cd;
})
.collect(Collectors.toList());
},
new IRAnalysisTasklet(getSourceJdbcTemplate(source), getTransactionTemplate(), irAnalysisRepository, sourceService, queryBuilder, objectMapper)
);
generateIrJob.listener(new IRAnalysisInfoListener(getTransactionTemplate(), irAnalysisRepository));
final JobParameters jobParameters = builder.toJobParameters();
return jobService.runJob(generateIrJob.build(), jobParameters);
}
@Override
public void cancelAnalysis(int analysisId, String sourceKey) {
Source source = getSourceRepository().findBySourceKey(sourceKey);
jobService.cancelJobExecution(NAME, j -> {
JobParameters jobParameters = j.getJobParameters();
return Objects.equals(jobParameters.getString(ANALYSIS_ID), String.valueOf(analysisId))
&& Objects.equals(jobParameters.getString(SOURCE_ID), String.valueOf(source.getSourceId()));
});
}
@Override
public List<AnalysisInfoDTO> getAnalysisInfo(final int id) {
List<ExecutionInfo> executionInfoList = irExecutionInfoRepository.findByAnalysisId(id);
return executionInfoList.stream().map(ei -> {
AnalysisInfoDTO info = new AnalysisInfoDTO();
info.setExecutionInfo(ei);
return info;
}).collect(Collectors.toList());
}
@Override
@DataSourceAccess
public AnalysisInfoDTO getAnalysisInfo(int id, @SourceKey String sourceKey) {
Source source = sourceService.findBySourceKey(sourceKey);
ExceptionUtils.throwNotFoundExceptionIfNull(source, String.format("There is no source with sourceKey = %s", sourceKey));
AnalysisInfoDTO info = new AnalysisInfoDTO();
List<ExecutionInfo> executionInfoList = irExecutionInfoRepository.findByAnalysisId(id);
info.setExecutionInfo(executionInfoList.stream().filter(i -> Objects.equals(i.getSource(), source))
.findFirst().orElse(null));
try{
if (Objects.nonNull(info.getExecutionInfo()) && Objects.equals(info.getExecutionInfo().getStatus(), GenerationStatus.COMPLETE)
&& info.getExecutionInfo().getIsValid()) {
info.setSummaryList(getAnalysisSummaryList(id, source));
}
}catch (Exception e) {
log.error("Error getting IR Analysis summary list", e);
throw new InternalServerErrorException();
}
return info;
}
@Override
public AnalysisReport getAnalysisReport(final int id, final String sourceKey, final int targetId, final int outcomeId ) {
Source source = this.getSourceRepository().findBySourceKey(sourceKey);
AnalysisReport.Summary summary = IterableUtils.find(getAnalysisSummaryList(id, source), summary12 -> ((summary12.targetId == targetId) && (summary12.outcomeId == outcomeId)));
Collection<AnalysisReport.StrataStatistic> strataStats = CollectionUtils.select(getStrataStatistics(id, source),
summary1 -> ((summary1.targetId == targetId) && (summary1.outcomeId == outcomeId)));
String treemapData = getStrataTreemapData(id, targetId, outcomeId, strataStats.size(), source);
AnalysisReport report = new AnalysisReport();
report.summary = summary;
report.stratifyStats = new ArrayList<>(strataStats);
report.treemapData = treemapData;
return report;
}
@Override
public GenerateSqlResult generateSql(GenerateSqlRequest request) {
IRAnalysisQueryBuilder.BuildExpressionQueryOptions options = request.options;
GenerateSqlResult result = new GenerateSqlResult();
if (options == null) {
options = new IRAnalysisQueryBuilder.BuildExpressionQueryOptions();
}
String expressionSql = queryBuilder.buildAnalysisQuery(request.expression, request.analysisId, options);
result.templateSql = SqlRender.renderSql(expressionSql, null, null);
return result;
}
@Override
public IRAnalysisDTO copy(final int id) {
IRAnalysisDTO analysis = getAnalysis(id);
analysis.setId(null); // clear the ID
analysis.setName(getNameForCopy(analysis.getName()));
return createAnalysis(analysis);
}
@Override
public Response export(final int id) {
Response response = null;
HashMap<String, String> fileList = new HashMap<>();
HashMap<Integer, String> distTypeLookup = new HashMap<>();
distTypeLookup.put(1, "TAR");
distTypeLookup.put(2, "TTO");
try {
IncidenceRateAnalysis analysis = this.irAnalysisRepository.findOne(id);
Set<ExecutionInfo> executions = analysis.getExecutionInfoList();
fileList.put("analysisDefinition.json", analysis.getDetails().getExpression());
// sequentially return results of the IR calculation. In Spring 1.4.2, we can utilize @Async operations to do this in parallel.
// store results in a single CSV file
ArrayList<String[]> summaryLines = new ArrayList<>();
ArrayList<String[]> strataLines = new ArrayList<>();
ArrayList<String[]> distLines = new ArrayList<>();
for (ExecutionInfo execution : executions)
{
Source source = execution.getSource();
String resultsTableQualifier = source.getTableQualifier(SourceDaimon.DaimonType.Results);
// perform this query to CDM in an isolated transaction to avoid expensive JDBC transaction synchronization
DefaultTransactionDefinition requiresNewTx = new DefaultTransactionDefinition();
requiresNewTx.setPropagationBehavior(TransactionDefinition.PROPAGATION_REQUIRES_NEW);
TransactionStatus initStatus = this.getTransactionTemplateRequiresNew().getTransactionManager().getTransaction(requiresNewTx);
// get the summary data
List<AnalysisReport.Summary> summaryList = getAnalysisSummaryList(id, source);
if (summaryLines.isEmpty())
{
summaryLines.add("db_id#targetId#outcomeId#total#timeAtRisk#cases".split("#"));
}
for (AnalysisReport.Summary summary : summaryList)
{
summaryLines.add(new String[] {source.getSourceKey(),String.valueOf(summary.targetId), String.valueOf(summary.outcomeId), String.valueOf(summary.totalPersons), String.valueOf(summary.timeAtRisk), String.valueOf(summary.cases)});
}
// get the strata results
List<AnalysisReport.StrataStatistic> strataList = getStrataStatistics(id, source);
if (strataLines.isEmpty())
{
strataLines.add("db_id#targetId#outcomeId#strata_id#strata_name#total#timeAtRisk#cases".split("#"));
}
for (AnalysisReport.StrataStatistic strata : strataList)
{
strataLines.add(new String[] {source.getSourceKey(),String.valueOf(strata.targetId), String.valueOf(strata.outcomeId),String.valueOf(strata.id), String.valueOf(strata.name), String.valueOf(strata.totalPersons), String.valueOf(strata.timeAtRisk), String.valueOf(strata.cases)});
}
// get the distribution data
String distQuery = String.format("select '%s' as db_id, target_id, outcome_id, strata_sequence, dist_type, total, avg_value, std_dev, min_value, p10_value, p25_value, median_value, p75_value, p90_value, max_value from %s.ir_analysis_dist where analysis_id = %d", source.getSourceKey(), resultsTableQualifier, id);
String translatedSql = SqlTranslate.translateSql(distQuery, source.getSourceDialect(), SessionUtils.sessionId(), resultsTableQualifier);
SqlRowSet rs = this.getSourceJdbcTemplate(source).queryForRowSet(translatedSql);
this.getTransactionTemplateRequiresNew().getTransactionManager().commit(initStatus);
if (distLines.isEmpty())
{
distLines.add(rs.getMetaData().getColumnNames());
}
while (rs.next())
{
ArrayList<String> columns = new ArrayList<>();
for(int i = 1; i <= rs.getMetaData().getColumnNames().length; i++)
{
switch (rs.getMetaData().getColumnName(i)) {
case "dist_type":
columns.add(distTypeLookup.get(rs.getInt(i)));
break;
default:
columns.add(rs.getString(i));
break;
}
}
distLines.add(columns.toArray(new String[0]));
}
}
// Write report lines to CSV
StringWriter sw = null;
CSVWriter csvWriter = null;
sw = new StringWriter();
csvWriter = new CSVWriter(sw);
csvWriter.writeAll(summaryLines);
csvWriter.flush();
fileList.put("ir_summary.csv", sw.getBuffer().toString());
sw = new StringWriter();
csvWriter = new CSVWriter(sw);
csvWriter.writeAll(strataLines);
csvWriter.flush();
fileList.put("ir_strata.csv", sw.getBuffer().toString());
sw = new StringWriter();
csvWriter = new CSVWriter(sw);
csvWriter.writeAll(distLines);
csvWriter.flush();
fileList.put("ir_dist.csv", sw.getBuffer().toString());
// build zip output
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ZipOutputStream zos = new ZipOutputStream(baos);
for(String fileName : fileList.keySet())
{
ZipEntry resultsEntry = new ZipEntry(fileName);
zos.putNextEntry(resultsEntry);
zos.write(fileList.get(fileName).getBytes());
}
zos.closeEntry();
zos.close();
baos.flush();
baos.close();
response = Response
.ok(baos)
.type(MediaType.APPLICATION_OCTET_STREAM)
.header("Content-Disposition", String.format("attachment; filename=\"%s\"", "ir_analysis_" + id + ".zip"))
.build();
} catch (Exception ex) {
throw new RuntimeException(ex);
}
return response;
}
@Override
public void delete(final int id) {
irAnalysisRepository.delete(id);
}
@Override
public void deleteInfo(final int id, final String sourceKey) {
IncidenceRateAnalysis analysis = irAnalysisRepository.findOne(id);
ExecutionInfo itemToRemove = null;
for (ExecutionInfo info : analysis.getExecutionInfoList())
{
if (info.getSource().getSourceKey().equals(sourceKey))
itemToRemove = info;
}
if (itemToRemove != null)
analysis.getExecutionInfoList().remove(itemToRemove);
irAnalysisRepository.save(analysis);
}
@PostConstruct
public void init() {
invalidateIRExecutions();
}
@Override
public String getJobName() {
return NAME;
}
@Override
public String getExecutionFoldingKey() {
return ANALYSIS_ID;
}
private void invalidateIRExecutions() {
getTransactionTemplateRequiresNew().execute(status -> {
List<ExecutionInfo> executions = irExecutionInfoRepository.findByStatusIn(INVALIDATE_STATUSES);
invalidateExecutions(executions);
irExecutionInfoRepository.save(executions);
return null;
});
}
private String getNameForCopy(String dtoName) {
return CopyUtils.getNameForCopy(dtoName, this::countLikeName, irAnalysisRepository.findByName(dtoName));
}
private int countLikeName(String name) {
return irAnalysisRepository.countByNameStartsWith(name);
}
}
| Fixes IR execution on non-accessible IRs (#1124)
| src/main/java/org/ohdsi/webapi/service/IRAnalysisService.java | Fixes IR execution on non-accessible IRs (#1124) |
|
Java | apache-2.0 | dcc13892365591dfda51757bf99b126feaefc4ad | 0 | gstevey/gradle,robinverduijn/gradle,gstevey/gradle,gradle/gradle,blindpirate/gradle,robinverduijn/gradle,gstevey/gradle,gradle/gradle,gradle/gradle,robinverduijn/gradle,lsmaira/gradle,blindpirate/gradle,blindpirate/gradle,gstevey/gradle,lsmaira/gradle,lsmaira/gradle,gstevey/gradle,blindpirate/gradle,lsmaira/gradle,robinverduijn/gradle,robinverduijn/gradle,blindpirate/gradle,robinverduijn/gradle,lsmaira/gradle,blindpirate/gradle,gradle/gradle,gradle/gradle,gradle/gradle,blindpirate/gradle,robinverduijn/gradle,gstevey/gradle,robinverduijn/gradle,robinverduijn/gradle,lsmaira/gradle,blindpirate/gradle,robinverduijn/gradle,gstevey/gradle,lsmaira/gradle,blindpirate/gradle,gradle/gradle,lsmaira/gradle,lsmaira/gradle,gradle/gradle,gstevey/gradle,gstevey/gradle,gradle/gradle,robinverduijn/gradle,blindpirate/gradle,gradle/gradle,lsmaira/gradle | /*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.internal.composite;
import com.google.common.collect.Maps;
import org.gradle.StartParameter;
import org.gradle.api.initialization.IncludedBuild;
import org.gradle.api.internal.GradleInternal;
import org.gradle.api.internal.SettingsInternal;
import org.gradle.initialization.IncludedBuildFactory;
import org.gradle.initialization.SettingsLoader;
import org.gradle.internal.service.ServiceRegistry;
import java.io.File;
import java.util.Collection;
import java.util.Map;
public class CompositeBuildSettingsLoader implements SettingsLoader {
private final SettingsLoader delegate;
private final ServiceRegistry buildServices;
public CompositeBuildSettingsLoader(SettingsLoader delegate, ServiceRegistry buildServices) {
this.delegate = delegate;
this.buildServices = buildServices;
}
@Override
public SettingsInternal findAndLoadSettings(GradleInternal gradle) {
SettingsInternal settings = delegate.findAndLoadSettings(gradle);
Collection<IncludedBuild> includedBuilds = getIncludedBuilds(gradle.getStartParameter(), settings);
if (!includedBuilds.isEmpty()) {
CompositeContextBuilder compositeContextBuilder = buildServices.get(CompositeContextBuilder.class);
compositeContextBuilder.addToCompositeContext(includedBuilds);
}
return settings;
}
private Collection<IncludedBuild> getIncludedBuilds(StartParameter startParameter, SettingsInternal settings) {
Map<File, IncludedBuild> includedBuildMap = Maps.newLinkedHashMap();
includedBuildMap.putAll(settings.getIncludedBuilds());
for (File file : startParameter.getIncludedBuilds()) {
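// Editor's note: resolving the factory inside the loop keeps the lookup on demand — if no
// builds were included on the command line, the service registry is never queried (this is
// the point of the commit message below).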
IncludedBuildFactory includedBuildFactory = buildServices.get(IncludedBuildFactory.class);
if (!includedBuildMap.containsKey(file)) {
includedBuildMap.put(file, includedBuildFactory.createBuild(file));
}
}
return includedBuildMap.values();
}
}
| subprojects/core/src/main/java/org/gradle/internal/composite/CompositeBuildSettingsLoader.java | /*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.internal.composite;
import com.google.common.collect.Maps;
import org.gradle.StartParameter;
import org.gradle.api.initialization.IncludedBuild;
import org.gradle.api.internal.GradleInternal;
import org.gradle.api.internal.SettingsInternal;
import org.gradle.initialization.IncludedBuildFactory;
import org.gradle.initialization.SettingsLoader;
import org.gradle.internal.service.ServiceRegistry;
import java.io.File;
import java.util.Collection;
import java.util.Map;
public class CompositeBuildSettingsLoader implements SettingsLoader {
private final SettingsLoader delegate;
private final ServiceRegistry buildServices;
public CompositeBuildSettingsLoader(SettingsLoader delegate, ServiceRegistry buildServices) {
this.delegate = delegate;
this.buildServices = buildServices;
}
@Override
public SettingsInternal findAndLoadSettings(GradleInternal gradle) {
SettingsInternal settings = delegate.findAndLoadSettings(gradle);
Collection<IncludedBuild> includedBuilds = getIncludedBuilds(gradle.getStartParameter(), settings);
if (!includedBuilds.isEmpty()) {
CompositeContextBuilder compositeContextBuilder = buildServices.get(CompositeContextBuilder.class);
compositeContextBuilder.addToCompositeContext(includedBuilds);
}
return settings;
}
private Collection<IncludedBuild> getIncludedBuilds(StartParameter startParameter, SettingsInternal settings) {
IncludedBuildFactory includedBuildFactory = buildServices.get(IncludedBuildFactory.class);
Map<File, IncludedBuild> includedBuildMap = Maps.newLinkedHashMap();
includedBuildMap.putAll(settings.getIncludedBuilds());
for (File file : startParameter.getIncludedBuilds()) {
if (!includedBuildMap.containsKey(file)) {
includedBuildMap.put(file, includedBuildFactory.createBuild(file));
}
}
return includedBuildMap.values();
}
}
| Load the `IncludedBuildFactory` on demand
This avoids requiring the `:compositeBuilds` module to be present
for any gradle integration testing, since this service implementation
is provided by `CompositeBuildServices`.
| subprojects/core/src/main/java/org/gradle/internal/composite/CompositeBuildSettingsLoader.java | Load the `IncludedBuildFactory` on demand |
|
Java | apache-2.0 | f61305c954c5dc0e118d160a35089f932d7ca0a9 | 0 | InterestingLab/waterdrop,InterestingLab/waterdrop | package io.github.interestinglab.waterdrop.utils;
import com.alibaba.fastjson.JSONObject;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructType;
import io.github.interestinglab.waterdrop.config.ConfigRuntimeException;
import java.util.List;
import java.util.Map;
public class SparkSturctTypeUtil {
public static StructType getStructType(StructType schema, JSONObject json) {
StructType newSchema = schema.copy(schema.fields());
for (Map.Entry<String, Object> entry : json.entrySet()) {
String field = entry.getKey();
Object type = entry.getValue();
if (type instanceof JSONObject) {
StructType st = getStructType(new StructType(), (JSONObject) type);
newSchema = newSchema.add(field, st);
} else if (type instanceof List) {
List list = (List) type;
if (list.size() == 0) {
newSchema = newSchema.add(field, DataTypes.createArrayType(null, true));
} else {
Object o = list.get(0);
if (o instanceof JSONObject) {
StructType st = getStructType(new StructType(), (JSONObject) o);
newSchema = newSchema.add(field, DataTypes.createArrayType(st, true));
} else {
DataType st = getType(o.toString());
newSchema = newSchema.add(field, DataTypes.createArrayType(st, true));
}
}
} else {
newSchema = newSchema.add(field, getType(type.toString()));
}
}
return newSchema;
}
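// Editor's illustrative sketch (hypothetical schema config, not from the original repo):
// {"name": "string", "age": "integer", "tags": ["string"], "address": {"city": "string"}}
// yields StringType, IntegerType, ArrayType(StringType) and a nested StructType via the
// recursion above.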
private static DataType getType(String type) {
DataType dataType = DataTypes.NullType;
switch (type.toLowerCase()) {
case "string":
dataType = DataTypes.StringType;
break;
case "integer":
dataType = DataTypes.IntegerType;
break;
case "long":
dataType = DataTypes.LongType;
break;
case "double":
dataType = DataTypes.DoubleType;
break;
case "float":
dataType = DataTypes.FloatType;
break;
case "short":
dataType = DataTypes.ShortType;
break;
case "date":
dataType = DataTypes.DateType;
break;
case "timestamp":
dataType = DataTypes.TimestampType;
break;
case "boolean":
dataType = DataTypes.BooleanType;
break;
case "binary":
dataType = DataTypes.BinaryType;
break;
case "byte":
dataType = DataTypes.ByteType;
break;
default:
throw new ConfigRuntimeException("Throw data type exception, unknown type: " + type);
}
return dataType;
}
}
| waterdrop-core/src/main/java/io/github/interestinglab/waterdrop/utils/SparkSturctTypeUtil.java | package io.github.interestinglab.waterdrop.utils;
import com.alibaba.fastjson.JSONObject;
import org.apache.spark.sql.types.ArrayType;
import org.apache.spark.sql.types.DataType;
import org.apache.spark.sql.types.DataTypes;
import org.apache.spark.sql.types.StructType;
import io.github.interestinglab.waterdrop.config.ConfigRuntimeException;
import java.util.List;
import java.util.Map;
public class SparkSturctTypeUtil {
public static StructType getStructType(StructType schema, JSONObject json) {
StructType newSchema = schema.copy(schema.fields());
for (Map.Entry<String, Object> entry : json.entrySet()) {
String field = entry.getKey();
Object type = entry.getValue();
if (type instanceof JSONObject) {
StructType st = getStructType(new StructType(), (JSONObject) type);
newSchema = newSchema.add(field, st);
} else if (type instanceof List) {
List list = (List) type;
if (list.size() == 0) {
newSchema = newSchema.add(field, DataTypes.createArrayType(null, true));
} else {
Object o = list.get(0);
if (o instanceof JSONObject) {
StructType st = getStructType(new StructType(), (JSONObject) o);
newSchema = newSchema.add(field, DataTypes.createArrayType(st, true));
} else {
DataType st = getType(o.toString());
newSchema = newSchema.add(field, DataTypes.createArrayType(st, true));
}
}
} else {
newSchema = newSchema.add(field, getType(type.toString()));
}
}
return newSchema;
}
private static DataType getType(String type) {
DataType dataType = DataTypes.NullType;
switch (type.toLowerCase()) {
case "string":
dataType = DataTypes.StringType;
break;
case "integer":
dataType = DataTypes.IntegerType;
break;
case "long":
dataType = DataTypes.LongType;
break;
case "double":
dataType = DataTypes.DoubleType;
break;
case "float":
dataType = DataTypes.FloatType;
break;
case "short":
dataType = DataTypes.ShortType;
break;
case "date":
dataType = DataTypes.DateType;
break;
case "timestamp":
dataType = DataTypes.TimestampType;
break;
case "boolean":
dataType = DataTypes.BooleanType;
break;
case "binary":
dataType = DataTypes.BinaryType;
break;
case "byte":
dataType = DataTypes.ByteType;
break;
default:
throw new ConfigRuntimeException("Throw data type exception, unknown type: " + type);
}
return dataType;
}
}
| fix code style
| waterdrop-core/src/main/java/io/github/interestinglab/waterdrop/utils/SparkSturctTypeUtil.java | fix code style |
|
Java | apache-2.0 | 3790bcc994fd76286d17f2c475eca607a5b791dd | 0 | Leanplum/Leanplum-Android-SDK,Leanplum/Leanplum-Android-SDK,Leanplum/Leanplum-Android-SDK,Leanplum/Leanplum-Android-SDK | /*
* Copyright 2013, Leanplum, Inc. All rights reserved.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.leanplum.internal;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.AsyncTask;
import android.os.Build;
import android.support.annotation.NonNull;
import android.text.TextUtils;
import com.leanplum.Leanplum;
import com.leanplum.utils.SharedPreferencesUtil;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.EOFException;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.SocketTimeoutException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import java.util.UUID;
/**
* Leanplum request class.
*
* @author Andrew First
*/
public class Request {
private static final long DEVELOPMENT_MIN_DELAY_MS = 100;
private static final long DEVELOPMENT_MAX_DELAY_MS = 5000;
private static final long PRODUCTION_DELAY = 60000;
static final int MAX_EVENTS_PER_API_CALL;
static final String LEANPLUM = "__leanplum__";
static final String UUID_KEY = "uuid";
private static String appId;
private static String accessKey;
private static String deviceId;
private static String userId;
private static final LeanplumEventCallbackManager eventCallbackManager =
new LeanplumEventCallbackManager();
private static final Map<String, Boolean> fileTransferStatus = new HashMap<>();
private static int pendingDownloads;
private static NoPendingDownloadsCallback noPendingDownloadsBlock;
// The token is saved primarily for legacy SharedPreferences decryption. This could
// likely be removed in the future.
private static String token = null;
private static final Map<File, Long> fileUploadSize = new HashMap<>();
private static final Map<File, Double> fileUploadProgress = new HashMap<>();
private static String fileUploadProgressString = "";
private static long lastSendTimeMs;
private static final Object uploadFileLock = new Object();
private final String httpMethod;
private final String apiMethod;
private final Map<String, Object> params;
private ResponseCallback response;
private ErrorCallback error;
private boolean sent;
private long dataBaseIndex;
private static ApiResponseCallback apiResponse;
private static List<Map<String, Object>> localErrors = new ArrayList<>();
static {
if (Build.VERSION.SDK_INT <= 17) {
MAX_EVENTS_PER_API_CALL = 5000;
} else {
MAX_EVENTS_PER_API_CALL = 10000;
}
}
public static void setAppId(String appId, String accessKey) {
if (!TextUtils.isEmpty(appId)) {
Request.appId = appId.trim();
}
if (!TextUtils.isEmpty(accessKey)) {
Request.accessKey = accessKey.trim();
}
}
public static void setDeviceId(String deviceId) {
Request.deviceId = deviceId;
}
public static void setUserId(String userId) {
Request.userId = userId;
}
public static void setToken(String token) {
Request.token = token;
}
public static String token() {
return token;
}
/**
* Since requests are batched, another Request can end up sending this Request's events. Each
* Request therefore keeps the database index of its events, so that response, error and start
* callbacks can be matched to the right rows.
*
* @return Index of event at database.
*/
public long getDataBaseIndex() {
return dataBaseIndex;
}
// Update index of event at database.
public void setDataBaseIndex(long dataBaseIndex) {
this.dataBaseIndex = dataBaseIndex;
}
public static void loadToken() {
Context context = Leanplum.getContext();
SharedPreferences defaults = context.getSharedPreferences(
LEANPLUM, Context.MODE_PRIVATE);
String token = defaults.getString(Constants.Defaults.TOKEN_KEY, null);
if (token == null) {
return;
}
setToken(token);
}
public static void saveToken() {
Context context = Leanplum.getContext();
SharedPreferences defaults = context.getSharedPreferences(
LEANPLUM, Context.MODE_PRIVATE);
SharedPreferences.Editor editor = defaults.edit();
editor.putString(Constants.Defaults.TOKEN_KEY, Request.token());
SharedPreferencesUtil.commitChanges(editor);
}
public static String appId() {
return appId;
}
public static String deviceId() {
return deviceId;
}
public static String userId() {
return Request.userId;
}
public Request(String httpMethod, String apiMethod, Map<String, Object> params) {
this.httpMethod = httpMethod;
this.apiMethod = apiMethod;
this.params = params != null ? params : new HashMap<String, Object>();
// Check whether this is an error log and a SQLite exception has occurred.
if (Constants.Methods.LOG.equals(apiMethod) && LeanplumEventDataManager.willSendErrorLog) {
localErrors.add(createArgsDictionary());
}
// Make sure the Handler is initialized on the main thread.
OsHandler.getInstance();
dataBaseIndex = -1;
}
public static Request get(String apiMethod, Map<String, Object> params) {
Log.LeanplumLogType level = Constants.Methods.LOG.equals(apiMethod) ?
Log.LeanplumLogType.DEBUG : Log.LeanplumLogType.VERBOSE;
Log.log(level, "Will call API method " + apiMethod + " with arguments " + params);
return RequestFactory.getInstance().createRequest("GET", apiMethod, params);
}
public static Request post(String apiMethod, Map<String, Object> params) {
Log.LeanplumLogType level = Constants.Methods.LOG.equals(apiMethod) ?
Log.LeanplumLogType.DEBUG : Log.LeanplumLogType.VERBOSE;
Log.log(level, "Will call API method " + apiMethod + " with arguments " + params);
return RequestFactory.getInstance().createRequest("POST", apiMethod, params);
}
public void onResponse(ResponseCallback response) {
this.response = response;
}
public void onError(ErrorCallback error) {
this.error = error;
}
public void onApiResponse(ApiResponseCallback apiResponse) {
Request.apiResponse = apiResponse;
}
private Map<String, Object> createArgsDictionary() {
Map<String, Object> args = new HashMap<>();
args.put(Constants.Params.DEVICE_ID, deviceId);
args.put(Constants.Params.USER_ID, userId);
args.put(Constants.Params.ACTION, apiMethod);
args.put(Constants.Params.SDK_VERSION, Constants.LEANPLUM_VERSION);
args.put(Constants.Params.DEV_MODE, Boolean.toString(Constants.isDevelopmentModeEnabled));
args.put(Constants.Params.TIME, Double.toString(new Date().getTime() / 1000.0));
if (token != null) {
args.put(Constants.Params.TOKEN, token);
}
args.putAll(params);
return args;
}
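// Editor's sketch of a resulting payload (all values hypothetical): {deviceId=..., userId=...,
// action="track", sdkVersion=..., devMode="false", time="1.5E9", token=..., plus the
// per-request params merged in last}.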
private void saveRequestForLater(final Map<String, Object> args) {
final Request currentRequest = this;
LeanplumEventDataManager.executeAsyncTask(new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
try {
synchronized (Request.class) {
Context context = Leanplum.getContext();
SharedPreferences preferences = context.getSharedPreferences(
LEANPLUM, Context.MODE_PRIVATE);
SharedPreferences.Editor editor = preferences.edit();
long count = LeanplumEventDataManager.getEventsCount();
String uuid = preferences.getString(Constants.Defaults.UUID_KEY, null);
if (uuid == null || count % MAX_EVENTS_PER_API_CALL == 0) {
uuid = UUID.randomUUID().toString();
editor.putString(Constants.Defaults.UUID_KEY, uuid);
SharedPreferencesUtil.commitChanges(editor);
}
args.put(UUID_KEY, uuid);
LeanplumEventDataManager.insertEvent(JsonConverter.toJson(args));
dataBaseIndex = count;
// Check whether there is a response and/or error callback for this request. Callbacks are added
// to eventCallbackManager only when there is an internet connection; otherwise triggerErrorCallback
// handles the error callback for this event.
if (response != null || error != null && !Util.isConnected()) {
eventCallbackManager.addCallbacks(currentRequest, response, error);
}
}
} catch (Throwable t) {
Util.handleException(t);
}
return null;
}
});
}
public void send() {
this.sendEventually();
if (Constants.isDevelopmentModeEnabled) {
long currentTimeMs = System.currentTimeMillis();
long delayMs;
if (lastSendTimeMs == 0 || currentTimeMs - lastSendTimeMs > DEVELOPMENT_MAX_DELAY_MS) {
delayMs = DEVELOPMENT_MIN_DELAY_MS;
} else {
delayMs = (lastSendTimeMs + DEVELOPMENT_MAX_DELAY_MS) - currentTimeMs;
}
OsHandler.getInstance().postDelayed(new Runnable() {
@Override
public void run() {
try {
sendIfConnected();
} catch (Throwable t) {
Util.handleException(t);
}
}
}, delayMs);
}
}
/**
* Waits 1 second for other potential API calls, and then sends the call synchronously if no other
* call has been sent within 1 minute.
*/
public void sendIfDelayed() {
sendEventually();
OsHandler.getInstance().postDelayed(new Runnable() {
@Override
public void run() {
try {
sendIfDelayedHelper();
} catch (Throwable t) {
Util.handleException(t);
}
}
}, 1000);
}
/**
* Sends the call synchronously if no other call has been sent within 1 minute.
*/
private void sendIfDelayedHelper() {
if (Constants.isDevelopmentModeEnabled) {
send();
} else {
long currentTimeMs = System.currentTimeMillis();
if (lastSendTimeMs == 0 || currentTimeMs - lastSendTimeMs > PRODUCTION_DELAY) {
sendIfConnected();
}
}
}
public void sendIfConnected() {
if (Util.isConnected()) {
this.sendNow();
} else {
this.sendEventually();
Log.i("Device is offline, will send later");
triggerErrorCallback(new Exception("Not connected to the Internet"));
}
}
private void triggerErrorCallback(Exception e) {
if (error != null) {
error.error(e);
}
if (apiResponse != null) {
List<Map<String, Object>> requests = getUnsentRequests();
List<Map<String, Object>> requestsToSend = removeIrrelevantBackgroundStartRequests(requests);
apiResponse.response(requestsToSend, null, requests.size());
}
}
@SuppressWarnings("BooleanMethodIsAlwaysInverted")
private static boolean attachApiKeys(Map<String, Object> dict) {
if (appId == null || accessKey == null) {
Log.e("API keys are not set. Please use Leanplum.setAppIdForDevelopmentMode or "
+ "Leanplum.setAppIdForProductionMode.");
return false;
}
dict.put(Constants.Params.APP_ID, appId);
dict.put(Constants.Params.CLIENT_KEY, accessKey);
dict.put(Constants.Params.CLIENT, Constants.CLIENT);
return true;
}
public interface ResponseCallback {
void response(JSONObject response);
}
public interface ApiResponseCallback {
void response(List<Map<String, Object>> requests, JSONObject response, int countOfEvents);
}
public interface ErrorCallback {
void error(Exception e);
}
public interface NoPendingDownloadsCallback {
void noPendingDownloads();
}
/**
* Parse response body from server. Invoke potential error or response callbacks for all events
* of this request.
*
* @param responseBody JSONObject with response body from server.
* @param requestsToSend List of requests that were sent to the server.
* @param error Exception, if one occurred while sending.
* @param unsentRequestsSize Number of unsent requests that will be deleted.
*/
private void parseResponseBody(JSONObject responseBody, List<Map<String, Object>>
requestsToSend, Exception error, int unsentRequestsSize) {
synchronized (Request.class) {
if (responseBody == null && error != null) {
// Invoke potential error callbacks for all events of this request.
eventCallbackManager.invokeAllCallbacksWithError(error, unsentRequestsSize);
return;
} else if (responseBody == null) {
return;
}
// Response for last start call.
if (apiResponse != null) {
apiResponse.response(requestsToSend, responseBody, unsentRequestsSize);
}
// We will replace it with error from response body, if we found it.
Exception lastResponseError = error;
// Valid response, parse and handle response body.
int numResponses = Request.numResponses(responseBody);
for (int i = 0; i < numResponses; i++) {
JSONObject response = Request.getResponseAt(responseBody, i);
if (Request.isResponseSuccess(response)) {
continue; // If event response is successful, proceed with next one.
}
// If event response was not successful, handle error.
String errorMessage = getReadableErrorMessage(Request.getResponseError(response));
Log.e(errorMessage);
// Throw an exception if last event response is negative.
if (i == numResponses - 1) {
lastResponseError = new Exception(errorMessage);
}
}
if (lastResponseError != null) {
// Invoke potential error callbacks for all events of this request.
eventCallbackManager.invokeAllCallbacksWithError(lastResponseError, unsentRequestsSize);
} else {
// Invoke potential response callbacks for all events of this request.
eventCallbackManager.invokeAllCallbacksForResponse(responseBody, unsentRequestsSize);
}
}
}
/**
* Parse error message from server response and return readable error message.
*
* @param errorMessage String of error from server response.
* @return String of readable error message.
*/
@NonNull
private String getReadableErrorMessage(String errorMessage) {
if (errorMessage == null || errorMessage.length() == 0) {
errorMessage = "API error";
} else if (errorMessage.startsWith("App not found")) {
errorMessage = "No app matching the provided app ID was found.";
Constants.isInPermanentFailureState = true;
} else if (errorMessage.startsWith("Invalid access key")) {
errorMessage = "The access key you provided is not valid for this app.";
Constants.isInPermanentFailureState = true;
} else if (errorMessage.startsWith("Development mode requested but not permitted")) {
errorMessage = "A call to Leanplum.setAppIdForDevelopmentMode "
+ "with your production key was made, which is not permitted.";
Constants.isInPermanentFailureState = true;
} else {
errorMessage = "API error: " + errorMessage;
}
return errorMessage;
}
private void sendNow() {
if (Constants.isTestMode) {
return;
}
if (appId == null) {
Log.e("Cannot send request. appId is not set.");
return;
}
if (accessKey == null) {
Log.e("Cannot send request. accessKey is not set.");
return;
}
this.sendEventually();
Util.executeAsyncTask(true, new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
try {
sendRequests();
} catch (Throwable t) {
Util.handleException(t);
}
return null;
}
});
}
private static class RequestsWithEncoding {
List<Map<String, Object>> unsentRequests;
List<Map<String, Object>> requestsToSend;
String jsonEncodedString;
}
private RequestsWithEncoding getRequestsWithEncodedStringForErrors() {
List<Map<String, Object>> unsentRequests = new ArrayList<>();
List<Map<String, Object>> requestsToSend;
String jsonEncodedRequestsToSend;
String uuid = UUID.randomUUID().toString();
for (Map<String, Object> error : localErrors) {
error.put(UUID_KEY, uuid);
unsentRequests.add(error);
}
requestsToSend = unsentRequests;
jsonEncodedRequestsToSend = jsonEncodeUnsentRequests(unsentRequests);
RequestsWithEncoding requestsWithEncoding = new RequestsWithEncoding();
requestsWithEncoding.unsentRequests = unsentRequests;
requestsWithEncoding.requestsToSend = requestsToSend;
requestsWithEncoding.jsonEncodedString = jsonEncodedRequestsToSend;
return requestsWithEncoding;
}
private RequestsWithEncoding getRequestsWithEncodedStringStoredRequests() {
List<Map<String, Object>> unsentRequests;
List<Map<String, Object>> requestsToSend;
String jsonEncodedRequestsToSend;
RequestsWithEncoding requestsWithEncoding = new RequestsWithEncoding();
unsentRequests = getUnsentRequests();
requestsToSend = removeIrrelevantBackgroundStartRequests(unsentRequests);
jsonEncodedRequestsToSend = jsonEncodeUnsentRequests(unsentRequests);
requestsWithEncoding.unsentRequests = unsentRequests;
requestsWithEncoding.requestsToSend = requestsToSend;
requestsWithEncoding.jsonEncodedString = jsonEncodedRequestsToSend;
return requestsWithEncoding;
}
private RequestsWithEncoding getRequestsWithEncodedString() {
RequestsWithEncoding requestsWithEncoding;
// Check if we have localErrors; if so, send only the errors to the server.
if (localErrors.size() != 0) {
requestsWithEncoding = getRequestsWithEncodedStringForErrors();
} else {
requestsWithEncoding = getRequestsWithEncodedStringStoredRequests();
}
return requestsWithEncoding;
}
private void sendRequests() {
RequestsWithEncoding requestsWithEncoding = getRequestsWithEncodedString();
List<Map<String, Object>> unsentRequests = requestsWithEncoding.unsentRequests;
List<Map<String, Object>> requestsToSend = requestsWithEncoding.requestsToSend;
String jsonEncodedString = requestsWithEncoding.jsonEncodedString;
if (requestsToSend.isEmpty()) {
return;
}
final Map<String, Object> multiRequestArgs = new HashMap<>();
if (!Request.attachApiKeys(multiRequestArgs)) {
return;
}
multiRequestArgs.put(Constants.Params.DATA, jsonEncodedString);
multiRequestArgs.put(Constants.Params.SDK_VERSION, Constants.LEANPLUM_VERSION);
multiRequestArgs.put(Constants.Params.ACTION, Constants.Methods.MULTI);
multiRequestArgs.put(Constants.Params.TIME, Double.toString(new Date().getTime() / 1000.0));
JSONObject responseBody;
HttpURLConnection op = null;
try {
try {
op = Util.operation(
Constants.API_HOST_NAME,
Constants.API_SERVLET,
multiRequestArgs,
httpMethod,
Constants.API_SSL,
Constants.NETWORK_TIMEOUT_SECONDS);
responseBody = Util.getJsonResponse(op);
int statusCode = op.getResponseCode();
Exception errorException;
if (statusCode >= 200 && statusCode <= 299) {
if (responseBody == null) {
errorException = new Exception("Response JSON is null.");
deleteSentRequests(unsentRequests.size());
parseResponseBody(null, requestsToSend, errorException, unsentRequests.size());
return;
}
Exception exception = null;
// Check whether we received the same number of responses as the number of sent requests.
int numResponses = Request.numResponses(responseBody);
if (numResponses != requestsToSend.size()) {
Log.w("Sent " + requestsToSend.size() + " requests but only" +
" received " + numResponses);
}
parseResponseBody(responseBody, requestsToSend, null, unsentRequests.size());
// Clear localErrors list.
localErrors.clear();
deleteSentRequests(unsentRequests.size());
// Send another request if the last request had maximum events per api call.
if (unsentRequests.size() == MAX_EVENTS_PER_API_CALL) {
sendRequests();
}
} else {
errorException = new Exception("HTTP error " + statusCode);
if (statusCode != -1 && statusCode != 408 && !(statusCode >= 500 && statusCode <= 599)) {
deleteSentRequests(unsentRequests.size());
parseResponseBody(responseBody, requestsToSend, errorException, unsentRequests.size());
}
}
} catch (JSONException e) {
Log.e("Error parsing JSON response: " + e.toString() + "\n" + Log.getStackTraceString(e));
deleteSentRequests(unsentRequests.size());
parseResponseBody(null, requestsToSend, e, unsentRequests.size());
} catch (Exception e) {
Log.e("Unable to send request: " + e.toString() + "\n" + Log.getStackTraceString(e));
} finally {
if (op != null) {
op.disconnect();
}
}
} catch (Throwable t) {
Util.handleException(t);
}
}
public void sendEventually() {
if (Constants.isTestMode) {
return;
}
if (LeanplumEventDataManager.willSendErrorLog) {
return;
}
if (!sent) {
sent = true;
Map<String, Object> args = createArgsDictionary();
saveRequestForLater(args);
}
}
static void deleteSentRequests(int requestsCount) {
if (requestsCount == 0) {
return;
}
synchronized (Request.class) {
LeanplumEventDataManager.deleteEvents(requestsCount);
}
}
private static List<Map<String, Object>> getUnsentRequests() {
List<Map<String, Object>> requestData;
synchronized (Request.class) {
lastSendTimeMs = System.currentTimeMillis();
Context context = Leanplum.getContext();
SharedPreferences preferences = context.getSharedPreferences(
LEANPLUM, Context.MODE_PRIVATE);
SharedPreferences.Editor editor = preferences.edit();
requestData = LeanplumEventDataManager.getEvents(MAX_EVENTS_PER_API_CALL);
editor.remove(Constants.Defaults.UUID_KEY);
SharedPreferencesUtil.commitChanges(editor);
}
return requestData;
}
/**
* In various scenarios we can end up batching a big number of requests (e.g. device is offline,
* background sessions), which could make the stored API calls batch look something like:
* <p>
* <code>start(B), start(B), start(F), track, start(B), track, start(F), resumeSession</code>
* <p>
* where <code>start(B)</code> indicates a start in the background, and <code>start(F)</code>
* one in the foreground.
* <p>
* In this case the first two <code>start(B)</code> can be dropped because they don't contribute
* any relevant information for the batch call.
* <p>
* Essentially we drop every <code>start(B)</code> call that is directly followed by any kind of
* <code>start</code> call.
*
* @param requestData A list of the requests, stored on the device.
* @return A list of only these requests, which contain relevant information for the API call.
*/
private static List<Map<String, Object>> removeIrrelevantBackgroundStartRequests(
List<Map<String, Object>> requestData) {
List<Map<String, Object>> relevantRequests = new ArrayList<>();
int requestCount = requestData.size();
if (requestCount > 0) {
for (int i = 0; i < requestCount; i++) {
Map<String, Object> currentRequest = requestData.get(i);
if (i < requestCount - 1
&& Constants.Methods.START.equals(requestData.get(i + 1).get(Constants.Params.ACTION))
&& Constants.Methods.START.equals(currentRequest.get(Constants.Params.ACTION))
&& Boolean.TRUE.toString().equals(currentRequest.get(Constants.Params.BACKGROUND))) {
continue;
}
relevantRequests.add(currentRequest);
}
}
return relevantRequests;
}
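// Editor's walk-through (hypothetical stored batch): given [start(B), start(B), start(F), track],
// indices 0 and 1 are background starts directly followed by another start, so the loop skips
// them and the batch that is actually sent becomes [start(F), track].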
private static String jsonEncodeUnsentRequests(List<Map<String, Object>> requestData) {
Map<String, Object> data = new HashMap<>();
data.put(Constants.Params.DATA, requestData);
return JsonConverter.toJson(data);
}
private static String getSizeAsString(int bytes) {
if (bytes < (1 << 10)) {
return bytes + " B";
} else if (bytes < (1 << 20)) {
return (bytes >> 10) + " KB";
} else {
return (bytes >> 20) + " MB";
}
}
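// Editor's note: the integer shifts truncate, so e.g. 512 -> "512 B", 2048 -> "2 KB",
// 3145728 -> "3 MB", and 1536 -> "1 KB" (not "1.5 KB").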
private static void printUploadProgress() {
int totalFiles = fileUploadSize.size();
int sentFiles = 0;
int totalBytes = 0;
int sentBytes = 0;
for (Map.Entry<File, Long> entry : fileUploadSize.entrySet()) {
File file = entry.getKey();
long fileSize = entry.getValue();
double fileProgress = fileUploadProgress.get(file);
if (fileProgress == 1) {
sentFiles++;
}
sentBytes += (int) (fileSize * fileProgress);
totalBytes += fileSize;
}
String progressString = "Uploading resources. " +
sentFiles + '/' + totalFiles + " files completed; " +
getSizeAsString(sentBytes) + '/' + getSizeAsString(totalBytes) + " transferred.";
if (!fileUploadProgressString.equals(progressString)) {
fileUploadProgressString = progressString;
Log.i(progressString);
}
}
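  /**
   * Uploads the given files to the API in a single background request. Files that are already
   * being transferred or cannot be read are skipped, and uploads are serialized on uploadFileLock
   * so only one batch is in flight at a time.
   *
   * @param filenames Paths of the files to upload.
   * @param streams Input streams matching filenames, used both for sizing and for the upload.
   */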
public void sendFilesNow(final List<String> filenames, final List<InputStream> streams) {
if (Constants.isTestMode) {
return;
}
final Map<String, Object> dict = createArgsDictionary();
if (!attachApiKeys(dict)) {
return;
}
final List<File> filesToUpload = new ArrayList<>();
// First set up the files for upload
for (int i = 0; i < filenames.size(); i++) {
String filename = filenames.get(i);
if (filename == null || Boolean.TRUE.equals(fileTransferStatus.get(filename))) {
continue;
}
File file = new File(filename);
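      // Determine the upload size from the stream when possible, falling back to the file length.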
long size;
try {
size = streams.get(i).available();
} catch (IOException e) {
size = file.length();
} catch (NullPointerException e) {
// Not good. Can't read asset.
Log.e("Unable to read file " + filename);
continue;
}
fileTransferStatus.put(filename, true);
filesToUpload.add(file);
fileUploadSize.put(file, size);
fileUploadProgress.put(file, 0.0);
}
    if (filesToUpload.isEmpty()) {
return;
}
printUploadProgress();
// Now upload the files
Util.executeAsyncTask(false, new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
synchronized (uploadFileLock) { // Don't overload app and server with many upload tasks
JSONObject result;
HttpURLConnection op = null;
try {
op = Util.uploadFilesOperation(
Constants.Params.FILE,
filesToUpload,
streams,
Constants.API_HOST_NAME,
Constants.API_SERVLET,
dict,
httpMethod,
Constants.API_SSL,
60);
if (op != null) {
result = Util.getJsonResponse(op);
int statusCode = op.getResponseCode();
if (statusCode != 200) {
throw new Exception("Leanplum: Error sending request: " + statusCode);
}
if (Request.this.response != null) {
Request.this.response.response(result);
}
} else {
if (error != null) {
error.error(new Exception("Leanplum: Unable to read file."));
}
}
} catch (JSONException e) {
Log.e("Unable to convert to JSON.", e);
if (error != null) {
error.error(e);
}
} catch (SocketTimeoutException e) {
Log.e("Timeout uploading files. Try again or limit the number of files " +
"to upload with parameters to syncResourcesAsync.");
if (error != null) {
error.error(e);
}
} catch (Exception e) {
Log.e("Unable to send file.", e);
if (error != null) {
error.error(e);
}
} finally {
if (op != null) {
op.disconnect();
}
}
for (File file : filesToUpload) {
fileUploadProgress.put(file, 1.0);
}
printUploadProgress();
return null;
}
}
});
// TODO: Upload progress
}
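  /**
   * Asynchronously downloads a resource, either through the API (when url is null) or directly
   * from the given URL, and saves it under the app's documents directory at the given relative
   * path. Does nothing in test mode or when the file is already being transferred.
   */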
void downloadFile(final String path, final String url) {
if (Constants.isTestMode) {
return;
}
if (Boolean.TRUE.equals(fileTransferStatus.get(path))) {
return;
}
pendingDownloads++;
Log.i("Downloading resource " + path);
fileTransferStatus.put(path, true);
final Map<String, Object> dict = createArgsDictionary();
dict.put(Constants.Keys.FILENAME, path);
    if (!attachApiKeys(dict)) {
      // No request will be made, so roll back the pending download registered above.
      pendingDownloads--;
      return;
    }
Util.executeAsyncTask(false, new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
try {
downloadHelper(Constants.API_HOST_NAME, Constants.API_SERVLET, path, url, dict);
} catch (Throwable t) {
Util.handleException(t);
}
return null;
}
});
// TODO: Download progress
}
private void downloadHelper(String hostName, String servlet, final String path, final String url,
final Map<String, Object> dict) {
HttpURLConnection op = null;
URL originalURL = null;
try {
if (url == null) {
op = Util.operation(
hostName,
servlet,
dict,
httpMethod,
Constants.API_SSL,
Constants.NETWORK_TIMEOUT_SECONDS_FOR_DOWNLOADS);
} else {
op = Util.createHttpUrlConnection(url, httpMethod, url.startsWith("https://"),
Constants.NETWORK_TIMEOUT_SECONDS_FOR_DOWNLOADS);
}
originalURL = op.getURL();
op.connect();
int statusCode = op.getResponseCode();
if (statusCode != 200) {
throw new Exception("Leanplum: Error sending request to: " + hostName +
", HTTP status code: " + statusCode);
}
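      // Create any missing parent directories of the target path before writing the file.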
Stack<String> dirs = new Stack<>();
String currentDir = path;
while ((currentDir = new File(currentDir).getParent()) != null) {
dirs.push(currentDir);
}
      while (!dirs.isEmpty()) {
        String directory = FileManager.fileRelativeToDocuments(dirs.pop());
        File directoryFile = new File(directory);
        // mkdir() also returns false when the directory already exists, so only warn when the
        // directory is genuinely missing.
        if (!directoryFile.mkdir() && !directoryFile.isDirectory()) {
          Log.w("Failed to create directory: " + directory);
        }
      }
FileOutputStream out = new FileOutputStream(
new File(FileManager.fileRelativeToDocuments(path)));
Util.saveResponse(op, out);
pendingDownloads--;
if (Request.this.response != null) {
Request.this.response.response(null);
}
if (pendingDownloads == 0 && noPendingDownloadsBlock != null) {
noPendingDownloadsBlock.noPendingDownloads();
}
} catch (Exception e) {
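      // If the connection was redirected and the stream then ended prematurely, retry the
      // download against the redirected URL.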
if (e instanceof EOFException) {
if (op != null && !op.getURL().equals(originalURL)) {
downloadHelper(null, op.getURL().toString(), path, url, new HashMap<String, Object>());
return;
}
}
Log.e("Error downloading resource:" + path, e);
pendingDownloads--;
if (error != null) {
error.error(e);
}
if (pendingDownloads == 0 && noPendingDownloadsBlock != null) {
noPendingDownloadsBlock.noPendingDownloads();
}
} finally {
if (op != null) {
op.disconnect();
}
}
}
public static int numPendingDownloads() {
return pendingDownloads;
}
public static void onNoPendingDownloads(NoPendingDownloadsCallback block) {
noPendingDownloadsBlock = block;
}
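  /**
   * Returns the number of individual responses contained in a batched API reply, i.e. the length
   * of its "response" array, or 0 if the reply is null or cannot be parsed.
   */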
public static int numResponses(JSONObject response) {
if (response == null) {
return 0;
}
try {
return response.getJSONArray("response").length();
} catch (JSONException e) {
Log.e("Could not parse JSON response.", e);
return 0;
}
}
public static JSONObject getResponseAt(JSONObject response, int index) {
try {
return response.getJSONArray("response").getJSONObject(index);
} catch (JSONException e) {
Log.e("Could not parse JSON response.", e);
return null;
}
}
public static JSONObject getLastResponse(JSONObject response) {
int numResponses = numResponses(response);
if (numResponses > 0) {
return getResponseAt(response, numResponses - 1);
} else {
return null;
}
}
public static boolean isResponseSuccess(JSONObject response) {
if (response == null) {
return false;
}
try {
return response.getBoolean("success");
} catch (JSONException e) {
Log.e("Could not parse JSON response.", e);
return false;
}
}
public static String getResponseError(JSONObject response) {
if (response == null) {
return null;
}
try {
JSONObject error = response.optJSONObject("error");
if (error == null) {
return null;
}
return error.getString("message");
} catch (JSONException e) {
Log.e("Could not parse JSON response.", e);
return null;
}
}
}
| AndroidSDKCore/src/main/java/com/leanplum/internal/Request.java | /*
* Copyright 2013, Leanplum, Inc. All rights reserved.
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package com.leanplum.internal;
import android.content.Context;
import android.content.SharedPreferences;
import android.os.AsyncTask;
import android.os.Build;
import android.support.annotation.NonNull;
import android.text.TextUtils;
import com.leanplum.Leanplum;
import com.leanplum.utils.SharedPreferencesUtil;
import org.json.JSONException;
import org.json.JSONObject;
import java.io.EOFException;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.SocketTimeoutException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Stack;
import java.util.UUID;
/**
* Leanplum request class.
*
* @author Andrew First
*/
public class Request {
private static final long DEVELOPMENT_MIN_DELAY_MS = 100;
private static final long DEVELOPMENT_MAX_DELAY_MS = 5000;
private static final long PRODUCTION_DELAY = 60000;
static final int MAX_EVENTS_PER_API_CALL;
static final String LEANPLUM = "__leanplum__";
static final String UUID_KEY = "uuid";
private static String appId;
private static String accessKey;
private static String deviceId;
private static String userId;
private static final LeanplumEventCallbackManager eventCallbackManager =
new LeanplumEventCallbackManager();
private static final Map<String, Boolean> fileTransferStatus = new HashMap<>();
private static int pendingDownloads;
private static NoPendingDownloadsCallback noPendingDownloadsBlock;
// The token is saved primarily for legacy SharedPreferences decryption. This could
// likely be removed in the future.
private static String token = null;
private static final Map<File, Long> fileUploadSize = new HashMap<>();
private static final Map<File, Double> fileUploadProgress = new HashMap<>();
private static String fileUploadProgressString = "";
private static long lastSendTimeMs;
private static final Object uploadFileLock = new Object();
private final String httpMethod;
private final String apiMethod;
private final Map<String, Object> params;
private ResponseCallback response;
private ErrorCallback error;
private boolean sent;
private long dataBaseIndex;
private static ApiResponseCallback apiResponse;
private static List<Map<String, Object>> localErrors = new ArrayList<>();
static {
if (Build.VERSION.SDK_INT <= 17) {
MAX_EVENTS_PER_API_CALL = 5000;
} else {
MAX_EVENTS_PER_API_CALL = 10000;
}
}
public static void setAppId(String appId, String accessKey) {
if (!TextUtils.isEmpty(appId)) {
Request.appId = appId.trim();
}
if (!TextUtils.isEmpty(accessKey)) {
Request.accessKey = accessKey.trim();
}
}
public static void setDeviceId(String deviceId) {
Request.deviceId = deviceId;
}
public static void setUserId(String userId) {
Request.userId = userId;
}
public static void setToken(String token) {
Request.token = token;
}
public static String token() {
return token;
}
/**
* Since requests are batched there can be a case where other Request can take future Request
* events. We need to have for each Request database index for handle response, error or start
* callbacks.
*
* @return Index of event at database.
*/
public long getDataBaseIndex() {
return dataBaseIndex;
}
// Update index of event at database.
public void setDataBaseIndex(long dataBaseIndex) {
this.dataBaseIndex = dataBaseIndex;
}
public static void loadToken() {
Context context = Leanplum.getContext();
SharedPreferences defaults = context.getSharedPreferences(
LEANPLUM, Context.MODE_PRIVATE);
String token = defaults.getString(Constants.Defaults.TOKEN_KEY, null);
if (token == null) {
return;
}
setToken(token);
}
public static void saveToken() {
Context context = Leanplum.getContext();
SharedPreferences defaults = context.getSharedPreferences(
LEANPLUM, Context.MODE_PRIVATE);
SharedPreferences.Editor editor = defaults.edit();
editor.putString(Constants.Defaults.TOKEN_KEY, Request.token());
SharedPreferencesUtil.commitChanges(editor);
}
public static String appId() {
return appId;
}
public static String deviceId() {
return deviceId;
}
public static String userId() {
return Request.userId;
}
public Request(String httpMethod, String apiMethod, Map<String, Object> params) {
this.httpMethod = httpMethod;
this.apiMethod = apiMethod;
this.params = params != null ? params : new HashMap<String, Object>();
// Check if it is error and here was SQLite exception.
if (Constants.Methods.LOG.equals(apiMethod) && LeanplumEventDataManager.willSendErrorLog) {
localErrors.add(createArgsDictionary());
}
// Make sure the Handler is initialized on the main thread.
OsHandler.getInstance();
dataBaseIndex = -1;
}
public static Request get(String apiMethod, Map<String, Object> params) {
Log.LeanplumLogType level = Constants.Methods.LOG.equals(apiMethod) ?
Log.LeanplumLogType.DEBUG : Log.LeanplumLogType.VERBOSE;
Log.log(level, "Will call API method " + apiMethod + " with arguments " + params);
return RequestFactory.getInstance().createRequest("GET", apiMethod, params);
}
public static Request post(String apiMethod, Map<String, Object> params) {
Log.LeanplumLogType level = Constants.Methods.LOG.equals(apiMethod) ?
Log.LeanplumLogType.DEBUG : Log.LeanplumLogType.VERBOSE;
Log.log(level, "Will call API method " + apiMethod + " with arguments " + params);
return RequestFactory.getInstance().createRequest("POST", apiMethod, params);
}
public void onResponse(ResponseCallback response) {
this.response = response;
}
public void onError(ErrorCallback error) {
this.error = error;
}
public void onApiResponse(ApiResponseCallback apiResponse) {
Request.apiResponse = apiResponse;
}
private Map<String, Object> createArgsDictionary() {
Map<String, Object> args = new HashMap<>();
args.put(Constants.Params.DEVICE_ID, deviceId);
args.put(Constants.Params.USER_ID, userId);
args.put(Constants.Params.ACTION, apiMethod);
args.put(Constants.Params.SDK_VERSION, Constants.LEANPLUM_VERSION);
args.put(Constants.Params.DEV_MODE, Boolean.toString(Constants.isDevelopmentModeEnabled));
args.put(Constants.Params.TIME, Double.toString(new Date().getTime() / 1000.0));
if (token != null) {
args.put(Constants.Params.TOKEN, token);
}
args.putAll(params);
return args;
}
private void saveRequestForLater(final Map<String, Object> args) {
final Request currentRequest = this;
LeanplumEventDataManager.executeAsyncTask(new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
try {
synchronized (Request.class) {
Context context = Leanplum.getContext();
SharedPreferences preferences = context.getSharedPreferences(
LEANPLUM, Context.MODE_PRIVATE);
SharedPreferences.Editor editor = preferences.edit();
long count = LeanplumEventDataManager.getEventsCount();
String uuid = preferences.getString(Constants.Defaults.UUID_KEY, null);
if (uuid == null || count % MAX_EVENTS_PER_API_CALL == 0) {
uuid = UUID.randomUUID().toString();
editor.putString(Constants.Defaults.UUID_KEY, uuid);
SharedPreferencesUtil.commitChanges(editor);
}
args.put(UUID_KEY, uuid);
LeanplumEventDataManager.insertEvent(JsonConverter.toJson(args));
dataBaseIndex = count;
// Checks if here response and/or error callback for this request. We need to add callbacks to
// eventCallbackManager only if here was internet connection, otherwise triggerErrorCallback
// will handle error callback for this event.
if (response != null || error != null && !Util.isConnected()) {
eventCallbackManager.addCallbacks(currentRequest, response, error);
}
}
} catch (Throwable t) {
Util.handleException(t);
}
return null;
}
});
}
public void send() {
this.sendEventually();
if (Constants.isDevelopmentModeEnabled) {
long currentTimeMs = System.currentTimeMillis();
long delayMs;
if (lastSendTimeMs == 0 || currentTimeMs - lastSendTimeMs > DEVELOPMENT_MAX_DELAY_MS) {
delayMs = DEVELOPMENT_MIN_DELAY_MS;
} else {
delayMs = (lastSendTimeMs + DEVELOPMENT_MAX_DELAY_MS) - currentTimeMs;
}
OsHandler.getInstance().postDelayed(new Runnable() {
@Override
public void run() {
try {
sendIfConnected();
} catch (Throwable t) {
Util.handleException(t);
}
}
}, delayMs);
}
}
/**
* Wait 1 second for potential other API calls, and then sends the call synchronously if no other
* call has been sent within 1 minute.
*/
public void sendIfDelayed() {
sendEventually();
OsHandler.getInstance().postDelayed(new Runnable() {
@Override
public void run() {
try {
sendIfDelayedHelper();
} catch (Throwable t) {
Util.handleException(t);
}
}
}, 1000);
}
/**
* Sends the call synchronously if no other call has been sent within 1 minute.
*/
private void sendIfDelayedHelper() {
if (Constants.isDevelopmentModeEnabled) {
send();
} else {
long currentTimeMs = System.currentTimeMillis();
if (lastSendTimeMs == 0 || currentTimeMs - lastSendTimeMs > PRODUCTION_DELAY) {
sendIfConnected();
}
}
}
public void sendIfConnected() {
if (Util.isConnected()) {
this.sendNow();
} else {
this.sendEventually();
Log.i("Device is offline, will send later");
triggerErrorCallback(new Exception("Not connected to the Internet"));
}
}
private void triggerErrorCallback(Exception e) {
if (error != null) {
error.error(e);
}
if (apiResponse != null) {
List<Map<String, Object>> requests = getUnsentRequests();
List<Map<String, Object>> requestsToSend = removeIrrelevantBackgroundStartRequests(requests);
apiResponse.response(requestsToSend, null, requests.size());
}
}
@SuppressWarnings("BooleanMethodIsAlwaysInverted")
private static boolean attachApiKeys(Map<String, Object> dict) {
if (appId == null || accessKey == null) {
Log.e("API keys are not set. Please use Leanplum.setAppIdForDevelopmentMode or "
+ "Leanplum.setAppIdForProductionMode.");
return false;
}
dict.put(Constants.Params.APP_ID, appId);
dict.put(Constants.Params.CLIENT_KEY, accessKey);
dict.put(Constants.Params.CLIENT, Constants.CLIENT);
return true;
}
public interface ResponseCallback {
void response(JSONObject response);
}
public interface ApiResponseCallback {
void response(List<Map<String, Object>> requests, JSONObject response, int countOfEvents);
}
public interface ErrorCallback {
void error(Exception e);
}
public interface NoPendingDownloadsCallback {
void noPendingDownloads();
}
/**
* Parse response body from server. Invoke potential error or response callbacks for all events
* of this request.
*
* @param responseBody JSONObject with response body from server.
* @param requestsToSend List of requests that were sent to the server/
* @param error Exception.
* @param unsentRequestsSize Size of unsent request, that we will delete.
*/
private void parseResponseBody(JSONObject responseBody, List<Map<String, Object>>
requestsToSend, Exception error, int unsentRequestsSize) {
synchronized (Request.class) {
if (responseBody == null && error != null) {
// Invoke potential error callbacks for all events of this request.
eventCallbackManager.invokeAllCallbacksWithError(error, unsentRequestsSize);
return;
} else if (responseBody == null) {
return;
}
// Response for last start call.
if (apiResponse != null) {
apiResponse.response(requestsToSend, responseBody, unsentRequestsSize);
}
// We will replace it with error from response body, if we found it.
Exception lastResponseError = error;
// Valid response, parse and handle response body.
int numResponses = Request.numResponses(responseBody);
for (int i = 0; i < numResponses; i++) {
JSONObject response = Request.getResponseAt(responseBody, i);
if (Request.isResponseSuccess(response)) {
continue; // If event response is successful, proceed with next one.
}
// If event response was not successful, handle error.
String errorMessage = getReadableErrorMessage(Request.getResponseError(response));
Log.e(errorMessage);
// Throw an exception if last event response is negative.
if (i == numResponses - 1) {
lastResponseError = new Exception(errorMessage);
}
}
if (lastResponseError != null) {
// Invoke potential error callbacks for all events of this request.
eventCallbackManager.invokeAllCallbacksWithError(lastResponseError, unsentRequestsSize);
} else {
// Invoke potential response callbacks for all events of this request.
eventCallbackManager.invokeAllCallbacksForResponse(responseBody, unsentRequestsSize);
}
}
}
/**
* Parse error message from server response and return readable error message.
*
* @param errorMessage String of error from server response.
* @return String of readable error message.
*/
@NonNull
private String getReadableErrorMessage(String errorMessage) {
if (errorMessage == null || errorMessage.length() == 0) {
errorMessage = "API error";
} else if (errorMessage.startsWith("App not found")) {
errorMessage = "No app matching the provided app ID was found.";
Constants.isInPermanentFailureState = true;
} else if (errorMessage.startsWith("Invalid access key")) {
errorMessage = "The access key you provided is not valid for this app.";
Constants.isInPermanentFailureState = true;
} else if (errorMessage.startsWith("Development mode requested but not permitted")) {
errorMessage = "A call to Leanplum.setAppIdForDevelopmentMode "
+ "with your production key was made, which is not permitted.";
Constants.isInPermanentFailureState = true;
} else {
errorMessage = "API error: " + errorMessage;
}
return errorMessage;
}
private void sendNow() {
if (Constants.isTestMode) {
return;
}
if (appId == null) {
Log.e("Cannot send request. appId is not set.");
return;
}
if (accessKey == null) {
Log.e("Cannot send request. accessKey is not set.");
return;
}
this.sendEventually();
Util.executeAsyncTask(true, new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
try {
sendRequests();
} catch (Throwable t) {
Util.handleException(t);
}
return null;
}
});
}
private static class RequestsWithEncoding {
List<Map<String, Object>> unsentRequests;
List<Map<String, Object>> requestsToSend;
String jsonEncodedString;
}
private RequestsWithEncoding getRequestsWithEncodedStringForErrors() {
List<Map<String, Object>> unsentRequests = new ArrayList<>();
List<Map<String, Object>> requestsToSend;
String jsonEncodedRequestsToSend;
String uuid = UUID.randomUUID().toString();
for (Map<String, Object> error : localErrors) {
error.put(UUID_KEY, uuid);
unsentRequests.add(error);
}
requestsToSend = unsentRequests;
jsonEncodedRequestsToSend = jsonEncodeUnsentRequests(unsentRequests);
RequestsWithEncoding requestsWithEncoding = new RequestsWithEncoding();
requestsWithEncoding.unsentRequests = unsentRequests;
requestsWithEncoding.requestsToSend = requestsToSend;
requestsWithEncoding.jsonEncodedString = jsonEncodedRequestsToSend;
return requestsWithEncoding;
}
private RequestsWithEncoding getRequestsWithEncodedStringStoredRequests() {
List<Map<String, Object>> unsentRequests;
List<Map<String, Object>> requestsToSend;
String jsonEncodedRequestsToSend;
RequestsWithEncoding requestsWithEncoding = new RequestsWithEncoding();
unsentRequests = getUnsentRequests();
requestsToSend = removeIrrelevantBackgroundStartRequests(unsentRequests);
jsonEncodedRequestsToSend = jsonEncodeUnsentRequests(unsentRequests);
requestsWithEncoding.unsentRequests = unsentRequests;
requestsWithEncoding.requestsToSend = requestsToSend;
requestsWithEncoding.jsonEncodedString = jsonEncodedRequestsToSend;
return requestsWithEncoding;
}
private RequestsWithEncoding getRequestsWithEncodedString() {
RequestsWithEncoding requestsWithEncoding;
// Check if we have localErrors, if yes then we will send only errors to the server.
if (localErrors.size() != 0) {
requestsWithEncoding = getRequestsWithEncodedStringForErrors();
} else {
requestsWithEncoding = getRequestsWithEncodedStringStoredRequests()
}
return requestsWithEncoding;
}
private void sendRequests() {
RequestsWithEncoding requestsWithEncoding = getRequestsWithEncodedString();
List<Map<String, Object>> unsentRequests = requestsWithEncoding.unsentRequests;
List<Map<String, Object>> requestsToSend = requestsWithEncoding.requestsToSend;
String jsonEncodedString = requestsWithEncoding.jsonEncodedString;
if (requestsToSend.isEmpty()) {
return;
}
final Map<String, Object> multiRequestArgs = new HashMap<>();
if (!Request.attachApiKeys(multiRequestArgs)) {
return;
}
multiRequestArgs.put(Constants.Params.DATA, jsonEncodedString);
multiRequestArgs.put(Constants.Params.SDK_VERSION, Constants.LEANPLUM_VERSION);
multiRequestArgs.put(Constants.Params.ACTION, Constants.Methods.MULTI);
multiRequestArgs.put(Constants.Params.TIME, Double.toString(new Date().getTime() / 1000.0));
JSONObject responseBody;
HttpURLConnection op = null;
try {
try {
op = Util.operation(
Constants.API_HOST_NAME,
Constants.API_SERVLET,
multiRequestArgs,
httpMethod,
Constants.API_SSL,
Constants.NETWORK_TIMEOUT_SECONDS);
responseBody = Util.getJsonResponse(op);
int statusCode = op.getResponseCode();
Exception errorException;
if (statusCode >= 200 && statusCode <= 299) {
if (responseBody == null) {
errorException = new Exception("Response JSON is null.");
deleteSentRequests(unsentRequests.size());
parseResponseBody(null, requestsToSend, errorException, unsentRequests.size());
return;
}
Exception exception = null;
// Checks if we received the same number of responses as a number of sent request.
int numResponses = Request.numResponses(responseBody);
if (numResponses != requestsToSend.size()) {
Log.w("Sent " + requestsToSend.size() + " requests but only" +
" received " + numResponses);
}
parseResponseBody(responseBody, requestsToSend, null, unsentRequests.size());
// Clear localErrors list.
localErrors.clear();
deleteSentRequests(unsentRequests.size());
// Send another request if the last request had maximum events per api call.
if (unsentRequests.size() == MAX_EVENTS_PER_API_CALL) {
sendRequests();
}
} else {
errorException = new Exception("HTTP error " + statusCode);
if (statusCode != -1 && statusCode != 408 && !(statusCode >= 500 && statusCode <= 599)) {
deleteSentRequests(unsentRequests.size());
parseResponseBody(responseBody, requestsToSend, errorException, unsentRequests.size());
}
}
} catch (JSONException e) {
Log.e("Error parsing JSON response: " + e.toString() + "\n" + Log.getStackTraceString(e));
deleteSentRequests(unsentRequests.size());
parseResponseBody(null, requestsToSend, e, unsentRequests.size());
} catch (Exception e) {
Log.e("Unable to send request: " + e.toString() + "\n" + Log.getStackTraceString(e));
} finally {
if (op != null) {
op.disconnect();
}
}
} catch (Throwable t) {
Util.handleException(t);
}
}
public void sendEventually() {
if (Constants.isTestMode) {
return;
}
if (LeanplumEventDataManager.willSendErrorLog) {
return;
}
if (!sent) {
sent = true;
Map<String, Object> args = createArgsDictionary();
saveRequestForLater(args);
}
}
static void deleteSentRequests(int requestsCount) {
if (requestsCount == 0) {
return;
}
synchronized (Request.class) {
LeanplumEventDataManager.deleteEvents(requestsCount);
}
}
private static List<Map<String, Object>> getUnsentRequests() {
List<Map<String, Object>> requestData;
synchronized (Request.class) {
lastSendTimeMs = System.currentTimeMillis();
Context context = Leanplum.getContext();
SharedPreferences preferences = context.getSharedPreferences(
LEANPLUM, Context.MODE_PRIVATE);
SharedPreferences.Editor editor = preferences.edit();
requestData = LeanplumEventDataManager.getEvents(MAX_EVENTS_PER_API_CALL);
editor.remove(Constants.Defaults.UUID_KEY);
SharedPreferencesUtil.commitChanges(editor);
}
return requestData;
}
/**
* In various scenarios we can end up batching a big number of requests (e.g. device is offline,
* background sessions), which could make the stored API calls batch look something like:
* <p>
* <code>start(B), start(B), start(F), track, start(B), track, start(F), resumeSession</code>
* <p>
* where <code>start(B)</code> indicates a start in the background, and <code>start(F)</code>
* one in the foreground.
* <p>
* In this case the first two <code>start(B)</code> can be dropped because they don't contribute
* any relevant information for the batch call.
* <p>
* Essentially we drop every <code>start(B)</code> call, that is directly followed by any kind of
* a <code>start</code> call.
*
* @param requestData A list of the requests, stored on the device.
* @return A list of only these requests, which contain relevant information for the API call.
*/
private static List<Map<String, Object>> removeIrrelevantBackgroundStartRequests(
List<Map<String, Object>> requestData) {
List<Map<String, Object>> relevantRequests = new ArrayList<>();
int requestCount = requestData.size();
if (requestCount > 0) {
for (int i = 0; i < requestCount; i++) {
Map<String, Object> currentRequest = requestData.get(i);
if (i < requestCount - 1
&& Constants.Methods.START.equals(requestData.get(i + 1).get(Constants.Params.ACTION))
&& Constants.Methods.START.equals(currentRequest.get(Constants.Params.ACTION))
&& Boolean.TRUE.toString().equals(currentRequest.get(Constants.Params.BACKGROUND))) {
continue;
}
relevantRequests.add(currentRequest);
}
}
return relevantRequests;
}
private static String jsonEncodeUnsentRequests(List<Map<String, Object>> requestData) {
Map<String, Object> data = new HashMap<>();
data.put(Constants.Params.DATA, requestData);
return JsonConverter.toJson(data);
}
private static String getSizeAsString(int bytes) {
if (bytes < (1 << 10)) {
return bytes + " B";
} else if (bytes < (1 << 20)) {
return (bytes >> 10) + " KB";
} else {
return (bytes >> 20) + " MB";
}
}
private static void printUploadProgress() {
int totalFiles = fileUploadSize.size();
int sentFiles = 0;
int totalBytes = 0;
int sentBytes = 0;
for (Map.Entry<File, Long> entry : fileUploadSize.entrySet()) {
File file = entry.getKey();
long fileSize = entry.getValue();
double fileProgress = fileUploadProgress.get(file);
if (fileProgress == 1) {
sentFiles++;
}
sentBytes += (int) (fileSize * fileProgress);
totalBytes += fileSize;
}
String progressString = "Uploading resources. " +
sentFiles + '/' + totalFiles + " files completed; " +
getSizeAsString(sentBytes) + '/' + getSizeAsString(totalBytes) + " transferred.";
if (!fileUploadProgressString.equals(progressString)) {
fileUploadProgressString = progressString;
Log.i(progressString);
}
}
public void sendFilesNow(final List<String> filenames, final List<InputStream> streams) {
if (Constants.isTestMode) {
return;
}
final Map<String, Object> dict = createArgsDictionary();
if (!attachApiKeys(dict)) {
return;
}
final List<File> filesToUpload = new ArrayList<>();
// First set up the files for upload
for (int i = 0; i < filenames.size(); i++) {
String filename = filenames.get(i);
if (filename == null || Boolean.TRUE.equals(fileTransferStatus.get(filename))) {
continue;
}
File file = new File(filename);
long size;
try {
size = streams.get(i).available();
} catch (IOException e) {
size = file.length();
} catch (NullPointerException e) {
// Not good. Can't read asset.
Log.e("Unable to read file " + filename);
continue;
}
fileTransferStatus.put(filename, true);
filesToUpload.add(file);
fileUploadSize.put(file, size);
fileUploadProgress.put(file, 0.0);
}
if (filesToUpload.size() == 0) {
return;
}
printUploadProgress();
// Now upload the files
Util.executeAsyncTask(false, new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
synchronized (uploadFileLock) { // Don't overload app and server with many upload tasks
JSONObject result;
HttpURLConnection op = null;
try {
op = Util.uploadFilesOperation(
Constants.Params.FILE,
filesToUpload,
streams,
Constants.API_HOST_NAME,
Constants.API_SERVLET,
dict,
httpMethod,
Constants.API_SSL,
60);
if (op != null) {
result = Util.getJsonResponse(op);
int statusCode = op.getResponseCode();
if (statusCode != 200) {
throw new Exception("Leanplum: Error sending request: " + statusCode);
}
if (Request.this.response != null) {
Request.this.response.response(result);
}
} else {
if (error != null) {
error.error(new Exception("Leanplum: Unable to read file."));
}
}
} catch (JSONException e) {
Log.e("Unable to convert to JSON.", e);
if (error != null) {
error.error(e);
}
} catch (SocketTimeoutException e) {
Log.e("Timeout uploading files. Try again or limit the number of files " +
"to upload with parameters to syncResourcesAsync.");
if (error != null) {
error.error(e);
}
} catch (Exception e) {
Log.e("Unable to send file.", e);
if (error != null) {
error.error(e);
}
} finally {
if (op != null) {
op.disconnect();
}
}
for (File file : filesToUpload) {
fileUploadProgress.put(file, 1.0);
}
printUploadProgress();
return null;
}
}
});
// TODO: Upload progress
}
void downloadFile(final String path, final String url) {
if (Constants.isTestMode) {
return;
}
if (Boolean.TRUE.equals(fileTransferStatus.get(path))) {
return;
}
pendingDownloads++;
Log.i("Downloading resource " + path);
fileTransferStatus.put(path, true);
final Map<String, Object> dict = createArgsDictionary();
dict.put(Constants.Keys.FILENAME, path);
if (!attachApiKeys(dict)) {
return;
}
Util.executeAsyncTask(false, new AsyncTask<Void, Void, Void>() {
@Override
protected Void doInBackground(Void... params) {
try {
downloadHelper(Constants.API_HOST_NAME, Constants.API_SERVLET, path, url, dict);
} catch (Throwable t) {
Util.handleException(t);
}
return null;
}
});
// TODO: Download progress
}
private void downloadHelper(String hostName, String servlet, final String path, final String url,
final Map<String, Object> dict) {
HttpURLConnection op = null;
URL originalURL = null;
try {
if (url == null) {
op = Util.operation(
hostName,
servlet,
dict,
httpMethod,
Constants.API_SSL,
Constants.NETWORK_TIMEOUT_SECONDS_FOR_DOWNLOADS);
} else {
op = Util.createHttpUrlConnection(url, httpMethod, url.startsWith("https://"),
Constants.NETWORK_TIMEOUT_SECONDS_FOR_DOWNLOADS);
}
originalURL = op.getURL();
op.connect();
int statusCode = op.getResponseCode();
if (statusCode != 200) {
throw new Exception("Leanplum: Error sending request to: " + hostName +
", HTTP status code: " + statusCode);
}
Stack<String> dirs = new Stack<>();
String currentDir = path;
while ((currentDir = new File(currentDir).getParent()) != null) {
dirs.push(currentDir);
}
while (!dirs.isEmpty()) {
String directory = FileManager.fileRelativeToDocuments(dirs.pop());
boolean isCreated = new File(directory).mkdir();
if (!isCreated) {
Log.w("Failed to create directory: ", directory);
}
}
FileOutputStream out = new FileOutputStream(
new File(FileManager.fileRelativeToDocuments(path)));
Util.saveResponse(op, out);
pendingDownloads--;
if (Request.this.response != null) {
Request.this.response.response(null);
}
if (pendingDownloads == 0 && noPendingDownloadsBlock != null) {
noPendingDownloadsBlock.noPendingDownloads();
}
} catch (Exception e) {
if (e instanceof EOFException) {
if (op != null && !op.getURL().equals(originalURL)) {
downloadHelper(null, op.getURL().toString(), path, url, new HashMap<String, Object>());
return;
}
}
Log.e("Error downloading resource:" + path, e);
pendingDownloads--;
if (error != null) {
error.error(e);
}
if (pendingDownloads == 0 && noPendingDownloadsBlock != null) {
noPendingDownloadsBlock.noPendingDownloads();
}
} finally {
if (op != null) {
op.disconnect();
}
}
}
public static int numPendingDownloads() {
return pendingDownloads;
}
public static void onNoPendingDownloads(NoPendingDownloadsCallback block) {
noPendingDownloadsBlock = block;
}
public static int numResponses(JSONObject response) {
if (response == null) {
return 0;
}
try {
return response.getJSONArray("response").length();
} catch (JSONException e) {
Log.e("Could not parse JSON response.", e);
return 0;
}
}
public static JSONObject getResponseAt(JSONObject response, int index) {
try {
return response.getJSONArray("response").getJSONObject(index);
} catch (JSONException e) {
Log.e("Could not parse JSON response.", e);
return null;
}
}
public static JSONObject getLastResponse(JSONObject response) {
int numResponses = numResponses(response);
if (numResponses > 0) {
return getResponseAt(response, numResponses - 1);
} else {
return null;
}
}
public static boolean isResponseSuccess(JSONObject response) {
if (response == null) {
return false;
}
try {
return response.getBoolean("success");
} catch (JSONException e) {
Log.e("Could not parse JSON response.", e);
return false;
}
}
public static String getResponseError(JSONObject response) {
if (response == null) {
return null;
}
try {
JSONObject error = response.optJSONObject("error");
if (error == null) {
return null;
}
return error.getString("message");
} catch (JSONException e) {
Log.e("Could not parse JSON response.", e);
return null;
}
}
}
| Add missing semicolon | AndroidSDKCore/src/main/java/com/leanplum/internal/Request.java | Add missing semicolon |
|
Java | apache-2.0 | 0ae09f138458f287a7b1c8e8c4321ff2d3916aae | 0 | Distrotech/intellij-community,robovm/robovm-studio,kool79/intellij-community,retomerz/intellij-community,consulo/consulo,supersven/intellij-community,MichaelNedzelsky/intellij-community,da1z/intellij-community,ol-loginov/intellij-community,izonder/intellij-community,slisson/intellij-community,vladmm/intellij-community,ivan-fedorov/intellij-community,alphafoobar/intellij-community,ol-loginov/intellij-community,diorcety/intellij-community,FHannes/intellij-community,FHannes/intellij-community,TangHao1987/intellij-community,jexp/idea2,samthor/intellij-community,retomerz/intellij-community,salguarnieri/intellij-community,joewalnes/idea-community,orekyuu/intellij-community,caot/intellij-community,adedayo/intellij-community,tmpgit/intellij-community,Lekanich/intellij-community,samthor/intellij-community,ThiagoGarciaAlves/intellij-community,wreckJ/intellij-community,TangHao1987/intellij-community,wreckJ/intellij-community,alphafoobar/intellij-community,dslomov/intellij-community,jagguli/intellij-community,ibinti/intellij-community,diorcety/intellij-community,diorcety/intellij-community,tmpgit/intellij-community,gnuhub/intellij-community,akosyakov/intellij-community,gnuhub/intellij-community,FHannes/intellij-community,petteyg/intellij-community,blademainer/intellij-community,tmpgit/intellij-community,gnuhub/intellij-community,petteyg/intellij-community,ivan-fedorov/intellij-community,lucafavatella/intellij-community,ahb0327/intellij-community,nicolargo/intellij-community,da1z/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,akosyakov/intellij-community,mglukhikh/intellij-community,MichaelNedzelsky/intellij-community,Lekanich/intellij-community,Lekanich/intellij-community,joewalnes/idea-community,ftomassetti/intellij-community,MER-GROUP/intellij-community,ol-loginov/intellij-community,jagguli/intellij-community,blademainer/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,pwoodworth/intellij-community,vvv1559/intellij-community,dslomov/intellij-community,caot/intellij-community,retomerz/intellij-community,jagguli/intellij-community,retomerz/intellij-community,kool79/intellij-community,ol-loginov/intellij-community,muntasirsyed/intellij-community,clumsy/intellij-community,suncycheng/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,hurricup/intellij-community,ThiagoGarciaAlves/intellij-community,petteyg/intellij-community,ivan-fedorov/intellij-community,ivan-fedorov/intellij-community,michaelgallacher/intellij-community,fengbaicanhe/intellij-community,SerCeMan/intellij-community,muntasirsyed/intellij-community,ryano144/intellij-community,slisson/intellij-community,youdonghai/intellij-community,semonte/intellij-community,ibinti/intellij-community,xfournet/intellij-community,apixandru/intellij-community,Distrotech/intellij-community,ernestp/consulo,vvv1559/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,adedayo/intellij-community,fnouama/intellij-community,michaelgallacher/intellij-community,asedunov/intellij-community,ibinti/intellij-community,semonte/intellij-community,xfournet/intellij-community,vladmm/intellij-community,amith01994/intellij-community,adedayo/intellij-community,retomerz/intellij-community,fitermay/intellij-community,diorcety/intellij-community,adedayo/intellij-community,ThiagoGarciaAlves/intellij-community,akosyakov/intellij-community,ernestp/consulo,joewalnes/idea-community,f
engbaicanhe/intellij-community,muntasirsyed/intellij-community,SerCeMan/intellij-community,dslomov/intellij-community,idea4bsd/idea4bsd,adedayo/intellij-community,caot/intellij-community,FHannes/intellij-community,Distrotech/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,caot/intellij-community,fitermay/intellij-community,muntasirsyed/intellij-community,TangHao1987/intellij-community,jagguli/intellij-community,amith01994/intellij-community,vladmm/intellij-community,vladmm/intellij-community,SerCeMan/intellij-community,jagguli/intellij-community,MichaelNedzelsky/intellij-community,orekyuu/intellij-community,TangHao1987/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,kdwink/intellij-community,idea4bsd/idea4bsd,ahb0327/intellij-community,FHannes/intellij-community,MER-GROUP/intellij-community,retomerz/intellij-community,Distrotech/intellij-community,orekyuu/intellij-community,vvv1559/intellij-community,blademainer/intellij-community,ftomassetti/intellij-community,ThiagoGarciaAlves/intellij-community,adedayo/intellij-community,vladmm/intellij-community,gnuhub/intellij-community,jexp/idea2,gnuhub/intellij-community,ol-loginov/intellij-community,vvv1559/intellij-community,SerCeMan/intellij-community,MER-GROUP/intellij-community,gnuhub/intellij-community,diorcety/intellij-community,xfournet/intellij-community,youdonghai/intellij-community,adedayo/intellij-community,fengbaicanhe/intellij-community,jagguli/intellij-community,gnuhub/intellij-community,blademainer/intellij-community,akosyakov/intellij-community,amith01994/intellij-community,hurricup/intellij-community,signed/intellij-community,robovm/robovm-studio,asedunov/intellij-community,ibinti/intellij-community,nicolargo/intellij-community,amith01994/intellij-community,michaelgallacher/intellij-community,MER-GROUP/intellij-community,jexp/idea2,kool79/intellij-community,fitermay/intellij-community,petteyg/intellij-community,Distrotech/intellij-community,xfournet/intellij-community,da1z/intellij-community,allotria/intellij-community,holmes/intellij-community,kdwink/intellij-community,semonte/intellij-community,blademainer/intellij-community,vladmm/intellij-community,kdwink/intellij-community,mglukhikh/intellij-community,ryano144/intellij-community,TangHao1987/intellij-community,ryano144/intellij-community,caot/intellij-community,amith01994/intellij-community,lucafavatella/intellij-community,clumsy/intellij-community,ThiagoGarciaAlves/intellij-community,orekyuu/intellij-community,pwoodworth/intellij-community,petteyg/intellij-community,asedunov/intellij-community,samthor/intellij-community,signed/intellij-community,dslomov/intellij-community,semonte/intellij-community,ol-loginov/intellij-community,orekyuu/intellij-community,akosyakov/intellij-community,slisson/intellij-community,hurricup/intellij-community,hurricup/intellij-community,FHannes/intellij-community,adedayo/intellij-community,tmpgit/intellij-community,caot/intellij-community,ibinti/intellij-community,jagguli/intellij-community,tmpgit/intellij-community,asedunov/intellij-community,allotria/intellij-community,dslomov/intellij-community,TangHao1987/intellij-community,idea4bsd/idea4bsd,ryano144/intellij-community,ryano144/intellij-community,ol-loginov/intellij-community,suncycheng/intellij-community,lucafavatella/intellij-community,apixandru/intellij-community,salguarnieri/intellij-community,idea4bsd/idea4bsd,robovm/robovm-studio,dslomov/intellij-community,izonder/intellij-community,dslomov/intellij-community,ftomassetti/inte
llij-community,nicolargo/intellij-community,Distrotech/intellij-community,izonder/intellij-community,izonder/intellij-community,FHannes/intellij-community,MichaelNedzelsky/intellij-community,adedayo/intellij-community,MichaelNedzelsky/intellij-community,orekyuu/intellij-community,jexp/idea2,fnouama/intellij-community,mglukhikh/intellij-community,ol-loginov/intellij-community,dslomov/intellij-community,wreckJ/intellij-community,joewalnes/idea-community,asedunov/intellij-community,orekyuu/intellij-community,jexp/idea2,salguarnieri/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,samthor/intellij-community,salguarnieri/intellij-community,kool79/intellij-community,tmpgit/intellij-community,wreckJ/intellij-community,muntasirsyed/intellij-community,hurricup/intellij-community,ol-loginov/intellij-community,allotria/intellij-community,FHannes/intellij-community,Distrotech/intellij-community,holmes/intellij-community,ibinti/intellij-community,ibinti/intellij-community,kool79/intellij-community,ryano144/intellij-community,amith01994/intellij-community,ftomassetti/intellij-community,nicolargo/intellij-community,supersven/intellij-community,akosyakov/intellij-community,clumsy/intellij-community,vvv1559/intellij-community,semonte/intellij-community,xfournet/intellij-community,asedunov/intellij-community,slisson/intellij-community,michaelgallacher/intellij-community,alphafoobar/intellij-community,xfournet/intellij-community,da1z/intellij-community,ahb0327/intellij-community,asedunov/intellij-community,lucafavatella/intellij-community,muntasirsyed/intellij-community,holmes/intellij-community,da1z/intellij-community,slisson/intellij-community,apixandru/intellij-community,mglukhikh/intellij-community,ahb0327/intellij-community,muntasirsyed/intellij-community,suncycheng/intellij-community,kool79/intellij-community,akosyakov/intellij-community,adedayo/intellij-community,SerCeMan/intellij-community,suncycheng/intellij-community,consulo/consulo,samthor/intellij-community,wreckJ/intellij-community,suncycheng/intellij-community,alphafoobar/intellij-community,muntasirsyed/intellij-community,nicolargo/intellij-community,fnouama/intellij-community,holmes/intellij-community,lucafavatella/intellij-community,MER-GROUP/intellij-community,fnouama/intellij-community,ibinti/intellij-community,alphafoobar/intellij-community,Lekanich/intellij-community,muntasirsyed/intellij-community,xfournet/intellij-community,jexp/idea2,youdonghai/intellij-community,slisson/intellij-community,ahb0327/intellij-community,suncycheng/intellij-community,hurricup/intellij-community,idea4bsd/idea4bsd,nicolargo/intellij-community,salguarnieri/intellij-community,semonte/intellij-community,blademainer/intellij-community,kool79/intellij-community,xfournet/intellij-community,apixandru/intellij-community,pwoodworth/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,supersven/intellij-community,clumsy/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,Lekanich/intellij-community,allotria/intellij-community,tmpgit/intellij-community,alphafoobar/intellij-community,slisson/intellij-community,muntasirsyed/intellij-community,mglukhikh/intellij-community,kool79/intellij-community,pwoodworth/intellij-community,samthor/intellij-community,apixandru/intellij-community,caot/intellij-community,kdwink/intellij-community,kdwink/intellij-community,clumsy/intellij-community,suncycheng/intellij-community,blademainer/intellij-community,idea4bsd/idea4bsd,consulo/consulo,diorcety/intellij-community,apixa
ndru/intellij-community,consulo/consulo,fengbaicanhe/intellij-community,ivan-fedorov/intellij-community,MER-GROUP/intellij-community,clumsy/intellij-community,robovm/robovm-studio,jagguli/intellij-community,holmes/intellij-community,idea4bsd/idea4bsd,Lekanich/intellij-community,amith01994/intellij-community,asedunov/intellij-community,ernestp/consulo,vladmm/intellij-community,robovm/robovm-studio,fnouama/intellij-community,izonder/intellij-community,SerCeMan/intellij-community,TangHao1987/intellij-community,asedunov/intellij-community,kdwink/intellij-community,Lekanich/intellij-community,izonder/intellij-community,slisson/intellij-community,caot/intellij-community,MER-GROUP/intellij-community,ftomassetti/intellij-community,samthor/intellij-community,allotria/intellij-community,dslomov/intellij-community,vvv1559/intellij-community,holmes/intellij-community,jexp/idea2,SerCeMan/intellij-community,Lekanich/intellij-community,youdonghai/intellij-community,semonte/intellij-community,vvv1559/intellij-community,fitermay/intellij-community,amith01994/intellij-community,ryano144/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,akosyakov/intellij-community,allotria/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,hurricup/intellij-community,semonte/intellij-community,ftomassetti/intellij-community,petteyg/intellij-community,clumsy/intellij-community,hurricup/intellij-community,hurricup/intellij-community,diorcety/intellij-community,orekyuu/intellij-community,muntasirsyed/intellij-community,signed/intellij-community,hurricup/intellij-community,tmpgit/intellij-community,ThiagoGarciaAlves/intellij-community,semonte/intellij-community,petteyg/intellij-community,vladmm/intellij-community,supersven/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,Distrotech/intellij-community,ol-loginov/intellij-community,gnuhub/intellij-community,holmes/intellij-community,vladmm/intellij-community,ThiagoGarciaAlves/intellij-community,salguarnieri/intellij-community,izonder/intellij-community,fitermay/intellij-community,slisson/intellij-community,adedayo/intellij-community,michaelgallacher/intellij-community,pwoodworth/intellij-community,akosyakov/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,slisson/intellij-community,retomerz/intellij-community,supersven/intellij-community,kdwink/intellij-community,blademainer/intellij-community,allotria/intellij-community,ahb0327/intellij-community,kdwink/intellij-community,apixandru/intellij-community,alphafoobar/intellij-community,fitermay/intellij-community,kool79/intellij-community,diorcety/intellij-community,fengbaicanhe/intellij-community,robovm/robovm-studio,kdwink/intellij-community,allotria/intellij-community,idea4bsd/idea4bsd,fengbaicanhe/intellij-community,hurricup/intellij-community,vladmm/intellij-community,ThiagoGarciaAlves/intellij-community,MichaelNedzelsky/intellij-community,fitermay/intellij-community,pwoodworth/intellij-community,pwoodworth/intellij-community,fnouama/intellij-community,retomerz/intellij-community,idea4bsd/idea4bsd,retomerz/intellij-community,michaelgallacher/intellij-community,caot/intellij-community,fnouama/intellij-community,samthor/intellij-community,supersven/intellij-community,Distrotech/intellij-community,signed/intellij-community,TangHao1987/intellij-community,clumsy/intellij-community,salguarnieri/intellij-community,tmpgit/intellij-community,apixandru/intellij-community,asedunov/intellij-community,michaelgallacher/intellij-community,jagguli/intell
ij-community,signed/intellij-community,petteyg/intellij-community,ernestp/consulo,ivan-fedorov/intellij-community,ryano144/intellij-community,dslomov/intellij-community,da1z/intellij-community,diorcety/intellij-community,ThiagoGarciaAlves/intellij-community,lucafavatella/intellij-community,mglukhikh/intellij-community,alphafoobar/intellij-community,akosyakov/intellij-community,fengbaicanhe/intellij-community,signed/intellij-community,blademainer/intellij-community,signed/intellij-community,TangHao1987/intellij-community,FHannes/intellij-community,da1z/intellij-community,MichaelNedzelsky/intellij-community,izonder/intellij-community,akosyakov/intellij-community,kdwink/intellij-community,fitermay/intellij-community,pwoodworth/intellij-community,youdonghai/intellij-community,ftomassetti/intellij-community,michaelgallacher/intellij-community,holmes/intellij-community,supersven/intellij-community,MichaelNedzelsky/intellij-community,wreckJ/intellij-community,dslomov/intellij-community,ftomassetti/intellij-community,salguarnieri/intellij-community,SerCeMan/intellij-community,holmes/intellij-community,slisson/intellij-community,wreckJ/intellij-community,supersven/intellij-community,amith01994/intellij-community,semonte/intellij-community,consulo/consulo,retomerz/intellij-community,amith01994/intellij-community,salguarnieri/intellij-community,xfournet/intellij-community,jexp/idea2,salguarnieri/intellij-community,wreckJ/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,ryano144/intellij-community,blademainer/intellij-community,alphafoobar/intellij-community,blademainer/intellij-community,diorcety/intellij-community,ol-loginov/intellij-community,da1z/intellij-community,Distrotech/intellij-community,robovm/robovm-studio,youdonghai/intellij-community,retomerz/intellij-community,holmes/intellij-community,joewalnes/idea-community,apixandru/intellij-community,joewalnes/idea-community,samthor/intellij-community,ahb0327/intellij-community,gnuhub/intellij-community,fnouama/intellij-community,caot/intellij-community,mglukhikh/intellij-community,ryano144/intellij-community,tmpgit/intellij-community,allotria/intellij-community,vladmm/intellij-community,gnuhub/intellij-community,fengbaicanhe/intellij-community,asedunov/intellij-community,MER-GROUP/intellij-community,dslomov/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,alphafoobar/intellij-community,da1z/intellij-community,Lekanich/intellij-community,lucafavatella/intellij-community,FHannes/intellij-community,ahb0327/intellij-community,vvv1559/intellij-community,consulo/consulo,suncycheng/intellij-community,muntasirsyed/intellij-community,izonder/intellij-community,mglukhikh/intellij-community,orekyuu/intellij-community,kool79/intellij-community,fitermay/intellij-community,TangHao1987/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,TangHao1987/intellij-community,suncycheng/intellij-community,samthor/intellij-community,da1z/intellij-community,fnouama/intellij-community,hurricup/intellij-community,wreckJ/intellij-community,ahb0327/intellij-community,apixandru/intellij-community,wreckJ/intellij-community,MER-GROUP/intellij-community,retomerz/intellij-community,retomerz/intellij-community,alphafoobar/intellij-community,clumsy/intellij-community,ThiagoGarciaAlves/intellij-community,idea4bsd/idea4bsd,kool79/intellij-community,diorcety/intellij-community,ibinti/intellij-community,ernestp/consulo,izonder/intellij-community,da1z/intellij-community,robovm/robovm-
studio,gnuhub/intellij-community,ibinti/intellij-community,tmpgit/intellij-community,nicolargo/intellij-community,lucafavatella/intellij-community,SerCeMan/intellij-community,alphafoobar/intellij-community,caot/intellij-community,clumsy/intellij-community,SerCeMan/intellij-community,xfournet/intellij-community,semonte/intellij-community,fnouama/intellij-community,signed/intellij-community,izonder/intellij-community,petteyg/intellij-community,ivan-fedorov/intellij-community,hurricup/intellij-community,clumsy/intellij-community,diorcety/intellij-community,jagguli/intellij-community,orekyuu/intellij-community,fitermay/intellij-community,jagguli/intellij-community,slisson/intellij-community,ivan-fedorov/intellij-community,tmpgit/intellij-community,caot/intellij-community,supersven/intellij-community,da1z/intellij-community,ryano144/intellij-community,supersven/intellij-community,kdwink/intellij-community,ivan-fedorov/intellij-community,da1z/intellij-community,robovm/robovm-studio,robovm/robovm-studio,asedunov/intellij-community,SerCeMan/intellij-community,michaelgallacher/intellij-community,holmes/intellij-community,fnouama/intellij-community,MichaelNedzelsky/intellij-community,fengbaicanhe/intellij-community,amith01994/intellij-community,mglukhikh/intellij-community,ahb0327/intellij-community,adedayo/intellij-community,ftomassetti/intellij-community,blademainer/intellij-community,wreckJ/intellij-community,nicolargo/intellij-community,vladmm/intellij-community,ahb0327/intellij-community,ftomassetti/intellij-community,MER-GROUP/intellij-community,idea4bsd/idea4bsd,joewalnes/idea-community,youdonghai/intellij-community,michaelgallacher/intellij-community,Lekanich/intellij-community,ivan-fedorov/intellij-community,fitermay/intellij-community,ryano144/intellij-community,nicolargo/intellij-community,pwoodworth/intellij-community,lucafavatella/intellij-community,samthor/intellij-community,orekyuu/intellij-community,ivan-fedorov/intellij-community,robovm/robovm-studio,petteyg/intellij-community,nicolargo/intellij-community,signed/intellij-community,semonte/intellij-community,kdwink/intellij-community,nicolargo/intellij-community,gnuhub/intellij-community,allotria/intellij-community,akosyakov/intellij-community,fengbaicanhe/intellij-community,ernestp/consulo,nicolargo/intellij-community,jagguli/intellij-community,semonte/intellij-community,MichaelNedzelsky/intellij-community,lucafavatella/intellij-community,ftomassetti/intellij-community,xfournet/intellij-community,samthor/intellij-community,izonder/intellij-community,signed/intellij-community,holmes/intellij-community,allotria/intellij-community,SerCeMan/intellij-community,ftomassetti/intellij-community,vvv1559/intellij-community,ahb0327/intellij-community,MER-GROUP/intellij-community,asedunov/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,pwoodworth/intellij-community,youdonghai/intellij-community,joewalnes/idea-community,amith01994/intellij-community,Distrotech/intellij-community,lucafavatella/intellij-community,clumsy/intellij-community,MichaelNedzelsky/intellij-community,michaelgallacher/intellij-community,MER-GROUP/intellij-community,pwoodworth/intellij-community,michaelgallacher/intellij-community,supersven/intellij-community,fengbaicanhe/intellij-community,salguarnieri/intellij-community,Lekanich/intellij-community,signed/intellij-community,suncycheng/intellij-community,petteyg/intellij-community,kool79/intellij-community,joewalnes/idea-community,MichaelNedzelsky/intellij-community,ibinti/intellij-community,fengba
icanhe/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,allotria/intellij-community,Lekanich/intellij-community,robovm/robovm-studio,petteyg/intellij-community,Distrotech/intellij-community,supersven/intellij-community,wreckJ/intellij-community,apixandru/intellij-community,pwoodworth/intellij-community,ol-loginov/intellij-community,FHannes/intellij-community,orekyuu/intellij-community,fnouama/intellij-community | /*
* Copyright (c) 2005 Your Corporation. All Rights Reserved.
*/
package com.intellij.util.xml.impl;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.psi.util.PropertyUtil;
import com.intellij.psi.xml.XmlTag;
import com.intellij.util.Function;
import com.intellij.util.containers.BidirectionalMap;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.xml.*;
import com.intellij.util.xml.reflect.*;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Type;
import java.util.*;
/**
* @author peter
*/
public class GenericInfoImpl implements DomGenericInfo {
private static final Logger LOG = Logger.getInstance("#com.intellij.util.xml.impl.GenericInfoImpl");
private final Class<? extends DomElement> myClass;
private DomManagerImpl myDomManager;
private final BidirectionalMap<JavaMethodSignature, Pair<String, Integer>> myFixedChildrenMethods =
new BidirectionalMap<JavaMethodSignature, Pair<String, Integer>>();
private final Map<String, Integer> myFixedChildrenCounts = new HashMap<String, Integer>();
private final Map<JavaMethodSignature, String> myCollectionChildrenGetterMethods = new HashMap<JavaMethodSignature, String>();
private final Map<JavaMethodSignature, String> myCollectionChildrenAdditionMethods = new HashMap<JavaMethodSignature, String>();
private final Map<String, Type> myCollectionChildrenClasses = new HashMap<String, Type>();
private final Map<JavaMethodSignature, String> myAttributeChildrenMethods = new HashMap<JavaMethodSignature, String>();
private final Map<JavaMethodSignature, Set<String>> myCompositeChildrenMethods = new HashMap<JavaMethodSignature, Set<String>>();
private final Map<JavaMethodSignature, Pair<String, Set<String>>> myCompositeCollectionAdditionMethods =
new HashMap<JavaMethodSignature, Pair<String, Set<String>>>();
private final Set<JavaMethodSignature> myRequiredChildrenGetters = new HashSet<JavaMethodSignature>();
@Nullable private Method myNameValueGetter;
private boolean myValueElement;
private boolean myInitialized;
  private static final Set<Class> ADDER_PARAMETER_TYPES = new HashSet<Class>(Arrays.asList(Class.class, int.class));
public GenericInfoImpl(final Class<? extends DomElement> aClass, final DomManagerImpl domManager) {
myClass = aClass;
myDomManager = domManager;
}
final int getFixedChildrenCount(String qname) {
final Integer integer = myFixedChildrenCounts.get(qname);
return integer == null ? 0 : integer;
}
final JavaMethodSignature getFixedChildGetter(final Pair<String, Integer> pair) {
return myFixedChildrenMethods.getKeysByValue(pair).get(0);
}
final Set<Map.Entry<JavaMethodSignature, String>> getCollectionChildrenEntries() {
return myCollectionChildrenGetterMethods.entrySet();
}
final Type getCollectionChildrenType(String tagName) {
return myCollectionChildrenClasses.get(tagName);
}
final Set<Map.Entry<JavaMethodSignature, String>> getAttributeChildrenEntries() {
return myAttributeChildrenMethods.entrySet();
}
final Set<String> getFixedChildrenNames() {
return myFixedChildrenCounts.keySet();
}
final Set<String> getCollectionChildrenNames() {
return myCollectionChildrenClasses.keySet();
}
final Collection<String> getAttributeChildrenNames() {
return myAttributeChildrenMethods.values();
}
final Pair<String, Integer> getFixedChildInfo(JavaMethodSignature method) {
return myFixedChildrenMethods.get(method);
}
final String getAttributeName(JavaMethodSignature method) {
return myAttributeChildrenMethods.get(method);
}
private static boolean isCoreMethod(final Method method) {
final Class<?> aClass = method.getDeclaringClass();
return aClass.isAssignableFrom(DomElement.class) || aClass.equals(GenericAttributeValue.class);
}
@Nullable
private String getSubTagName(final JavaMethodSignature method) {
final SubTag subTagAnnotation = method.findAnnotation(SubTag.class, myClass);
if (subTagAnnotation == null || StringUtil.isEmpty(subTagAnnotation.value())) {
return getNameFromMethod(method, false);
}
return subTagAnnotation.value();
}
@Nullable
private String getSubTagNameForCollection(final JavaMethodSignature method) {
final SubTagList subTagList = method.findAnnotation(SubTagList.class, myClass);
if (subTagList == null || StringUtil.isEmpty(subTagList.value())) {
final String propertyName = getPropertyName(method);
return propertyName != null ? getNameStrategy(false).convertName(StringUtil.unpluralize(propertyName)) : null;
}
return subTagList.value();
}
@Nullable
private String getNameFromMethod(final JavaMethodSignature method, boolean isAttribute) {
final String propertyName = getPropertyName(method);
return propertyName == null ? null : getNameStrategy(isAttribute).convertName(propertyName);
}
private static String getPropertyName(JavaMethodSignature method) {
return PropertyUtil.getPropertyName(method.getMethodName());
}
@NotNull
private DomNameStrategy getNameStrategy(boolean isAttribute) {
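    // Note: the isAttribute flag is intentionally unused here - the separate default
    // strategy for attribute names was rolled back (see the commit message of this record),
    // so attributes fall through to the same hyphen strategy as tags.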
final DomNameStrategy strategy = DomImplUtil.getDomNameStrategy(DomUtil.getRawType(myClass));
if (strategy != null) {
return strategy;
}
else {
return DomNameStrategy.HYPHEN_STRATEGY;
}
}
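  /**
   * Classifies the methods of the DOM interface in several passes: methods already implemented
   * in the registered implementation class are dropped first, then core, tag-value-setter and
   * custom methods, then getters are mapped to attribute, fixed or collection children, and
   * finally the add-methods for collection children are registered.
   */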
public final synchronized void buildMethodMaps() {
if (myInitialized) return;
myInitialized = true;
final Set<Method> methods = new HashSet<Method>(Arrays.asList(myClass.getMethods()));
final Set<JavaMethodSignature> removedSignatures = new HashSet<JavaMethodSignature>();
final Class implClass = myDomManager.getImplementation(myClass);
if (implClass != null) {
for (Method method : implClass.getMethods()) {
if (!Modifier.isAbstract(method.getModifiers())) {
removedSignatures.add(JavaMethodSignature.getSignature(method));
}
}
for (Iterator<Method> iterator = methods.iterator(); iterator.hasNext();) {
final Method method = iterator.next();
try {
if (!Modifier.isAbstract(implClass.getMethod(method.getName(), method.getParameterTypes()).getModifiers())) {
iterator.remove();
}
}
          catch (NoSuchMethodException e) {
            // the implementation class does not declare this method - keep it for DOM mapping
          }
}
}
for (Iterator<Method> iterator = methods.iterator(); iterator.hasNext();) {
final Method method = iterator.next();
final JavaMethodSignature signature = JavaMethodSignature.getSignature(method);
if (DomUtil.findAnnotationDFS(method, Required.class) != null) {
myRequiredChildrenGetters.add(signature);
}
if (isCoreMethod(method) || DomImplUtil.isTagValueSetter(method) || isCustomMethod(signature)) {
if (signature.findAnnotation(NameValue.class, myClass) != null) {
myNameValueGetter = method;
}
removedSignatures.add(signature);
iterator.remove();
}
}
for (Iterator<Method> iterator = methods.iterator(); iterator.hasNext();) {
Method method = iterator.next();
if (DomImplUtil.isGetter(method) && processGetterMethod(method)) {
final JavaMethodSignature signature = JavaMethodSignature.getSignature(method);
if (signature.findAnnotation(NameValue.class, myClass) != null) {
myNameValueGetter = method;
}
removedSignatures.add(signature);
iterator.remove();
}
}
for (Iterator<Method> iterator = methods.iterator(); iterator.hasNext();) {
Method method = iterator.next();
final JavaMethodSignature signature = JavaMethodSignature.getSignature(method);
final SubTagsList subTagsList = signature.findAnnotation(SubTagsList.class, myClass);
if (subTagsList != null && method.getName().startsWith("add")) {
final String tagName = subTagsList.tagName();
assert StringUtil.isNotEmpty(tagName);
final Set<String> set = new HashSet<String>(Arrays.asList(subTagsList.value()));
assert set.contains(tagName);
myCompositeCollectionAdditionMethods.put(signature, Pair.create(tagName, set));
iterator.remove();
}
else if (isAddMethod(method, signature)) {
myCollectionChildrenAdditionMethods.put(signature, extractTagName(signature, "add"));
removedSignatures.add(JavaMethodSignature.getSignature(method));
iterator.remove();
}
}
for (Iterator<Method> iterator = methods.iterator(); iterator.hasNext();) {
Method method = iterator.next();
final JavaMethodSignature signature = JavaMethodSignature.getSignature(method);
if (removedSignatures.contains(signature)) {
iterator.remove();
}
}
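    // Diagnostics for methods that remained unclassified; intentionally disabled
    // (enabling the block below fails on interfaces with unsupported methods).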
if (false) {
if (!methods.isEmpty()) {
StringBuilder sb = new StringBuilder(myClass + " should provide the following implementations:");
for (Method method : methods) {
sb.append("\n " + method);
}
assert false : sb.toString();
//System.out.println(sb.toString());
}
}
}
private boolean isAddMethod(Method method, JavaMethodSignature signature) {
final String tagName = extractTagName(signature, "add");
if (tagName == null) return false;
final Type childrenClass = getCollectionChildrenType(tagName);
if (childrenClass == null || !DomUtil.getRawType(childrenClass).isAssignableFrom(method.getReturnType())) return false;
return ADDER_PARAMETER_TYPES.containsAll(Arrays.asList(method.getParameterTypes()));
}
@Nullable
private String extractTagName(JavaMethodSignature method, @NonNls String prefix) {
final String name = method.getMethodName();
if (!name.startsWith(prefix)) return null;
final SubTagList subTagAnnotation = method.findAnnotation(SubTagList.class, myClass);
if (subTagAnnotation != null && !StringUtil.isEmpty(subTagAnnotation.value())) {
return subTagAnnotation.value();
}
final String tagName = getNameStrategy(false).convertName(name.substring(prefix.length()));
return StringUtil.isEmpty(tagName) ? null : tagName;
}
private boolean processGetterMethod(final Method method) {
if (DomImplUtil.isTagValueGetter(method)) {
myValueElement = true;
return true;
}
final boolean isAttributeValueMethod = method.getReturnType().equals(GenericAttributeValue.class);
final JavaMethodSignature signature = JavaMethodSignature.getSignature(method);
final Attribute annotation = signature.findAnnotation(Attribute.class, myClass);
final boolean isAttributeMethod = annotation != null || isAttributeValueMethod;
if (annotation != null) {
assert
isAttributeValueMethod || method.getReturnType().isAssignableFrom(GenericAttributeValue.class) :
method + " should return " + GenericAttributeValue.class;
}
if (isAttributeMethod) {
final String s = annotation == null ? null : annotation.value();
String attributeName = StringUtil.isEmpty(s) ? getNameFromMethod(signature, true) : s;
assert StringUtil.isNotEmpty(attributeName) : "Can't guess attribute name from method name: " + method.getName();
myAttributeChildrenMethods.put(signature, attributeName);
return true;
}
if (isDomElement(method.getReturnType())) {
final String qname = getSubTagName(signature);
if (qname != null) {
        assert !isCollectionChild(qname) : "Collection and fixed children cannot intersect: " + qname;
int index = 0;
final SubTag subTagAnnotation = signature.findAnnotation(SubTag.class, myClass);
if (subTagAnnotation != null && subTagAnnotation.index() != 0) {
index = subTagAnnotation.index();
}
myFixedChildrenMethods.put(signature, new Pair<String, Integer>(qname, index));
final Integer integer = myFixedChildrenCounts.get(qname);
if (integer == null || integer < index + 1) {
myFixedChildrenCounts.put(qname, index + 1);
}
return true;
}
}
final Type type = DomImplUtil.extractCollectionElementType(method.getGenericReturnType());
if (isDomElement(type)) {
final SubTagsList subTagsList = method.getAnnotation(SubTagsList.class);
if (subTagsList != null) {
myCompositeChildrenMethods.put(signature, new HashSet<String>(Arrays.asList(subTagsList.value())));
return true;
}
final String qname = getSubTagNameForCollection(signature);
if (qname != null) {
        assert !isFixedChild(qname) : "Collection and fixed children cannot intersect: " + qname;
myCollectionChildrenClasses.put(qname, type);
myCollectionChildrenGetterMethods.put(signature, qname);
return true;
}
}
return false;
}
private boolean isCustomMethod(final JavaMethodSignature method) {
return method.findAnnotation(PropertyAccessor.class, myClass) != null;
}
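  /**
   * Picks the {@link Invocation} strategy for an interface method using the maps built by
   * {@link #buildMethodMaps()}: property accessors, attribute getters, fixed and collection
   * child getters, composite getters and the various add-methods.
   */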
public final Invocation createInvocation(final Method method) {
buildMethodMaps();
final JavaMethodSignature signature = JavaMethodSignature.getSignature(method);
final PropertyAccessor accessor = signature.findAnnotation(PropertyAccessor.class, myClass);
if (accessor != null) {
return createPropertyAccessorInvocation(accessor);
}
if (myAttributeChildrenMethods.containsKey(signature)) {
return new GetAttributeChildInvocation(signature);
}
if (myFixedChildrenMethods.containsKey(signature)) {
return new GetFixedChildInvocation(signature);
}
final Set<String> qnames = myCompositeChildrenMethods.get(signature);
if (qnames != null) {
return new Invocation() {
public Object invoke(final DomInvocationHandler handler, final Object[] args) throws Throwable {
for (final String qname : qnames) {
handler.checkInitialized(qname);
}
final XmlTag tag = handler.getXmlTag();
if (tag == null) return Collections.emptyList();
final List<DomElement> list = new ArrayList<DomElement>();
for (final XmlTag subTag : tag.getSubTags()) {
if (qnames.contains(subTag.getLocalName())) {
final DomInvocationHandler element = DomManagerImpl.getCachedElement(subTag);
if (element != null) {
list.add(element.getProxy());
}
}
}
return list;
}
};
}
final Pair<String, Set<String>> pair = myCompositeCollectionAdditionMethods.get(signature);
if (pair != null) {
final Set<String> qnames1 = pair.second;
final String tagName = pair.first;
final Type type = method.getGenericReturnType();
return new Invocation() {
public Object invoke(final DomInvocationHandler handler, final Object[] args) throws Throwable {
final VirtualFile virtualFile = handler.getFile().getVirtualFile();
if (virtualFile != null && !virtualFile.isWritable()) {
VirtualFileManager.getInstance().fireReadOnlyModificationAttempt(virtualFile);
return null;
}
for (final String qname : qnames1) {
handler.checkInitialized(qname);
}
final XmlTag tag = handler.ensureTagExists();
int index = args != null && args.length == 1 ? (Integer)args[0] : Integer.MAX_VALUE;
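          // Locate the sub-tag after which the new element should be inserted,
          // counting only cached sub-tags that belong to this composite collection.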
XmlTag lastTag = null;
int i = 0;
final XmlTag[] tags = tag.getSubTags();
for (final XmlTag subTag : tags) {
if (i == index) break;
if (qnames1.contains(subTag.getLocalName())) {
final DomInvocationHandler element = DomManagerImpl.getCachedElement(subTag);
if (element != null) {
lastTag = subTag;
i++;
}
}
}
final DomManagerImpl manager = handler.getManager();
final boolean b = manager.setChanging(true);
try {
final XmlTag emptyTag = tag.getManager().getElementFactory().createTagFromText("<" + tagName + "/>");
final XmlTag newTag;
if (lastTag == null) {
if (tags.length == 0) {
newTag = (XmlTag)tag.add(emptyTag);
}
else {
newTag = (XmlTag)tag.addBefore(emptyTag, tags[0]);
}
}
else {
newTag = (XmlTag)tag.addAfter(emptyTag, lastTag);
}
return handler.createCollectionElement(type, newTag);
}
finally {
manager.setChanging(b);
}
}
};
}
String qname = myCollectionChildrenGetterMethods.get(signature);
if (qname != null) {
return new GetCollectionChildInvocation(qname);
}
qname = myCollectionChildrenAdditionMethods.get(signature);
if (qname != null) {
return new AddChildInvocation(getTypeGetter(method), getIndexGetter(method), qname, myCollectionChildrenClasses.get(qname));
}
throw new UnsupportedOperationException("No implementation for method " + method.toString() + " in class " + myClass);
}
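  // Builds an invocation that follows the chain of getters listed in @PropertyAccessor,
  // mapping over intermediate List values instead of dereferencing them directly.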
private Invocation createPropertyAccessorInvocation(final PropertyAccessor accessor) {
final String[] names = accessor.value();
final Method[] methods = new Method[names.length];
Class aClass = myClass;
for (int i = 0; i < names.length; i++) {
final Method getter = findGetter(aClass, names[i]);
assert getter != null : "Couldn't find getter for property " + names[i] + " in class " + aClass;
methods[i] = getter;
aClass = getter.getReturnType();
if (List.class.isAssignableFrom(aClass)) {
aClass = DomUtil.getRawType(DomImplUtil.extractCollectionElementType(getter.getGenericReturnType()));
}
}
final int lastElement = methods.length - 1;
return new Invocation() {
public final Object invoke(final DomInvocationHandler handler, final Object[] args) throws Throwable {
return invoke(0, handler.getProxy());
}
private Object invoke(final int i, final Object object) throws IllegalAccessException, InvocationTargetException {
final Object o = methods[i].invoke(object);
if (i == lastElement) return o;
if (o instanceof List) {
          List<Object> result = new ArrayList<Object>();
          for (Object o1 : (List<?>)o) {
result.add(invoke(i + 1, o1));
}
return result;
}
return invoke(i + 1, o);
}
};
}
@Nullable
private static Method findGetter(Class aClass, String propertyName) {
final String capitalized = StringUtil.capitalize(propertyName);
try {
return aClass.getMethod("get" + capitalized);
}
catch (NoSuchMethodException e) {
final Method method;
try {
method = aClass.getMethod("is" + capitalized);
return DomImplUtil.canHaveIsPropertyGetterPrefix(method.getGenericReturnType()) ? method : null;
}
catch (NoSuchMethodException e1) {
return null;
}
}
}
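  // The add-methods may take (int), (Class), (Class, int) or (int, Class) parameters;
  // the two functions below extract the element type and the insertion index accordingly.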
private static Function<Object[], Type> getTypeGetter(final Method method) {
final Class<?>[] parameterTypes = method.getParameterTypes();
if (parameterTypes.length >= 1 && parameterTypes[0].equals(Class.class)) {
return new Function<Object[], Type>() {
public Type fun(final Object[] s) {
return (Type)s[0];
}
};
}
if (parameterTypes.length == 2 && parameterTypes[1].equals(Class.class)) {
return new Function<Object[], Type>() {
public Type fun(final Object[] s) {
return (Type)s[1];
}
};
}
return new Function<Object[], Type>() {
public Type fun(final Object[] s) {
return method.getGenericReturnType();
}
};
}
private static Function<Object[], Integer> getIndexGetter(final Method method) {
final Class<?>[] parameterTypes = method.getParameterTypes();
if (parameterTypes.length >= 1 && parameterTypes[0].equals(int.class)) {
return new Function<Object[], Integer>() {
public Integer fun(final Object[] s) {
return (Integer)s[0];
}
};
}
if (parameterTypes.length == 2 && parameterTypes[1].equals(int.class)) {
return new Function<Object[], Integer>() {
public Integer fun(final Object[] s) {
return (Integer)s[1];
}
};
}
return new Function<Object[], Integer>() {
public Integer fun(final Object[] s) {
return Integer.MAX_VALUE;
}
};
}
@Nullable
private Method findGetterMethod(final Map<JavaMethodSignature, String> map, final String xmlElementName) {
buildMethodMaps();
for (Map.Entry<JavaMethodSignature, String> entry : map.entrySet()) {
if (xmlElementName.equals(entry.getValue())) {
return entry.getKey().findMethod(myClass);
}
}
return null;
}
@Nullable
private Method getCollectionAddMethod(final String tagName, Class... parameterTypes) {
for (Map.Entry<JavaMethodSignature, String> entry : myCollectionChildrenAdditionMethods.entrySet()) {
if (tagName.equals(entry.getValue())) {
final JavaMethodSignature method = entry.getKey();
if (Arrays.equals(parameterTypes, method.getParameterTypes())) {
return method.findMethod(myClass);
}
}
}
return null;
}
private Method[] getFixedChildrenGetterMethods(String tagName) {
final Method[] methods = new Method[getFixedChildrenCount(tagName)];
for (Map.Entry<JavaMethodSignature, Pair<String, Integer>> entry : myFixedChildrenMethods.entrySet()) {
final Pair<String, Integer> pair = entry.getValue();
if (tagName.equals(pair.getFirst())) {
methods[pair.getSecond()] = entry.getKey().findMethod(myClass);
}
}
return methods;
}
@Nullable
public String getElementName(DomElement element) {
if (myNameValueGetter == null) {
return null;
}
final Object o;
try {
o = myNameValueGetter.invoke(element);
return o == null || o instanceof String ? (String)o : ((GenericValue)o).getStringValue();
}
catch (IllegalAccessException e) {
LOG.error(e);
}
catch (InvocationTargetException e) {
LOG.error(e);
}
return null;
}
@NotNull
public List<DomChildrenDescription> getChildrenDescriptions() {
final ArrayList<DomChildrenDescription> result = new ArrayList<DomChildrenDescription>();
result.addAll(getAttributeChildrenDescriptions());
result.addAll(getFixedChildrenDescriptions());
result.addAll(getCollectionChildrenDescriptions());
return result;
}
@NotNull
public List<DomFixedChildDescription> getFixedChildrenDescriptions() {
buildMethodMaps();
final ArrayList<DomFixedChildDescription> result = new ArrayList<DomFixedChildDescription>();
for (String s : myFixedChildrenCounts.keySet()) {
result.add(getFixedChildDescription(s));
}
return result;
}
@NotNull
public List<DomCollectionChildDescription> getCollectionChildrenDescriptions() {
buildMethodMaps();
final ArrayList<DomCollectionChildDescription> result = new ArrayList<DomCollectionChildDescription>();
for (String s : myCollectionChildrenClasses.keySet()) {
result.add(getCollectionChildDescription(s));
}
return result;
}
@Nullable
public DomFixedChildDescription getFixedChildDescription(String tagName) {
buildMethodMaps();
final Method[] getterMethods = getFixedChildrenGetterMethods(tagName);
assert getterMethods.length > 0 : tagName + " " + myClass;
return new FixedChildDescriptionImpl(tagName, getterMethods[0].getGenericReturnType(), getFixedChildrenCount(tagName), getterMethods,
ContainerUtil.map2Array(getterMethods, Boolean.class, new Function<Method, Boolean>() {
public Boolean fun(final Method s) {
return isRequired(s);
}
}));
}
final boolean isRequired(Method method) {
return myRequiredChildrenGetters.contains(JavaMethodSignature.getSignature(method));
}
@Nullable
public DomCollectionChildDescription getCollectionChildDescription(String tagName) {
buildMethodMaps();
final Method getter = findGetterMethod(myCollectionChildrenGetterMethods, tagName);
return new CollectionChildDescriptionImpl(tagName, getCollectionChildrenType(tagName), getCollectionAddMethod(tagName),
getCollectionAddMethod(tagName, Class.class), getter,
getCollectionAddMethod(tagName, int.class),
getCollectionAddMethod(tagName, Class.class, int.class),
getCollectionAddMethod(tagName, int.class, Class.class), isRequired(getter));
}
@Nullable
public DomAttributeChildDescription getAttributeChildDescription(String attributeName) {
final Method getter = findGetterMethod(myAttributeChildrenMethods, attributeName);
if (getter == null) return null;
return new AttributeChildDescriptionImpl(attributeName, getter, isRequired(getter));
}
public boolean isTagValueElement() {
buildMethodMaps();
return myValueElement;
}
@NotNull
public List<DomAttributeChildDescription> getAttributeChildrenDescriptions() {
final ArrayList<DomAttributeChildDescription> result = new ArrayList<DomAttributeChildDescription>();
for (Map.Entry<JavaMethodSignature, String> entry : myAttributeChildrenMethods.entrySet()) {
final Method getter = entry.getKey().findMethod(myClass);
result.add(new AttributeChildDescriptionImpl(entry.getValue(), getter, isRequired(getter)));
}
return result;
}
@Nullable
public DomChildrenDescription getChildDescription(String tagName) {
if (isCollectionChild(tagName)) {
return getCollectionChildDescription(tagName);
}
if (isFixedChild(tagName)) {
return getFixedChildDescription(tagName);
}
return null;
}
final boolean isFixedChild(final String qname) {
return myFixedChildrenCounts.containsKey(qname);
}
final boolean isCollectionChild(final String qname) {
return myCollectionChildrenClasses.containsKey(qname);
}
public static boolean isDomElement(final Type type) {
return type != null && DomElement.class.isAssignableFrom(DomUtil.getRawType(type));
}
}
| source/com/intellij/util/xml/impl/GenericInfoImpl.java | /*
* Copyright (c) 2005 Your Corporation. All Rights Reserved.
*/
package com.intellij.util.xml.impl;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.VirtualFileManager;
import com.intellij.psi.util.PropertyUtil;
import com.intellij.psi.xml.XmlTag;
import com.intellij.util.Function;
import com.intellij.util.containers.BidirectionalMap;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.xml.*;
import com.intellij.util.xml.reflect.*;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Type;
import java.util.*;
/**
* @author peter
*/
public class GenericInfoImpl implements DomGenericInfo {
private static final Logger LOG = Logger.getInstance("#com.intellij.util.xml.impl.GenericInfoImpl");
private final Class<? extends DomElement> myClass;
private DomManagerImpl myDomManager;
private final BidirectionalMap<JavaMethodSignature, Pair<String, Integer>> myFixedChildrenMethods =
new BidirectionalMap<JavaMethodSignature, Pair<String, Integer>>();
private final Map<String, Integer> myFixedChildrenCounts = new HashMap<String, Integer>();
private final Map<JavaMethodSignature, String> myCollectionChildrenGetterMethods = new HashMap<JavaMethodSignature, String>();
private final Map<JavaMethodSignature, String> myCollectionChildrenAdditionMethods = new HashMap<JavaMethodSignature, String>();
private final Map<String, Type> myCollectionChildrenClasses = new HashMap<String, Type>();
private final Map<JavaMethodSignature, String> myAttributeChildrenMethods = new HashMap<JavaMethodSignature, String>();
private final Map<JavaMethodSignature, Set<String>> myCompositeChildrenMethods = new HashMap<JavaMethodSignature, Set<String>>();
private final Map<JavaMethodSignature, Pair<String, Set<String>>> myCompositeCollectionAdditionMethods =
new HashMap<JavaMethodSignature, Pair<String, Set<String>>>();
private final Set<JavaMethodSignature> myRequiredChildrenGetters = new HashSet<JavaMethodSignature>();
@Nullable private Method myNameValueGetter;
private boolean myValueElement;
private boolean myInitialized;
  private static final Set<Class> ADDER_PARAMETER_TYPES = new HashSet<Class>(Arrays.asList(Class.class, int.class));
public GenericInfoImpl(final Class<? extends DomElement> aClass, final DomManagerImpl domManager) {
myClass = aClass;
myDomManager = domManager;
}
final int getFixedChildrenCount(String qname) {
final Integer integer = myFixedChildrenCounts.get(qname);
return integer == null ? 0 : integer;
}
final JavaMethodSignature getFixedChildGetter(final Pair<String, Integer> pair) {
return myFixedChildrenMethods.getKeysByValue(pair).get(0);
}
final Set<Map.Entry<JavaMethodSignature, String>> getCollectionChildrenEntries() {
return myCollectionChildrenGetterMethods.entrySet();
}
final Type getCollectionChildrenType(String tagName) {
return myCollectionChildrenClasses.get(tagName);
}
final Set<Map.Entry<JavaMethodSignature, String>> getAttributeChildrenEntries() {
return myAttributeChildrenMethods.entrySet();
}
final Set<String> getFixedChildrenNames() {
return myFixedChildrenCounts.keySet();
}
final Set<String> getCollectionChildrenNames() {
return myCollectionChildrenClasses.keySet();
}
final Collection<String> getAttributeChildrenNames() {
return myAttributeChildrenMethods.values();
}
final Pair<String, Integer> getFixedChildInfo(JavaMethodSignature method) {
return myFixedChildrenMethods.get(method);
}
final String getAttributeName(JavaMethodSignature method) {
return myAttributeChildrenMethods.get(method);
}
private static boolean isCoreMethod(final Method method) {
final Class<?> aClass = method.getDeclaringClass();
return aClass.isAssignableFrom(DomElement.class) || aClass.equals(GenericAttributeValue.class);
}
@Nullable
private String getSubTagName(final JavaMethodSignature method) {
final SubTag subTagAnnotation = method.findAnnotation(SubTag.class, myClass);
if (subTagAnnotation == null || StringUtil.isEmpty(subTagAnnotation.value())) {
return getNameFromMethod(method, false);
}
return subTagAnnotation.value();
}
@Nullable
private String getSubTagNameForCollection(final JavaMethodSignature method) {
final SubTagList subTagList = method.findAnnotation(SubTagList.class, myClass);
if (subTagList == null || StringUtil.isEmpty(subTagList.value())) {
final String propertyName = getPropertyName(method);
return propertyName != null ? getNameStrategy(false).convertName(StringUtil.unpluralize(propertyName)) : null;
}
return subTagList.value();
}
@Nullable
private String getNameFromMethod(final JavaMethodSignature method, boolean isAttribute) {
final String propertyName = getPropertyName(method);
return propertyName == null ? null : getNameStrategy(isAttribute).convertName(propertyName);
}
private static String getPropertyName(JavaMethodSignature method) {
return PropertyUtil.getPropertyName(method.getMethodName());
}
@NotNull
private DomNameStrategy getNameStrategy(boolean isAttribute) {
final DomNameStrategy strategy = DomImplUtil.getDomNameStrategy(DomUtil.getRawType(myClass));
if (strategy != null) {
return strategy;
}
else {
return isAttribute ? DomNameStrategy.JAVA_STRATEGY : DomNameStrategy.HYPHEN_STRATEGY;
}
}
public final synchronized void buildMethodMaps() {
if (myInitialized) return;
myInitialized = true;
final Set<Method> methods = new HashSet<Method>(Arrays.asList(myClass.getMethods()));
final Set<JavaMethodSignature> removedSignatures = new HashSet<JavaMethodSignature>();
final Class implClass = myDomManager.getImplementation(myClass);
if (implClass != null) {
for (Method method : implClass.getMethods()) {
if (!Modifier.isAbstract(method.getModifiers())) {
removedSignatures.add(JavaMethodSignature.getSignature(method));
}
}
for (Iterator<Method> iterator = methods.iterator(); iterator.hasNext();) {
final Method method = iterator.next();
try {
if (!Modifier.isAbstract(implClass.getMethod(method.getName(), method.getParameterTypes()).getModifiers())) {
iterator.remove();
}
}
          catch (NoSuchMethodException e) {
            // the implementation class does not declare this method - keep it for DOM mapping
          }
}
}
for (Iterator<Method> iterator = methods.iterator(); iterator.hasNext();) {
final Method method = iterator.next();
final JavaMethodSignature signature = JavaMethodSignature.getSignature(method);
if (DomUtil.findAnnotationDFS(method, Required.class) != null) {
myRequiredChildrenGetters.add(signature);
}
if (isCoreMethod(method) || DomImplUtil.isTagValueSetter(method) || isCustomMethod(signature)) {
if (signature.findAnnotation(NameValue.class, myClass) != null) {
myNameValueGetter = method;
}
removedSignatures.add(signature);
iterator.remove();
}
}
for (Iterator<Method> iterator = methods.iterator(); iterator.hasNext();) {
Method method = iterator.next();
if (DomImplUtil.isGetter(method) && processGetterMethod(method)) {
final JavaMethodSignature signature = JavaMethodSignature.getSignature(method);
if (signature.findAnnotation(NameValue.class, myClass) != null) {
myNameValueGetter = method;
}
removedSignatures.add(signature);
iterator.remove();
}
}
for (Iterator<Method> iterator = methods.iterator(); iterator.hasNext();) {
Method method = iterator.next();
final JavaMethodSignature signature = JavaMethodSignature.getSignature(method);
final SubTagsList subTagsList = signature.findAnnotation(SubTagsList.class, myClass);
if (subTagsList != null && method.getName().startsWith("add")) {
final String tagName = subTagsList.tagName();
assert StringUtil.isNotEmpty(tagName);
final Set<String> set = new HashSet<String>(Arrays.asList(subTagsList.value()));
assert set.contains(tagName);
myCompositeCollectionAdditionMethods.put(signature, Pair.create(tagName, set));
iterator.remove();
}
else if (isAddMethod(method, signature)) {
myCollectionChildrenAdditionMethods.put(signature, extractTagName(signature, "add"));
removedSignatures.add(JavaMethodSignature.getSignature(method));
iterator.remove();
}
}
for (Iterator<Method> iterator = methods.iterator(); iterator.hasNext();) {
Method method = iterator.next();
final JavaMethodSignature signature = JavaMethodSignature.getSignature(method);
if (removedSignatures.contains(signature)) {
iterator.remove();
}
}
if (false) {
if (!methods.isEmpty()) {
StringBuilder sb = new StringBuilder(myClass + " should provide the following implementations:");
for (Method method : methods) {
sb.append("\n " + method);
}
assert false : sb.toString();
//System.out.println(sb.toString());
}
}
}
private boolean isAddMethod(Method method, JavaMethodSignature signature) {
final String tagName = extractTagName(signature, "add");
if (tagName == null) return false;
final Type childrenClass = getCollectionChildrenType(tagName);
if (childrenClass == null || !DomUtil.getRawType(childrenClass).isAssignableFrom(method.getReturnType())) return false;
return ADDER_PARAMETER_TYPES.containsAll(Arrays.asList(method.getParameterTypes()));
}
@Nullable
private String extractTagName(JavaMethodSignature method, @NonNls String prefix) {
final String name = method.getMethodName();
if (!name.startsWith(prefix)) return null;
final SubTagList subTagAnnotation = method.findAnnotation(SubTagList.class, myClass);
if (subTagAnnotation != null && !StringUtil.isEmpty(subTagAnnotation.value())) {
return subTagAnnotation.value();
}
final String tagName = getNameStrategy(false).convertName(name.substring(prefix.length()));
return StringUtil.isEmpty(tagName) ? null : tagName;
}
private boolean processGetterMethod(final Method method) {
if (DomImplUtil.isTagValueGetter(method)) {
myValueElement = true;
return true;
}
final boolean isAttributeValueMethod = method.getReturnType().equals(GenericAttributeValue.class);
final JavaMethodSignature signature = JavaMethodSignature.getSignature(method);
final Attribute annotation = signature.findAnnotation(Attribute.class, myClass);
final boolean isAttributeMethod = annotation != null || isAttributeValueMethod;
if (annotation != null) {
assert
isAttributeValueMethod || method.getReturnType().isAssignableFrom(GenericAttributeValue.class) :
method + " should return " + GenericAttributeValue.class;
}
if (isAttributeMethod) {
final String s = annotation == null ? null : annotation.value();
String attributeName = StringUtil.isEmpty(s) ? getNameFromMethod(signature, true) : s;
assert StringUtil.isNotEmpty(attributeName) : "Can't guess attribute name from method name: " + method.getName();
myAttributeChildrenMethods.put(signature, attributeName);
return true;
}
if (isDomElement(method.getReturnType())) {
final String qname = getSubTagName(signature);
if (qname != null) {
        assert !isCollectionChild(qname) : "Collection and fixed children cannot intersect: " + qname;
int index = 0;
final SubTag subTagAnnotation = signature.findAnnotation(SubTag.class, myClass);
if (subTagAnnotation != null && subTagAnnotation.index() != 0) {
index = subTagAnnotation.index();
}
myFixedChildrenMethods.put(signature, new Pair<String, Integer>(qname, index));
final Integer integer = myFixedChildrenCounts.get(qname);
if (integer == null || integer < index + 1) {
myFixedChildrenCounts.put(qname, index + 1);
}
return true;
}
}
final Type type = DomImplUtil.extractCollectionElementType(method.getGenericReturnType());
if (isDomElement(type)) {
final SubTagsList subTagsList = method.getAnnotation(SubTagsList.class);
if (subTagsList != null) {
myCompositeChildrenMethods.put(signature, new HashSet<String>(Arrays.asList(subTagsList.value())));
return true;
}
final String qname = getSubTagNameForCollection(signature);
if (qname != null) {
        assert !isFixedChild(qname) : "Collection and fixed children cannot intersect: " + qname;
myCollectionChildrenClasses.put(qname, type);
myCollectionChildrenGetterMethods.put(signature, qname);
return true;
}
}
return false;
}
private boolean isCustomMethod(final JavaMethodSignature method) {
return method.findAnnotation(PropertyAccessor.class, myClass) != null;
}
public final Invocation createInvocation(final Method method) {
buildMethodMaps();
final JavaMethodSignature signature = JavaMethodSignature.getSignature(method);
final PropertyAccessor accessor = signature.findAnnotation(PropertyAccessor.class, myClass);
if (accessor != null) {
return createPropertyAccessorInvocation(accessor);
}
if (myAttributeChildrenMethods.containsKey(signature)) {
return new GetAttributeChildInvocation(signature);
}
if (myFixedChildrenMethods.containsKey(signature)) {
return new GetFixedChildInvocation(signature);
}
final Set<String> qnames = myCompositeChildrenMethods.get(signature);
if (qnames != null) {
return new Invocation() {
public Object invoke(final DomInvocationHandler handler, final Object[] args) throws Throwable {
for (final String qname : qnames) {
handler.checkInitialized(qname);
}
final XmlTag tag = handler.getXmlTag();
if (tag == null) return Collections.emptyList();
final List<DomElement> list = new ArrayList<DomElement>();
for (final XmlTag subTag : tag.getSubTags()) {
if (qnames.contains(subTag.getLocalName())) {
final DomInvocationHandler element = DomManagerImpl.getCachedElement(subTag);
if (element != null) {
list.add(element.getProxy());
}
}
}
return list;
}
};
}
final Pair<String, Set<String>> pair = myCompositeCollectionAdditionMethods.get(signature);
if (pair != null) {
final Set<String> qnames1 = pair.second;
final String tagName = pair.first;
final Type type = method.getGenericReturnType();
return new Invocation() {
public Object invoke(final DomInvocationHandler handler, final Object[] args) throws Throwable {
final VirtualFile virtualFile = handler.getFile().getVirtualFile();
if (virtualFile != null && !virtualFile.isWritable()) {
VirtualFileManager.getInstance().fireReadOnlyModificationAttempt(virtualFile);
return null;
}
for (final String qname : qnames1) {
handler.checkInitialized(qname);
}
final XmlTag tag = handler.ensureTagExists();
int index = args != null && args.length == 1 ? (Integer)args[0] : Integer.MAX_VALUE;
XmlTag lastTag = null;
int i = 0;
final XmlTag[] tags = tag.getSubTags();
for (final XmlTag subTag : tags) {
if (i == index) break;
if (qnames1.contains(subTag.getLocalName())) {
final DomInvocationHandler element = DomManagerImpl.getCachedElement(subTag);
if (element != null) {
lastTag = subTag;
i++;
}
}
}
final DomManagerImpl manager = handler.getManager();
final boolean b = manager.setChanging(true);
try {
final XmlTag emptyTag = tag.getManager().getElementFactory().createTagFromText("<" + tagName + "/>");
final XmlTag newTag;
if (lastTag == null) {
if (tags.length == 0) {
newTag = (XmlTag)tag.add(emptyTag);
}
else {
newTag = (XmlTag)tag.addBefore(emptyTag, tags[0]);
}
}
else {
newTag = (XmlTag)tag.addAfter(emptyTag, lastTag);
}
return handler.createCollectionElement(type, newTag);
}
finally {
manager.setChanging(b);
}
}
};
}
String qname = myCollectionChildrenGetterMethods.get(signature);
if (qname != null) {
return new GetCollectionChildInvocation(qname);
}
qname = myCollectionChildrenAdditionMethods.get(signature);
if (qname != null) {
return new AddChildInvocation(getTypeGetter(method), getIndexGetter(method), qname, myCollectionChildrenClasses.get(qname));
}
throw new UnsupportedOperationException("No implementation for method " + method.toString() + " in class " + myClass);
}
private Invocation createPropertyAccessorInvocation(final PropertyAccessor accessor) {
final String[] names = accessor.value();
final Method[] methods = new Method[names.length];
Class aClass = myClass;
for (int i = 0; i < names.length; i++) {
final Method getter = findGetter(aClass, names[i]);
assert getter != null : "Couldn't find getter for property " + names[i] + " in class " + aClass;
methods[i] = getter;
aClass = getter.getReturnType();
if (List.class.isAssignableFrom(aClass)) {
aClass = DomUtil.getRawType(DomImplUtil.extractCollectionElementType(getter.getGenericReturnType()));
}
}
final int lastElement = methods.length - 1;
return new Invocation() {
public final Object invoke(final DomInvocationHandler handler, final Object[] args) throws Throwable {
return invoke(0, handler.getProxy());
}
private Object invoke(final int i, final Object object) throws IllegalAccessException, InvocationTargetException {
final Object o = methods[i].invoke(object);
if (i == lastElement) return o;
if (o instanceof List) {
          List<Object> result = new ArrayList<Object>();
          for (Object o1 : (List<?>)o) {
result.add(invoke(i + 1, o1));
}
return result;
}
return invoke(i + 1, o);
}
};
}
@Nullable
private static Method findGetter(Class aClass, String propertyName) {
final String capitalized = StringUtil.capitalize(propertyName);
try {
return aClass.getMethod("get" + capitalized);
}
catch (NoSuchMethodException e) {
final Method method;
try {
method = aClass.getMethod("is" + capitalized);
return DomImplUtil.canHaveIsPropertyGetterPrefix(method.getGenericReturnType()) ? method : null;
}
catch (NoSuchMethodException e1) {
return null;
}
}
}
private static Function<Object[], Type> getTypeGetter(final Method method) {
final Class<?>[] parameterTypes = method.getParameterTypes();
if (parameterTypes.length >= 1 && parameterTypes[0].equals(Class.class)) {
return new Function<Object[], Type>() {
public Type fun(final Object[] s) {
return (Type)s[0];
}
};
}
if (parameterTypes.length == 2 && parameterTypes[1].equals(Class.class)) {
return new Function<Object[], Type>() {
public Type fun(final Object[] s) {
return (Type)s[1];
}
};
}
return new Function<Object[], Type>() {
public Type fun(final Object[] s) {
return method.getGenericReturnType();
}
};
}
private static Function<Object[], Integer> getIndexGetter(final Method method) {
final Class<?>[] parameterTypes = method.getParameterTypes();
if (parameterTypes.length >= 1 && parameterTypes[0].equals(int.class)) {
return new Function<Object[], Integer>() {
public Integer fun(final Object[] s) {
return (Integer)s[0];
}
};
}
if (parameterTypes.length == 2 && parameterTypes[1].equals(int.class)) {
return new Function<Object[], Integer>() {
public Integer fun(final Object[] s) {
return (Integer)s[1];
}
};
}
return new Function<Object[], Integer>() {
public Integer fun(final Object[] s) {
return Integer.MAX_VALUE;
}
};
}
@Nullable
private Method findGetterMethod(final Map<JavaMethodSignature, String> map, final String xmlElementName) {
buildMethodMaps();
for (Map.Entry<JavaMethodSignature, String> entry : map.entrySet()) {
if (xmlElementName.equals(entry.getValue())) {
return entry.getKey().findMethod(myClass);
}
}
return null;
}
@Nullable
private Method getCollectionAddMethod(final String tagName, Class... parameterTypes) {
for (Map.Entry<JavaMethodSignature, String> entry : myCollectionChildrenAdditionMethods.entrySet()) {
if (tagName.equals(entry.getValue())) {
final JavaMethodSignature method = entry.getKey();
if (Arrays.equals(parameterTypes, method.getParameterTypes())) {
return method.findMethod(myClass);
}
}
}
return null;
}
private Method[] getFixedChildrenGetterMethods(String tagName) {
final Method[] methods = new Method[getFixedChildrenCount(tagName)];
for (Map.Entry<JavaMethodSignature, Pair<String, Integer>> entry : myFixedChildrenMethods.entrySet()) {
final Pair<String, Integer> pair = entry.getValue();
if (tagName.equals(pair.getFirst())) {
methods[pair.getSecond()] = entry.getKey().findMethod(myClass);
}
}
return methods;
}
@Nullable
public String getElementName(DomElement element) {
if (myNameValueGetter == null) {
return null;
}
final Object o;
try {
o = myNameValueGetter.invoke(element);
return o == null || o instanceof String ? (String)o : ((GenericValue)o).getStringValue();
}
catch (IllegalAccessException e) {
LOG.error(e);
}
catch (InvocationTargetException e) {
LOG.error(e);
}
return null;
}
@NotNull
public List<DomChildrenDescription> getChildrenDescriptions() {
final ArrayList<DomChildrenDescription> result = new ArrayList<DomChildrenDescription>();
result.addAll(getAttributeChildrenDescriptions());
result.addAll(getFixedChildrenDescriptions());
result.addAll(getCollectionChildrenDescriptions());
return result;
}
@NotNull
public List<DomFixedChildDescription> getFixedChildrenDescriptions() {
buildMethodMaps();
final ArrayList<DomFixedChildDescription> result = new ArrayList<DomFixedChildDescription>();
for (String s : myFixedChildrenCounts.keySet()) {
result.add(getFixedChildDescription(s));
}
return result;
}
@NotNull
public List<DomCollectionChildDescription> getCollectionChildrenDescriptions() {
buildMethodMaps();
final ArrayList<DomCollectionChildDescription> result = new ArrayList<DomCollectionChildDescription>();
for (String s : myCollectionChildrenClasses.keySet()) {
result.add(getCollectionChildDescription(s));
}
return result;
}
@Nullable
public DomFixedChildDescription getFixedChildDescription(String tagName) {
buildMethodMaps();
final Method[] getterMethods = getFixedChildrenGetterMethods(tagName);
assert getterMethods.length > 0 : tagName + " " + myClass;
return new FixedChildDescriptionImpl(tagName, getterMethods[0].getGenericReturnType(), getFixedChildrenCount(tagName), getterMethods,
ContainerUtil.map2Array(getterMethods, Boolean.class, new Function<Method, Boolean>() {
public Boolean fun(final Method s) {
return isRequired(s);
}
}));
}
final boolean isRequired(Method method) {
return myRequiredChildrenGetters.contains(JavaMethodSignature.getSignature(method));
}
@Nullable
public DomCollectionChildDescription getCollectionChildDescription(String tagName) {
buildMethodMaps();
final Method getter = findGetterMethod(myCollectionChildrenGetterMethods, tagName);
return new CollectionChildDescriptionImpl(tagName, getCollectionChildrenType(tagName), getCollectionAddMethod(tagName),
getCollectionAddMethod(tagName, Class.class), getter,
getCollectionAddMethod(tagName, int.class),
getCollectionAddMethod(tagName, Class.class, int.class),
getCollectionAddMethod(tagName, int.class, Class.class), isRequired(getter));
}
@Nullable
public DomAttributeChildDescription getAttributeChildDescription(String attributeName) {
final Method getter = findGetterMethod(myAttributeChildrenMethods, attributeName);
if (getter == null) return null;
return new AttributeChildDescriptionImpl(attributeName, getter, isRequired(getter));
}
public boolean isTagValueElement() {
buildMethodMaps();
return myValueElement;
}
@NotNull
public List<DomAttributeChildDescription> getAttributeChildrenDescriptions() {
final ArrayList<DomAttributeChildDescription> result = new ArrayList<DomAttributeChildDescription>();
for (Map.Entry<JavaMethodSignature, String> entry : myAttributeChildrenMethods.entrySet()) {
final Method getter = entry.getKey().findMethod(myClass);
result.add(new AttributeChildDescriptionImpl(entry.getValue(), getter, isRequired(getter)));
}
return result;
}
@Nullable
public DomChildrenDescription getChildDescription(String tagName) {
if (isCollectionChild(tagName)) {
return getCollectionChildDescription(tagName);
}
if (isFixedChild(tagName)) {
return getFixedChildDescription(tagName);
}
return null;
}
final boolean isFixedChild(final String qname) {
return myFixedChildrenCounts.containsKey(qname);
}
final boolean isCollectionChild(final String qname) {
return myCollectionChildrenClasses.containsKey(qname);
}
public static boolean isDomElement(final Type type) {
return type != null && DomElement.class.isAssignableFrom(DomUtil.getRawType(type));
}
}
| default name strategy for xml attributes rolled back
| source/com/intellij/util/xml/impl/GenericInfoImpl.java | default name strategy for xml attributes rolled back |
|
Java | apache-2.0 | e9e088e32725deaae300fabb84ff8b65f1a8a055 | 0 | Nerdwin15/wildfly-rewriter,Nerdwin15/wildfly-rewriter | /*
* File created on May 28, 2014
*
* Copyright 2013-2014 Nerdwin15, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.nerdwin15.wildfly.rewriter.web.repo;
import java.util.List;
import javax.enterprise.context.ApplicationScoped;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import com.nerdwin15.wildfly.rewriter.web.RewriteRule;
import com.nerdwin15.wildfly.rewriter.web.RuleModel;
import com.nerdwin15.wildfly.rewriter.web.domain.RewriteRuleEntity;
/**
* Implementation of the {@link RuleRepository} that uses JPA.
*
* @author Michael Irwin
*/
@ApplicationScoped
public class JpaRuleRepository implements RuleRepository {
@PersistenceContext
private EntityManager entityManager;
/**
* {@inheritDoc}
*/
@Override
public RewriteRule createRule(RuleModel ruleModel) {
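    // Copy the submitted model onto a new entity and persist it; the id is expected
    // to be generated by the persistence provider on persist.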
RewriteRuleEntity entity = new RewriteRuleEntity();
entity.setFrom(ruleModel.getFrom());
entity.setTo(ruleModel.getTo());
entityManager.persist(entity);
return entity;
}
/**
* {@inheritDoc}
*/
@Override
public List<RewriteRule> retrieveAllRules() {
List<RewriteRule> rules = entityManager
.createNamedQuery("retrieveAllRules", RewriteRule.class)
.getResultList();
return rules;
}
/**
* {@inheritDoc}
*/
@Override
public void deleteRule(Long ruleId) {
entityManager
.createNamedQuery("deleteRuleById")
.setParameter("id", ruleId)
.executeUpdate();
}
}
| wildfly-rewriter-webapp/src/main/java/com/nerdwin15/wildfly/rewriter/web/repo/JpaRuleRepository.java | /*
* File created on May 28, 2014
*
* Copyright 2013-2014 Nerdwin15, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package com.nerdwin15.wildfly.rewriter.web.repo;
import java.util.List;
import javax.enterprise.context.ApplicationScoped;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import com.nerdwin15.wildfly.rewriter.web.RewriteRule;
/**
* Implementation of the {@link RuleRepository} that uses JPA.
*
* @author Michael Irwin
*/
@ApplicationScoped
public class JpaRuleRepository implements RuleRepository {
@PersistenceContext
private EntityManager entityManager;
/**
* {@inheritDoc}
*/
@Override
public List<RewriteRule> retrieveAllRules() {
List<RewriteRule> rules = entityManager
.createNamedQuery("retrieveAllRules", RewriteRule.class)
.getResultList();
return rules;
}
/**
* {@inheritDoc}
*/
@Override
public void deleteRule(Long ruleId) {
entityManager
.createNamedQuery("deleteRuleById")
.setParameter("id", ruleId)
.executeUpdate();
}
}
| Added repository implementation of createRule | wildfly-rewriter-webapp/src/main/java/com/nerdwin15/wildfly/rewriter/web/repo/JpaRuleRepository.java | Added repository implementation of createRule |
|
Java | apache-2.0 | 667c8dab98fe5553c458e7263870c8dced66c45d | 0 | idea4bsd/idea4bsd,michaelgallacher/intellij-community,fitermay/intellij-community,ibinti/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,asedunov/intellij-community,ThiagoGarciaAlves/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,signed/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,hurricup/intellij-community,asedunov/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,fitermay/intellij-community,signed/intellij-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community,semonte/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,signed/intellij-community,youdonghai/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,michaelgallacher/intellij-community,youdonghai/intellij-community,allotria/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,signed/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,hurricup/intellij-community,suncycheng/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,signed/intellij-community,fitermay/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,signed/intellij-community,hurricup/intellij-community,youdonghai/intellij-community,FHannes/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,asedunov/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,hurricup/intellij-community,da1z/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,mglukhikh/intellij-community,apixandru/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,semonte/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,apixandru/intellij-community,signed/intellij-community,ibinti/intellij-community,da1z/intellij-community,fitermay/intellij-community,apixandru/intellij-community,hurricup/intellij-community,hurricup/intellij-community,suncycheng/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,FHannes/intellij-community,da1z/intellij-community,apixandru/intellij-community,da1z/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,da1z/intellij-community,xfournet/intellij-community,semonte/intellij-community,hurricup/intellij-community,suncycheng/intellij-community,da1z/intellij-community,vvv1559/intellij-community,semonte/intellij-community,signed/intellij-community,asedunov/intellij-community,FHannes/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,da1z/intellij-community,ibinti/intellij-community,hurricup/intellij-community,ibinti/intellij-community,asedunov/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,FHannes/intellij-community,xf
ournet/intellij-community,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,ibinti/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,FHannes/intellij-community,asedunov/intellij-community,semonte/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,allotria/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,da1z/intellij-community,asedunov/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,semonte/intellij-community,suncycheng/intellij-community,apixandru/intellij-community,da1z/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,idea4bsd/idea4bsd,idea4bsd/idea4bsd,idea4bsd/idea4bsd,vvv1559/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,xfournet/intellij-community,asedunov/intellij-community,FHannes/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community,allotria/intellij-community,FHannes/intellij-community,asedunov/intellij-community,signed/intellij-community,da1z/intellij-community,suncycheng/intellij-community,semonte/intellij-community,apixandru/intellij-community,semonte/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,signed/intellij-community,suncycheng/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,youdonghai/intellij-community,semonte/intellij-community,vvv1559/intellij-community,semonte/intellij-community,asedunov/intellij-community,apixandru/intellij-community,ibinti/intellij-community,fitermay/intellij-community,asedunov/intellij-community,allotria/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,suncycheng/intellij-community,ibinti/intellij-community,ibinti/intellij-community,ibinti/intellij-community,da1z/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,hurricup/intellij-community,fitermay/intellij-community,ibinti/intellij-community,hurricup/intellij-community,fitermay/intellij-community,hurricup/intellij-community,allotria/intellij-community,FHannes/intellij-community,youdonghai/intellij-community,signed/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,apixandru/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,vvv1559/intellij-community,semonte/intellij-community,mglukhikh/intellij-community,apixandru/intellij-community,apixandru/intellij-community,youdonghai/intellij-community,signed/intellij-community,suncycheng/intellij-community,allotria/intellij-community | package com.intellij.codeInsight.editorActions.fillParagraph;
import com.intellij.formatting.FormatterTagHandler;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.UnfairTextRange;
import com.intellij.openapi.util.text.CharFilter;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.impl.source.codeStyle.CodeFormatterFacade;
import com.intellij.psi.tree.IElementType;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
/**
* Defines general re-flow paragraph functionality.
* Serves plain text files.
*
* User : ktisha
*/
public class ParagraphFillHandler {
protected void performOnElement(@NotNull final PsiElement element, @NotNull final Editor editor) {
final Document document = editor.getDocument();
final TextRange textRange = getTextRange(element, editor);
if (textRange.isEmpty()) return;
final String text = textRange.substring(element.getContainingFile().getText());
final List<String> subStrings = StringUtil.split(text, "\n", true);
final String prefix = getPrefix(element);
final String postfix = getPostfix(element);
final StringBuilder stringBuilder = new StringBuilder();
appendPrefix(element, text, stringBuilder);
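    // Re-flow step: strip the language-specific prefix/postfix (e.g. comment
    // markers) from each line and join the remaining words into one long line;
    // the formatter invocation below re-wraps that line at the right margin.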
for (String string : subStrings) {
final String startTrimmed = StringUtil.trimStart(string.trim(), prefix.trim());
final String str = StringUtil.trimEnd(startTrimmed, postfix.trim());
final String finalString = str.trim();
if (!StringUtil.isEmptyOrSpaces(finalString))
stringBuilder.append(finalString).append(" ");
}
appendPostfix(element, text, stringBuilder);
final String replacementText = stringBuilder.toString();
CommandProcessor.getInstance().executeCommand(element.getProject(), () -> {
document.replaceString(textRange.getStartOffset(), textRange.getEndOffset(),
replacementText);
final CodeFormatterFacade codeFormatter = new CodeFormatterFacade(
CodeStyleSettingsManager.getSettings(element.getProject()), element.getLanguage());
final PsiFile file = element.getContainingFile();
FormatterTagHandler formatterTagHandler = new FormatterTagHandler(CodeStyleSettingsManager.getSettings(file.getProject()));
List<TextRange> enabledRanges = formatterTagHandler.getEnabledRanges(file.getNode(), TextRange.create(0, document.getTextLength()));
codeFormatter.doWrapLongLinesIfNecessary(editor, element.getProject(), document,
textRange.getStartOffset(),
textRange.getStartOffset() + replacementText.length() + 1,
enabledRanges);
}, null, document);
}
protected void appendPostfix(@NotNull final PsiElement element,
@NotNull final String text,
@NotNull final StringBuilder stringBuilder) {
final String postfix = getPostfix(element);
if (text.endsWith(postfix.trim()))
stringBuilder.append(postfix);
}
protected void appendPrefix(@NotNull final PsiElement element,
@NotNull final String text,
@NotNull final StringBuilder stringBuilder) {
final String prefix = getPrefix(element);
if (text.startsWith(prefix.trim()))
stringBuilder.append(prefix);
}
private TextRange getTextRange(@NotNull final PsiElement element, @NotNull final Editor editor) {
int startOffset = getStartOffset(element, editor);
int endOffset = getEndOffset(element, editor);
return new UnfairTextRange(startOffset, endOffset);
}
private int getStartOffset(@NotNull final PsiElement element, @NotNull final Editor editor) {
if (isBunchOfElement(element)) {
final PsiElement firstElement = getFirstElement(element);
return firstElement != null? firstElement.getTextRange().getStartOffset()
: element.getTextRange().getStartOffset();
}
final int offset = editor.getCaretModel().getOffset();
final int elementTextOffset = element.getTextOffset();
final Document document = editor.getDocument();
int lineNumber = document.getLineNumber(offset);
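    // Scan upwards from the caret line for a blank line (a paragraph
    // boundary), stepping back down to the first non-blank line when one is
    // found; the scan also stops at the line where the element starts.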
while (lineNumber != document.getLineNumber(elementTextOffset)) {
final String text = document.getText(TextRange.create(document.getLineStartOffset(lineNumber),
document.getLineEndOffset(lineNumber)));
if (StringUtil.isEmptyOrSpaces(text)) {
lineNumber += 1;
break;
}
lineNumber -= 1;
}
    final int lineStartOffset = lineNumber == document.getLineNumber(elementTextOffset)
                                ? elementTextOffset
                                : document.getLineStartOffset(lineNumber);
final String lineText = document
.getText(TextRange.create(lineStartOffset, document.getLineEndOffset(lineNumber)));
int shift = StringUtil.findFirst(lineText, CharFilter.NOT_WHITESPACE_FILTER);
return lineStartOffset + shift;
}
protected boolean isBunchOfElement(PsiElement element) {
return element instanceof PsiComment;
}
private int getEndOffset(@NotNull final PsiElement element, @NotNull final Editor editor) {
if (isBunchOfElement(element)) {
final PsiElement next = getLastElement(element);
return next != null? next.getTextRange().getEndOffset()
: element.getTextRange().getEndOffset();
}
final int offset = editor.getCaretModel().getOffset();
final int elementTextOffset = element.getTextRange().getEndOffset();
final Document document = editor.getDocument();
int lineNumber = document.getLineNumber(offset);
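    // Mirror of getStartOffset: scan downwards from the caret line until a
    // blank line or the element's last line is reached, backing up one line
    // when a blank line is found.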
while (lineNumber != document.getLineNumber(elementTextOffset)) {
final String text = document.getText(TextRange.create(document.getLineStartOffset(lineNumber),
document.getLineEndOffset(lineNumber)));
if (StringUtil.isEmptyOrSpaces(text)) {
lineNumber -= 1;
break;
}
lineNumber += 1;
}
return document.getLineEndOffset(lineNumber);
}
@Nullable
private PsiElement getFirstElement(@NotNull final PsiElement element) {
final IElementType elementType = element.getNode().getElementType();
PsiElement prevSibling = element.getPrevSibling();
PsiElement result = element;
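    // Walk back over the run of preceding siblings of the same element type
    // (e.g. consecutive line comments), tolerating at most one line break of
    // whitespace between them; an empty comment of the same type ends the run.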
while (prevSibling != null && (prevSibling.getNode().getElementType().equals(elementType) ||
(atWhitespaceToken(prevSibling) &&
StringUtil.countChars(prevSibling.getText(), '\n') <= 1))) {
String text = prevSibling.getText();
final String prefix = getPrefix(element);
final String postfix = getPostfix(element);
text = StringUtil.trimStart(text.trim(), prefix.trim());
text = StringUtil.trimEnd(text, postfix);
if (prevSibling.getNode().getElementType().equals(elementType) &&
StringUtil.isEmptyOrSpaces(text)) {
break;
}
if (prevSibling.getNode().getElementType().equals(elementType))
result = prevSibling;
prevSibling = prevSibling.getPrevSibling();
}
return result;
}
@Nullable
private PsiElement getLastElement(@NotNull final PsiElement element) {
final IElementType elementType = element.getNode().getElementType();
PsiElement nextSibling = element.getNextSibling();
PsiElement result = element;
while (nextSibling != null && (nextSibling.getNode().getElementType().equals(elementType) ||
(atWhitespaceToken(nextSibling) &&
StringUtil.countChars(nextSibling.getText(), '\n') <= 1))) {
String text = nextSibling.getText();
final String prefix = getPrefix(element);
final String postfix = getPostfix(element);
text = StringUtil.trimStart(text.trim(), prefix.trim());
text = StringUtil.trimEnd(text, postfix);
if (nextSibling.getNode().getElementType().equals(elementType) &&
StringUtil.isEmptyOrSpaces(text)) {
break;
}
if (nextSibling.getNode().getElementType().equals(elementType))
result = nextSibling;
nextSibling = nextSibling.getNextSibling();
}
return result;
}
protected boolean atWhitespaceToken(@Nullable final PsiElement element) {
return element instanceof PsiWhiteSpace;
}
protected boolean isAvailableForElement(@Nullable final PsiElement element) {
return element != null;
}
protected boolean isAvailableForFile(@Nullable final PsiFile psiFile) {
return psiFile instanceof PsiPlainTextFile;
}
@NotNull
protected String getPrefix(@NotNull final PsiElement element) {
return "";
}
@NotNull
protected String getPostfix(@NotNull final PsiElement element) {
return "";
}
}
| platform/lang-impl/src/com/intellij/codeInsight/editorActions/fillParagraph/ParagraphFillHandler.java | package com.intellij.codeInsight.editorActions.fillParagraph;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.UnfairTextRange;
import com.intellij.openapi.util.text.CharFilter;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.impl.source.codeStyle.CodeFormatterFacade;
import com.intellij.psi.tree.IElementType;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collections;
import java.util.List;
/**
* Defines general re-flow paragraph functionality.
* Serves plain text files.
*
* User : ktisha
*/
public class ParagraphFillHandler {
protected void performOnElement(@NotNull final PsiElement element, @NotNull final Editor editor) {
final Document document = editor.getDocument();
final TextRange textRange = getTextRange(element, editor);
if (textRange.isEmpty()) return;
final String text = textRange.substring(element.getContainingFile().getText());
final List<String> subStrings = StringUtil.split(text, "\n", true);
final String prefix = getPrefix(element);
final String postfix = getPostfix(element);
final StringBuilder stringBuilder = new StringBuilder();
appendPrefix(element, text, stringBuilder);
for (String string : subStrings) {
final String startTrimmed = StringUtil.trimStart(string.trim(), prefix.trim());
final String str = StringUtil.trimEnd(startTrimmed, postfix.trim());
final String finalString = str.trim();
if (!StringUtil.isEmptyOrSpaces(finalString))
stringBuilder.append(finalString).append(" ");
}
appendPostfix(element, text, stringBuilder);
final String replacementText = stringBuilder.toString();
CommandProcessor.getInstance().executeCommand(element.getProject(), () -> {
document.replaceString(textRange.getStartOffset(), textRange.getEndOffset(),
replacementText);
final CodeFormatterFacade codeFormatter = new CodeFormatterFacade(
CodeStyleSettingsManager.getSettings(element.getProject()), element.getLanguage());
codeFormatter.doWrapLongLinesIfNecessary(editor, element.getProject(), document,
textRange.getStartOffset(),
textRange.getStartOffset() + replacementText.length() + 1, Collections.emptyList());
}, null, document);
}
protected void appendPostfix(@NotNull final PsiElement element,
@NotNull final String text,
@NotNull final StringBuilder stringBuilder) {
final String postfix = getPostfix(element);
if (text.endsWith(postfix.trim()))
stringBuilder.append(postfix);
}
protected void appendPrefix(@NotNull final PsiElement element,
@NotNull final String text,
@NotNull final StringBuilder stringBuilder) {
final String prefix = getPrefix(element);
if (text.startsWith(prefix.trim()))
stringBuilder.append(prefix);
}
private TextRange getTextRange(@NotNull final PsiElement element, @NotNull final Editor editor) {
int startOffset = getStartOffset(element, editor);
int endOffset = getEndOffset(element, editor);
return new UnfairTextRange(startOffset, endOffset);
}
private int getStartOffset(@NotNull final PsiElement element, @NotNull final Editor editor) {
if (isBunchOfElement(element)) {
final PsiElement firstElement = getFirstElement(element);
return firstElement != null? firstElement.getTextRange().getStartOffset()
: element.getTextRange().getStartOffset();
}
final int offset = editor.getCaretModel().getOffset();
final int elementTextOffset = element.getTextOffset();
final Document document = editor.getDocument();
int lineNumber = document.getLineNumber(offset);
while (lineNumber != document.getLineNumber(elementTextOffset)) {
final String text = document.getText(TextRange.create(document.getLineStartOffset(lineNumber),
document.getLineEndOffset(lineNumber)));
if (StringUtil.isEmptyOrSpaces(text)) {
lineNumber += 1;
break;
}
lineNumber -= 1;
}
    final int lineStartOffset = lineNumber == document.getLineNumber(elementTextOffset)
                                ? elementTextOffset
                                : document.getLineStartOffset(lineNumber);
final String lineText = document
.getText(TextRange.create(lineStartOffset, document.getLineEndOffset(lineNumber)));
int shift = StringUtil.findFirst(lineText, CharFilter.NOT_WHITESPACE_FILTER);
return lineStartOffset + shift;
}
protected boolean isBunchOfElement(PsiElement element) {
return element instanceof PsiComment;
}
private int getEndOffset(@NotNull final PsiElement element, @NotNull final Editor editor) {
if (isBunchOfElement(element)) {
final PsiElement next = getLastElement(element);
return next != null? next.getTextRange().getEndOffset()
: element.getTextRange().getEndOffset();
}
final int offset = editor.getCaretModel().getOffset();
final int elementTextOffset = element.getTextRange().getEndOffset();
final Document document = editor.getDocument();
int lineNumber = document.getLineNumber(offset);
while (lineNumber != document.getLineNumber(elementTextOffset)) {
final String text = document.getText(TextRange.create(document.getLineStartOffset(lineNumber),
document.getLineEndOffset(lineNumber)));
if (StringUtil.isEmptyOrSpaces(text)) {
lineNumber -= 1;
break;
}
lineNumber += 1;
}
return document.getLineEndOffset(lineNumber);
}
@Nullable
private PsiElement getFirstElement(@NotNull final PsiElement element) {
final IElementType elementType = element.getNode().getElementType();
PsiElement prevSibling = element.getPrevSibling();
PsiElement result = element;
while (prevSibling != null && (prevSibling.getNode().getElementType().equals(elementType) ||
(atWhitespaceToken(prevSibling) &&
StringUtil.countChars(prevSibling.getText(), '\n') <= 1))) {
String text = prevSibling.getText();
final String prefix = getPrefix(element);
final String postfix = getPostfix(element);
text = StringUtil.trimStart(text.trim(), prefix.trim());
text = StringUtil.trimEnd(text, postfix);
if (prevSibling.getNode().getElementType().equals(elementType) &&
StringUtil.isEmptyOrSpaces(text)) {
break;
}
if (prevSibling.getNode().getElementType().equals(elementType))
result = prevSibling;
prevSibling = prevSibling.getPrevSibling();
}
return result;
}
@Nullable
private PsiElement getLastElement(@NotNull final PsiElement element) {
final IElementType elementType = element.getNode().getElementType();
PsiElement nextSibling = element.getNextSibling();
PsiElement result = element;
while (nextSibling != null && (nextSibling.getNode().getElementType().equals(elementType) ||
(atWhitespaceToken(nextSibling) &&
StringUtil.countChars(nextSibling.getText(), '\n') <= 1))) {
String text = nextSibling.getText();
final String prefix = getPrefix(element);
final String postfix = getPostfix(element);
text = StringUtil.trimStart(text.trim(), prefix.trim());
text = StringUtil.trimEnd(text, postfix);
if (nextSibling.getNode().getElementType().equals(elementType) &&
StringUtil.isEmptyOrSpaces(text)) {
break;
}
if (nextSibling.getNode().getElementType().equals(elementType))
result = nextSibling;
nextSibling = nextSibling.getNextSibling();
}
return result;
}
protected boolean atWhitespaceToken(@Nullable final PsiElement element) {
return element instanceof PsiWhiteSpace;
}
protected boolean isAvailableForElement(@Nullable final PsiElement element) {
return element != null;
}
protected boolean isAvailableForFile(@Nullable final PsiFile psiFile) {
return psiFile instanceof PsiPlainTextFile;
}
@NotNull
protected String getPrefix(@NotNull final PsiElement element) {
return "";
}
@NotNull
protected String getPostfix(@NotNull final PsiElement element) {
return "";
}
}
| fixed tests
| platform/lang-impl/src/com/intellij/codeInsight/editorActions/fillParagraph/ParagraphFillHandler.java | fixed tests |
|
Java | apache-2.0 | 03bb057986519a84915386206f7b4a5d8caec91f | 0 | Governance/overlord-commons,objectiser/overlord-commons,Governance/overlord-commons,objectiser/overlord-commons,objectiser/overlord-commons | /*
* Copyright 2013 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.overlord.commons.maven.plugin;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.List;
import java.util.jar.Attributes;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.DefaultArtifact;
import org.apache.maven.artifact.resolver.ArtifactResolutionRequest;
import org.apache.maven.artifact.resolver.ArtifactResolutionResult;
import org.apache.maven.model.Model;
import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.project.MavenProject;
import org.apache.maven.project.MavenProjectHelper;
import org.apache.maven.repository.RepositorySystem;
import org.apache.maven.shared.artifact.filter.PatternExcludesArtifactFilter;
import org.apache.maven.shared.artifact.filter.PatternIncludesArtifactFilter;
import org.apache.maven.shared.artifact.filter.ScopeArtifactFilter;
import org.apache.maven.shared.dependency.graph.DependencyGraphBuilder;
import org.apache.maven.shared.dependency.graph.DependencyNode;
import org.apache.maven.shared.dependency.graph.traversal.CollectingDependencyNodeVisitor;
import org.overlord.commons.maven.plugin.featuresxml.FeaturesXml;
/**
* A mojo that can generate a karaf features.xml file.
*
* @author [email protected]
*/
@Mojo(name = "generate-features-xml", requiresDependencyResolution = ResolutionScope.RUNTIME, threadSafe = true, defaultPhase = LifecyclePhase.GENERATE_RESOURCES)
public class GenerateFeaturesXmlMojo extends AbstractMojo {
@Parameter(property = "generate-features-xml.outputFile", defaultValue = "${project.build.outputDirectory}/features.xml")
private String outputFile;
@Parameter(property = "generate-features-xml.attach", defaultValue = "false")
private String attach;
@Parameter(property = "generate-features-xml.features")
private List<Feature> features;
@Parameter(property = "generate-features-xml.repositories")
private List<String> repositories;
@Component
private MavenProject project;
@Component(hint = "default")
private DependencyGraphBuilder dependencyGraphBuilder;
@Component
protected RepositorySystem repositorySystem;
@Component
private MavenProjectHelper projectHelper;
/**
* Constructor.
*/
public GenerateFeaturesXmlMojo() {
}
/**
* @see org.apache.maven.plugin.Mojo#execute()
*/
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
getLog().info("-------------------------------------------------"); //$NON-NLS-1$
getLog().info("Generating Karaf compatible features.xml file to:"); //$NON-NLS-1$
getLog().info(" " + outputFile); //$NON-NLS-1$
getLog().info("-------------------------------------------------"); //$NON-NLS-1$
try {
FeaturesXml featuresXml = new FeaturesXml();
generate(featuresXml);
File file = new File(outputFile);
file.getParentFile().mkdirs();
featuresXml.writeTo(file);
if ("true".equals(attach)) { //$NON-NLS-1$
attachToBuild(file);
}
} catch (Exception e) {
throw new MojoExecutionException(e.getMessage(), e);
}
}
/**
* Attaches the features.xml file to the build.
* @param file the generated features.xml file
*/
private void attachToBuild(File file) {
projectHelper.attachArtifact(this.project, "xml", "features", file); //$NON-NLS-1$ //$NON-NLS-2$
}
/**
* @param featuresXml
*/
private void generate(FeaturesXml featuresXml) throws Exception {
// Add the repositories
if (this.repositories != null) {
for (String repo : repositories) {
featuresXml.addRepository(repo);
}
}
// Collect all dependencies (bundle candidates)
ScopeArtifactFilter filter = new ScopeArtifactFilter(DefaultArtifact.SCOPE_RUNTIME);
DependencyNode dependencyGraph = dependencyGraphBuilder.buildDependencyGraph(project, filter);
CollectingDependencyNodeVisitor collectingVizzy = new CollectingDependencyNodeVisitor();
dependencyGraph.accept(collectingVizzy);
List<DependencyNode> nodes = collectingVizzy.getNodes();
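        // The collecting visitor flattens the entire runtime-scoped dependency
        // tree, so transitive dependencies become bundle candidates as well.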
// Iterate all features
for (Feature feature : features) {
getLog().info("Generating feature '" + feature.getName() + "'"); //$NON-NLS-1$ //$NON-NLS-2$
// Create the feature
featuresXml.addFeature(feature.getName(), feature.getVersion(), feature.getComment());
// Add any feature dependencies
List<Feature> onFeatures = feature.getDependsOnFeatures();
if (onFeatures != null && !onFeatures.isEmpty()) {
for (Feature onFeature : onFeatures) {
getLog().info(
" Depends on feature: " + onFeature.getName() + "/" + onFeature.getVersion()); //$NON-NLS-1$ //$NON-NLS-2$
featuresXml.addFeatureDependency(feature.getName(), feature.getVersion(),
onFeature.getName(), onFeature.getVersion());
}
}
// Add any included or non-excluded bundles (from artifact
// dependency graph)
PatternIncludesArtifactFilter includesFilter = new PatternIncludesArtifactFilter(
feature.getIncludes());
PatternExcludesArtifactFilter excludesFilter = new PatternExcludesArtifactFilter(
feature.getExcludes());
for (DependencyNode dependencyNode : nodes) {
if (isSelf(dependencyNode))
continue;
Artifact artifact = dependencyNode.getArtifact();
// If no includes, assume everything
boolean includeBundle = feature.getIncludes() == null || feature.getIncludes().isEmpty();
if (includeBundle) {
getLog().debug(" Artifact " + artifact + " matches default [all] filter (including)."); //$NON-NLS-1$ //$NON-NLS-2$
}
if (includesFilter.include(artifact)) {
getLog().debug(" Artifact " + artifact + " matched include filter (including)."); //$NON-NLS-1$ //$NON-NLS-2$
includeBundle = true;
}
// Excludes must be explicit.
if (!excludesFilter.include(artifact)) {
getLog().debug(" Artifact " + artifact + " matched exclude filter (excluding)."); //$NON-NLS-1$ //$NON-NLS-2$
includeBundle = false;
}
if (includeBundle) {
featuresXml.addBundle(feature.getName(), feature.getVersion(),
formatArtifactAsBundle(artifact));
}
}
// Add additional explicit bundles specified in the config
List<String> bundles = feature.getBundles();
if (bundles != null && !bundles.isEmpty()) {
for (String bundle : bundles) {
getLog().debug(" Adding explicit bundle: " + bundle); //$NON-NLS-1$
featuresXml.addBundle(feature.getName(), feature.getVersion(), bundle);
}
}
}
}
/**
* Returns true if this dependency is really just ourselves.
* @param dependencyNode
*/
private boolean isSelf(DependencyNode dependencyNode) {
return project.getGroupId().equals(dependencyNode.getArtifact().getGroupId()) &&
project.getArtifactId().equals(dependencyNode.getArtifact().getArtifactId());
}
/**
* Format the given artifact as a bundle string with the appropriate syntax
* used by the karaf features.xml file. For example:
*
* mvn:commons-configuration/commons-configuration/1.6
*
* @param artifact
*/
private String formatArtifactAsBundle(Artifact artifact) throws Exception {
StringBuilder builder = new StringBuilder();
// If it's a bundle already, awesome. If not, we need to wrap it
// and include some useful meta-data.
if (isBundle(artifact)) {
// Example: mvn:commons-configuration/commons-configuration/1.6
builder.append("mvn:"); //$NON-NLS-1$
builder.append(artifact.getGroupId());
builder.append("/"); //$NON-NLS-1$
builder.append(artifact.getArtifactId());
builder.append("/"); //$NON-NLS-1$
builder.append(artifact.getBaseVersion());
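            // The mvn: URL scheme is mvn:groupId/artifactId/version[/type[/classifier]];
            // an empty classifier is normalized to null, and the type segment must be
            // emitted whenever a classifier follows it, even for plain jars.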
String classifier = artifact.getClassifier();
if (classifier != null && classifier.trim().length() == 0) {
classifier = null;
}
if (!"jar".equalsIgnoreCase(artifact.getType()) || classifier != null) { //$NON-NLS-1$
builder.append("/"); //$NON-NLS-1$
builder.append(artifact.getType());
}
if (classifier != null) {
builder.append("/"); //$NON-NLS-1$
builder.append(classifier);
}
} else {
// Example: wrap:mvn:log4j/log4j/1.2.14$Bundle-SymbolicName=log4j.log4j&Bundle-Version=1.2.14&Bundle-Name=Log4j
builder.append("wrap:mvn:"); //$NON-NLS-1$
builder.append(artifact.getGroupId());
builder.append("/"); //$NON-NLS-1$
builder.append(artifact.getArtifactId());
builder.append("/"); //$NON-NLS-1$
builder.append(artifact.getBaseVersion());
String classifier = artifact.getClassifier();
if (classifier != null && classifier.trim().length() == 0) {
classifier = null;
}
if (!"jar".equalsIgnoreCase(artifact.getType()) || classifier != null) { //$NON-NLS-1$
builder.append("/"); //$NON-NLS-1$
builder.append(artifact.getType());
}
if (classifier != null) {
builder.append("/"); //$NON-NLS-1$
builder.append(classifier);
}
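            // wrap: passes manifest headers as query parameters after '$'
            // (key=value pairs joined with '&'), which the wrapping URL handler
            // writes into the generated bundle's manifest.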
MavenProject project = resolveProject(artifact);
builder.append("$Bundle-SymbolicName="); //$NON-NLS-1$
builder.append(artifact.getGroupId());
builder.append("."); //$NON-NLS-1$
builder.append(artifact.getArtifactId());
builder.append("&Bundle-Version="); //$NON-NLS-1$
builder.append(sanitizeVersionForOsgi(artifact.getBaseVersion()));
if (project.getName() != null && project.getName().trim().length() > 0) {
builder.append("&Bundle-Name="); //$NON-NLS-1$
builder.append(project.getName());
}
}
return builder.toString();
}
/**
* OSGi doesn't allow non-numeric components in version strings. So for
* example a common maven version is 2.0.0-SNAPSHOT. This needs to be
* converted to 2.0.0 so that OSGi will parse it without an exception. I
* don't have a great way to do this generically, so we'll just need to
* update this method with additional fixes as we find problematic version
* strings.
* @param version
*/
private Object sanitizeVersionForOsgi(String version) {
        // Remove the version qualifier: everything from the first '-' on
        // (e.g. "-SNAPSHOT", "-beta-1") is stripped
if (version.contains("-")) { //$NON-NLS-1$
version = version.substring(0, version.indexOf('-'));
}
// Fix things like 1.3.5a (becomes 1.3.5)
String ver = version.replaceAll("([0-9])[a-zA-Z]+", "$1"); //$NON-NLS-1$ //$NON-NLS-2$
if (!ver.contains(".")) { //$NON-NLS-1$
return ver;
}
        // Handle the case where there are only two numeric components and one
        // non-numeric component, like 1.7.Alpha. Converts it to 1.7.0.Alpha
String[] split = ver.split("\\."); //$NON-NLS-1$
if (split.length == 3) {
if (isNumeric(split[0]) && isNumeric(split[1]) && isAlpha(split[2])) {
return split[0] + "." + split[1] + ".0." + split[2]; //$NON-NLS-1$ //$NON-NLS-2$
}
}
return ver;
}
/**
* @param versionComponent
*/
private boolean isAlpha(String versionComponent) {
return versionComponent.length() > 0 && Character.isLetter(versionComponent.charAt(0));
}
/**
* @param versionComponent
*/
private boolean isNumeric(String versionComponent) {
for (int i = 0; i < versionComponent.length(); i++) {
if (!Character.isDigit(versionComponent.charAt(i))) {
return false;
}
}
return true;
}
/**
* Detect if this artifact is already an osgi bundle. If it is, then we don't need
* to wrap it. The best way to figure this out is to crack open the JAR and take a look
* at the manifest.
* @param artifact
* @throws Exception
*/
private boolean isBundle(Artifact artifact) throws Exception {
// Resolve the artifact.
ArtifactResolutionRequest request = new ArtifactResolutionRequest().setArtifact(artifact);
ArtifactResolutionResult result = repositorySystem.resolve(request);
// If not found, then assume it's a reactor dependency and therefore should be a bundle.
if (result.getArtifacts().isEmpty()) {
getLog().info("Artifact " + artifact.toString() + " not found in local repository, assuming reactor dependency."); //$NON-NLS-1$ //$NON-NLS-2$
return true;
}
artifact = result.getArtifacts().iterator().next();
if (!artifact.getFile().isFile()) {
throw new Exception("Resolved artifact is not a file: " + artifact.getFile().getAbsolutePath()); //$NON-NLS-1$
}
// Crack open the dependency JAR, read the manifest, check for osgi attributes.
JarFile jf = null;
try {
jf = new JarFile(artifact.getFile());
Manifest manifest = jf.getManifest();
if (manifest == null) {
getLog().info("Artifact " + artifact.toString() + " missing a manifest! Assuming not a bundle."); //$NON-NLS-1$ //$NON-NLS-2$
return false;
}
Attributes attributes = manifest.getMainAttributes();
if (attributes != null) {
String value = attributes.getValue("Bundle-SymbolicName"); //$NON-NLS-1$
if (value != null && value.trim().length() > 0) {
return true;
}
}
} finally {
jf.close();
}
return false;
}
/**
* Resolves the given artifact to a maven project.
* @param artifact
* @throws Exception
*/
private MavenProject resolveProject(Artifact artifact) throws Exception {
Artifact pomArtifact = repositorySystem.createArtifact(artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(), "", "pom"); //$NON-NLS-1$ //$NON-NLS-2$
ArtifactResolutionRequest request = new ArtifactResolutionRequest();
request.setArtifact(pomArtifact);
ArtifactResolutionResult resolved = repositorySystem.resolve(request);
pomArtifact = resolved.getArtifacts().iterator().next();
InputStream contentStream = null;
MavenProject project = null;
try {
contentStream = new FileInputStream(pomArtifact.getFile());
Model model = new MavenXpp3Reader().read(contentStream);
project = new MavenProject(model);
} finally {
contentStream.close();
}
return project;
}
}
| overlord-commons-maven-plugin/src/main/java/org/overlord/commons/maven/plugin/GenerateFeaturesXmlMojo.java | /*
* Copyright 2013 JBoss Inc
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.overlord.commons.maven.plugin;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;
import java.util.List;
import java.util.jar.Attributes;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.DefaultArtifact;
import org.apache.maven.artifact.resolver.ArtifactResolutionRequest;
import org.apache.maven.artifact.resolver.ArtifactResolutionResult;
import org.apache.maven.model.Model;
import org.apache.maven.model.io.xpp3.MavenXpp3Reader;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.project.MavenProject;
import org.apache.maven.project.MavenProjectHelper;
import org.apache.maven.repository.RepositorySystem;
import org.apache.maven.shared.artifact.filter.PatternExcludesArtifactFilter;
import org.apache.maven.shared.artifact.filter.PatternIncludesArtifactFilter;
import org.apache.maven.shared.artifact.filter.ScopeArtifactFilter;
import org.apache.maven.shared.dependency.graph.DependencyGraphBuilder;
import org.apache.maven.shared.dependency.graph.DependencyNode;
import org.apache.maven.shared.dependency.graph.traversal.CollectingDependencyNodeVisitor;
import org.overlord.commons.maven.plugin.featuresxml.FeaturesXml;
/**
* A mojo that can generate a karaf features.xml file.
*
* @author [email protected]
*/
@Mojo(name = "generate-features-xml", requiresDependencyResolution = ResolutionScope.RUNTIME, threadSafe = true, defaultPhase = LifecyclePhase.GENERATE_RESOURCES)
public class GenerateFeaturesXmlMojo extends AbstractMojo {
@Parameter(property = "generate-features-xml.outputFile", defaultValue = "${project.build.outputDirectory}/features.xml")
private String outputFile;
@Parameter(property = "generate-features-xml.attach", defaultValue = "false")
private String attach;
@Parameter(property = "generate-features-xml.features")
private List<Feature> features;
@Parameter(property = "generate-features-xml.repositories")
private List<String> repositories;
@Component
private MavenProject project;
@Component(hint = "default")
private DependencyGraphBuilder dependencyGraphBuilder;
@Component
protected RepositorySystem repositorySystem;
@Component
private MavenProjectHelper projectHelper;
/**
* Constructor.
*/
public GenerateFeaturesXmlMojo() {
}
/**
* @see org.apache.maven.plugin.Mojo#execute()
*/
@Override
public void execute() throws MojoExecutionException, MojoFailureException {
getLog().info("-------------------------------------------------"); //$NON-NLS-1$
getLog().info("Generating Karaf compatible features.xml file to:"); //$NON-NLS-1$
getLog().info(" " + outputFile); //$NON-NLS-1$
getLog().info("-------------------------------------------------"); //$NON-NLS-1$
try {
FeaturesXml featuresXml = new FeaturesXml();
generate(featuresXml);
File file = new File(outputFile);
file.getParentFile().mkdirs();
featuresXml.writeTo(file);
if ("true".equals(attach)) { //$NON-NLS-1$
attachToBuild(file);
}
} catch (Exception e) {
throw new MojoExecutionException(e.getMessage(), e);
}
}
/**
* Attaches the features.xml file to the build.
* @param file the generated features.xml file
*/
private void attachToBuild(File file) {
projectHelper.attachArtifact(this.project, "xml", "features", file); //$NON-NLS-1$ //$NON-NLS-2$
}
/**
* @param featuresXml
*/
private void generate(FeaturesXml featuresXml) throws Exception {
// Add the repositories
if (this.repositories != null) {
for (String repo : repositories) {
featuresXml.addRepository(repo);
}
}
// Collect all dependencies (bundle candidates)
ScopeArtifactFilter filter = new ScopeArtifactFilter(DefaultArtifact.SCOPE_RUNTIME);
DependencyNode dependencyGraph = dependencyGraphBuilder.buildDependencyGraph(project, filter);
CollectingDependencyNodeVisitor collectingVizzy = new CollectingDependencyNodeVisitor();
dependencyGraph.accept(collectingVizzy);
List<DependencyNode> nodes = collectingVizzy.getNodes();
// Iterate all features
for (Feature feature : features) {
getLog().info("Generating feature '" + feature.getName() + "'"); //$NON-NLS-1$ //$NON-NLS-2$
// Create the feature
featuresXml.addFeature(feature.getName(), feature.getVersion(), feature.getComment());
// Add any feature dependencies
List<Feature> onFeatures = feature.getDependsOnFeatures();
if (onFeatures != null && !onFeatures.isEmpty()) {
for (Feature onFeature : onFeatures) {
getLog().info(
" Depends on feature: " + onFeature.getName() + "/" + onFeature.getVersion()); //$NON-NLS-1$ //$NON-NLS-2$
featuresXml.addFeatureDependency(feature.getName(), feature.getVersion(),
onFeature.getName(), onFeature.getVersion());
}
}
// Add any included or non-excluded bundles (from artifact
// dependency graph)
PatternIncludesArtifactFilter includesFilter = new PatternIncludesArtifactFilter(
feature.getIncludes());
PatternExcludesArtifactFilter excludesFilter = new PatternExcludesArtifactFilter(
feature.getExcludes());
for (DependencyNode dependencyNode : nodes) {
if (isSelf(dependencyNode))
continue;
Artifact artifact = dependencyNode.getArtifact();
// If no includes, assume everything
boolean includeBundle = feature.getIncludes() == null || feature.getIncludes().isEmpty();
if (includeBundle) {
getLog().debug(" Artifact " + artifact + " matches default [all] filter (including)."); //$NON-NLS-1$ //$NON-NLS-2$
}
if (includesFilter.include(artifact)) {
getLog().debug(" Artifact " + artifact + " matched include filter (including)."); //$NON-NLS-1$ //$NON-NLS-2$
includeBundle = true;
}
// Excludes must be explicit.
if (!excludesFilter.include(artifact)) {
getLog().debug(" Artifact " + artifact + " matched exclude filter (excluding)."); //$NON-NLS-1$ //$NON-NLS-2$
includeBundle = false;
}
if (includeBundle) {
featuresXml.addBundle(feature.getName(), feature.getVersion(),
formatArtifactAsBundle(artifact));
}
}
// Add additional explicit bundles specified in the config
List<String> bundles = feature.getBundles();
if (bundles != null && !bundles.isEmpty()) {
for (String bundle : bundles) {
getLog().debug(" Adding explicit bundle: " + bundle); //$NON-NLS-1$
featuresXml.addBundle(feature.getName(), feature.getVersion(), bundle);
}
}
}
}
/**
* Returns true if this dependency is really just ourselves.
* @param dependencyNode
*/
private boolean isSelf(DependencyNode dependencyNode) {
return project.getGroupId().equals(dependencyNode.getArtifact().getGroupId()) &&
project.getArtifactId().equals(dependencyNode.getArtifact().getArtifactId());
}
/**
* Format the given artifact as a bundle string with the appropriate syntax
* used by the karaf features.xml file. For example:
*
* mvn:commons-configuration/commons-configuration/1.6
*
* @param artifact
*/
private String formatArtifactAsBundle(Artifact artifact) throws Exception {
StringBuilder builder = new StringBuilder();
// If it's a bundle already, awesome. If not, we need to wrap it
// and include some useful meta-data.
if (isBundle(artifact)) {
// Example: mvn:commons-configuration/commons-configuration/1.6
builder.append("mvn:"); //$NON-NLS-1$
builder.append(artifact.getGroupId());
builder.append("/"); //$NON-NLS-1$
builder.append(artifact.getArtifactId());
builder.append("/"); //$NON-NLS-1$
builder.append(artifact.getBaseVersion());
if (!"jar".equalsIgnoreCase(artifact.getType())) { //$NON-NLS-1$
builder.append("/"); //$NON-NLS-1$
builder.append(artifact.getType());
}
} else {
// Example: wrap:mvn:log4j/log4j/1.2.14$Bundle-SymbolicName=log4j.log4j&Bundle-Version=1.2.14&Bundle-Name=Log4j
builder.append("wrap:mvn:"); //$NON-NLS-1$
builder.append(artifact.getGroupId());
builder.append("/"); //$NON-NLS-1$
builder.append(artifact.getArtifactId());
builder.append("/"); //$NON-NLS-1$
builder.append(artifact.getBaseVersion());
if (!"jar".equalsIgnoreCase(artifact.getType())) { //$NON-NLS-1$
builder.append("/"); //$NON-NLS-1$
builder.append(artifact.getType());
}
MavenProject project = resolveProject(artifact);
builder.append("$Bundle-SymbolicName="); //$NON-NLS-1$
builder.append(artifact.getGroupId());
builder.append("."); //$NON-NLS-1$
builder.append(artifact.getArtifactId());
builder.append("&Bundle-Version="); //$NON-NLS-1$
builder.append(sanitizeVersionForOsgi(artifact.getBaseVersion()));
if (project.getName() != null && project.getName().trim().length() > 0) {
builder.append("&Bundle-Name="); //$NON-NLS-1$
builder.append(project.getName());
}
}
return builder.toString();
}
/**
* OSGi doesn't allow non-numeric components in version strings. So for
* example a common maven version is 2.0.0-SNAPSHOT. This needs to be
* converted to 2.0.0 so that OSGi will parse it without an exception. I
* don't have a great way to do this generically, so we'll just need to
* update this method with additional fixes as we find problematic version
* strings.
* @param version
*/
private Object sanitizeVersionForOsgi(String version) {
        // Remove the version qualifier: everything from the first '-' on
        // (e.g. "-SNAPSHOT", "-beta-1") is stripped
if (version.contains("-")) { //$NON-NLS-1$
version = version.substring(0, version.indexOf('-'));
}
// Fix things like 1.3.5a (becomes 1.3.5)
String ver = version.replaceAll("([0-9])[a-zA-Z]+", "$1"); //$NON-NLS-1$ //$NON-NLS-2$
if (!ver.contains(".")) { //$NON-NLS-1$
return ver;
}
        // Handle the case where there are only two numeric components and one
        // non-numeric component, like 1.7.Alpha. Converts it to 1.7.0.Alpha
String[] split = ver.split("\\."); //$NON-NLS-1$
if (split.length == 3) {
if (isNumeric(split[0]) && isNumeric(split[1]) && isAlpha(split[2])) {
return split[0] + "." + split[1] + ".0." + split[2]; //$NON-NLS-1$ //$NON-NLS-2$
}
}
return ver;
}
/**
* @param versionComponent
*/
private boolean isAlpha(String versionComponent) {
return versionComponent.length() > 0 && Character.isLetter(versionComponent.charAt(0));
}
/**
* @param versionComponent
*/
private boolean isNumeric(String versionComponent) {
for (int i = 0; i < versionComponent.length(); i++) {
if (!Character.isDigit(versionComponent.charAt(i))) {
return false;
}
}
return true;
}
/**
* Detect if this artifact is already an osgi bundle. If it is, then we don't need
* to wrap it. The best way to figure this out is to crack open the JAR and take a look
* at the manifest.
* @param artifact
* @throws Exception
*/
private boolean isBundle(Artifact artifact) throws Exception {
// Resolve the artifact.
ArtifactResolutionRequest request = new ArtifactResolutionRequest().setArtifact(artifact);
ArtifactResolutionResult result = repositorySystem.resolve(request);
// If not found, then assume it's a reactor dependency and therefore should be a bundle.
if (result.getArtifacts().isEmpty()) {
getLog().info("Artifact " + artifact.toString() + " not found in local repository, assuming reactor dependency."); //$NON-NLS-1$ //$NON-NLS-2$
return true;
}
artifact = result.getArtifacts().iterator().next();
if (!artifact.getFile().isFile()) {
throw new Exception("Resolved artifact is not a file: " + artifact.getFile().getAbsolutePath()); //$NON-NLS-1$
}
// Crack open the dependency JAR, read the manifest, check for osgi attributes.
JarFile jf = null;
try {
jf = new JarFile(artifact.getFile());
Manifest manifest = jf.getManifest();
if (manifest == null) {
getLog().info("Artifact " + artifact.toString() + " missing a manifest! Assuming not a bundle."); //$NON-NLS-1$ //$NON-NLS-2$
return false;
}
Attributes attributes = manifest.getMainAttributes();
if (attributes != null) {
String value = attributes.getValue("Bundle-SymbolicName"); //$NON-NLS-1$
if (value != null && value.trim().length() > 0) {
return true;
}
}
} finally {
jf.close();
}
return false;
}
/**
* Resolves the given artifact to a maven project.
* @param artifact
* @throws Exception
*/
private MavenProject resolveProject(Artifact artifact) throws Exception {
Artifact pomArtifact = repositorySystem.createArtifact(artifact.getGroupId(), artifact.getArtifactId(), artifact.getVersion(), "", "pom"); //$NON-NLS-1$ //$NON-NLS-2$
ArtifactResolutionRequest request = new ArtifactResolutionRequest();
request.setArtifact(pomArtifact);
ArtifactResolutionResult resolved = repositorySystem.resolve(request);
pomArtifact = resolved.getArtifacts().iterator().next();
InputStream contentStream = null;
MavenProject project = null;
try {
contentStream = new FileInputStream(pomArtifact.getFile());
Model model = new MavenXpp3Reader().read(contentStream);
project = new MavenProject(model);
} finally {
contentStream.close();
}
return project;
}
}
| support classifiers
| overlord-commons-maven-plugin/src/main/java/org/overlord/commons/maven/plugin/GenerateFeaturesXmlMojo.java | support classifiers |
|
Java | apache-2.0 | 47a9bae2a519cb79b40c73f8df2f15941b946b0e | 0 | hasinitg/airavata,gouravshenoy/airavata,anujbhan/airavata,apache/airavata,anujbhan/airavata,glahiru/airavata,glahiru/airavata,jjj117/airavata,hasinitg/airavata,dogless/airavata,machristie/airavata,dogless/airavata,gouravshenoy/airavata,dogless/airavata,anujbhan/airavata,jjj117/airavata,glahiru/airavata,apache/airavata,apache/airavata,machristie/airavata,glahiru/airavata,machristie/airavata,dogless/airavata,gouravshenoy/airavata,machristie/airavata,apache/airavata,anujbhan/airavata,anujbhan/airavata,machristie/airavata,apache/airavata,anujbhan/airavata,dogless/airavata,gouravshenoy/airavata,jjj117/airavata,anujbhan/airavata,machristie/airavata,apache/airavata,jjj117/airavata,machristie/airavata,jjj117/airavata,gouravshenoy/airavata,hasinitg/airavata,hasinitg/airavata,gouravshenoy/airavata,gouravshenoy/airavata,hasinitg/airavata,hasinitg/airavata,glahiru/airavata,apache/airavata,jjj117/airavata,apache/airavata,dogless/airavata | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.api.impl;
import junit.framework.Assert;
import org.apache.airavata.commons.gfac.type.ApplicationDeploymentDescription;
import org.apache.airavata.commons.gfac.type.HostDescription;
import org.apache.airavata.commons.gfac.type.ServiceDescription;
import org.apache.airavata.registry.api.exception.RegistryException;
import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
import org.apache.airavata.schemas.gfac.InputParameterType;
import org.apache.airavata.schemas.gfac.OutputParameterType;
import org.apache.airavata.schemas.gfac.StringParameterType;
import org.junit.Before;
import org.junit.Test;
import javax.jcr.RepositoryException;
import java.util.ArrayList;
import java.util.List;
public class JCRRegistrySearchTest {
@Before
public void setUp() throws Exception {
/*
* Create database
*/
JCRRegistry jcrRegistry = new JCRRegistry(null,
"org.apache.jackrabbit.core.RepositoryFactoryImpl", "admin",
"admin", null);
/*
* Host
*/
HostDescription host = new HostDescription();
host.getType().setHostName("localhostsearch");
host.getType().setHostAddress("localhost");
HostDescription host1 = new HostDescription();
host1.getType().setHostName("localhost");
host1.getType().setHostAddress("121.121.12.121");
/*
* App
*/
ApplicationDeploymentDescription appDesc = new ApplicationDeploymentDescription();
ApplicationDeploymentDescriptionType app = appDesc.getType();
ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
name.setStringValue("EchoLocalSearch");
app.setApplicationName(name);
app.setExecutableLocation("/bin/echo");
app.setScratchWorkingDirectory("/tmp");
app.setStaticWorkingDirectory("/tmp");
app.setInputDataDirectory("/tmp/input");
app.setOutputDataDirectory("/tmp/output");
app.setStandardOutput("/tmp/echo.stdout");
app.setStandardError("/tmp/echo.stdout");
/*
* Service
*/
ServiceDescription serv = new ServiceDescription();
serv.getType().setName("SimpleEchoSearch");
ServiceDescription serv1 = new ServiceDescription();
serv1.getType().setName("MathService");
List<InputParameterType> inputList = new ArrayList<InputParameterType>();
InputParameterType input = InputParameterType.Factory.newInstance();
input.setParameterName("echo_input");
input.setParameterType(StringParameterType.Factory.newInstance());
inputList.add(input);
InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList
.size()]);
List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
OutputParameterType output = OutputParameterType.Factory.newInstance();
output.setParameterName("echo_output");
output.setParameterType(StringParameterType.Factory.newInstance());
outputList.add(output);
OutputParameterType[] outputParamList = outputList
.toArray(new OutputParameterType[outputList.size()]);
serv.getType().setInputParametersArray(inputParamList);
serv.getType().setOutputParametersArray(outputParamList);
serv1.getType().setInputParametersArray(inputParamList);
serv1.getType().setOutputParametersArray(outputParamList);
/*
* Save to registry
*/
jcrRegistry.saveHostDescription(host);
jcrRegistry.saveHostDescription(host1);
jcrRegistry.saveDeploymentDescription(serv.getType().getName(), host
.getType().getHostName(), appDesc);
jcrRegistry.saveDeploymentDescription(serv1.getType().getName(), host
.getType().getHostName(), appDesc);
jcrRegistry.saveDeploymentDescription(serv1.getType().getName(), host1
.getType().getHostName(), appDesc);
jcrRegistry.saveServiceDescription(serv);
jcrRegistry.saveServiceDescription(serv1);
jcrRegistry.deployServiceOnHost(serv.getType().getName(), host
.getType().getHostName());
jcrRegistry.deployServiceOnHost(serv1.getType().getName(), host
.getType().getHostName());
jcrRegistry.deployServiceOnHost(serv1.getType().getName(), host1
.getType().getHostName());
}
@Test
public void searchServiceDescriptionTest() {
try {
JCRRegistry jcrRegistry = new JCRRegistry(null,
"org.apache.jackrabbit.core.RepositoryFactoryImpl", "admin",
"admin", null);
List<ServiceDescription> simpleEcho = jcrRegistry.searchServiceDescription("SimpleEchoSearch");
if(simpleEcho.size() == 0){
Assert.assertTrue(false);
}else{
Assert.assertEquals("SimpleEchoSearch",simpleEcho.get(0).getType().getName());
}
} catch (RepositoryException e) {
e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
} catch (RegistryException e) {
e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
}
Assert.assertTrue(true);
}
// @Test
// public void searchDeploymentDescriptorTest() {
// try {
// JCRRegistry jcrRegistry = new JCRRegistry(null,
// "org.apache.jackrabbit.core.RepositoryFactoryImpl", "admin",
// "admin", null);
// jcrRegistry.searchDeploymentDescription();
// List<ServiceDescription> simpleEcho = jcrRegistry.searchServiceDescription("SimpleEchoSearch");
// if(simpleEcho.size() == 0){
// Assert.assertTrue(false);
// }else{
// Assert.assertEquals("SimpleEchoSearch",simpleEcho.get(0).getType().getName());
// }
// } catch (RepositoryException e) {
// e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
// } catch (RegistryException e) {
// e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
// }
// Assert.assertTrue(true);
// }
}
| modules/commons/registry-api/src/test/java/org/apache/airavata/registry/api/impl/JCRRegistrySearchTest.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.airavata.registry.api.impl;
import junit.framework.Assert;
import org.apache.airavata.commons.gfac.type.ApplicationDeploymentDescription;
import org.apache.airavata.commons.gfac.type.HostDescription;
import org.apache.airavata.commons.gfac.type.ServiceDescription;
import org.apache.airavata.registry.api.exception.RegistryException;
import org.apache.airavata.schemas.gfac.ApplicationDeploymentDescriptionType;
import org.apache.airavata.schemas.gfac.InputParameterType;
import org.apache.airavata.schemas.gfac.OutputParameterType;
import org.apache.airavata.schemas.gfac.StringParameterType;
import org.junit.Before;
import org.junit.Test;
import javax.jcr.RepositoryException;
import java.util.ArrayList;
import java.util.List;
public class JCRRegistrySearchTest {
@Before
public void setUp() throws Exception {
/*
* Create database
*/
JCRRegistry jcrRegistry = new JCRRegistry(null,
"org.apache.jackrabbit.core.RepositoryFactoryImpl", "admin",
"admin", null);
/*
* Host
*/
HostDescription host = new HostDescription();
host.getType().setHostName("localhostsearch");
host.getType().setHostAddress("localhost");
HostDescription host1 = new HostDescription();
host1.getType().setHostName("localhost");
host1.getType().setHostAddress("121.121.12.121");
/*
* App
*/
ApplicationDeploymentDescription appDesc = new ApplicationDeploymentDescription();
ApplicationDeploymentDescriptionType app = appDesc.getType();
ApplicationDeploymentDescriptionType.ApplicationName name = ApplicationDeploymentDescriptionType.ApplicationName.Factory.newInstance();
name.setStringValue("EchoLocalSearch");
app.setApplicationName(name);
app.setExecutableLocation("/bin/echo");
app.setScratchWorkingDirectory("/tmp");
app.setStaticWorkingDirectory("/tmp");
app.setInputDataDirectory("/tmp/input");
app.setOutputDataDirectory("/tmp/output");
app.setStandardOutput("/tmp/echo.stdout");
app.setStandardError("/tmp/echo.stdout");
/*
* Service
*/
ServiceDescription serv = new ServiceDescription();
serv.getType().setName("SimpleEchoSearch");
ServiceDescription serv1 = new ServiceDescription();
serv1.getType().setName("MathService");
List<InputParameterType> inputList = new ArrayList<InputParameterType>();
InputParameterType input = InputParameterType.Factory.newInstance();
input.setParameterName("echo_input");
input.setParameterType(StringParameterType.Factory.newInstance());
inputList.add(input);
InputParameterType[] inputParamList = inputList.toArray(new InputParameterType[inputList
.size()]);
List<OutputParameterType> outputList = new ArrayList<OutputParameterType>();
OutputParameterType output = OutputParameterType.Factory.newInstance();
output.setParameterName("echo_output");
output.setParameterType(StringParameterType.Factory.newInstance());
outputList.add(output);
OutputParameterType[] outputParamList = outputList
.toArray(new OutputParameterType[outputList.size()]);
serv.getType().setInputParametersArray(inputParamList);
serv.getType().setOutputParametersArray(outputParamList);
serv1.getType().setInputParametersArray(inputParamList);
serv1.getType().setOutputParametersArray(outputParamList);
/*
* Save to registry
*/
jcrRegistry.saveHostDescription(host);
jcrRegistry.saveHostDescription(host1);
jcrRegistry.saveDeploymentDescription(serv.getType().getName(), host
.getType().getHostName(), appDesc);
jcrRegistry.saveDeploymentDescription(serv1.getType().getName(), host
.getType().getHostName(), appDesc);
jcrRegistry.saveDeploymentDescription(serv1.getType().getName(), host1
.getType().getHostName(), appDesc);
jcrRegistry.saveServiceDescription(serv);
jcrRegistry.saveServiceDescription(serv1);
jcrRegistry.deployServiceOnHost(serv.getType().getName(), host
.getType().getHostName());
jcrRegistry.deployServiceOnHost(serv1.getType().getName(), host
.getType().getHostName());
jcrRegistry.deployServiceOnHost(serv1.getType().getName(), host1
.getType().getHostName());
}
@Test
public void searchServiceDescriptionTest() {
try {
JCRRegistry jcrRegistry = new JCRRegistry(null,
"org.apache.jackrabbit.core.RepositoryFactoryImpl", "admin",
"admin", null);
List<ServiceDescription> simpleEcho = jcrRegistry.searchServiceDescription("SimpleEchoSearch");
if(simpleEcho.size() == 0){
Assert.assertTrue(false);
}else{
Assert.assertEquals("SimpleEchoSearch",simpleEcho.get(0).getType().getName());
}
} catch (RepositoryException e) {
e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
} catch (RegistryException e) {
e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
}
Assert.assertTrue(true);
}
@Test
public void searchDeploymentDescriptorTest() {
try {
JCRRegistry jcrRegistry = new JCRRegistry(null,
"org.apache.jackrabbit.core.RepositoryFactoryImpl", "admin",
"admin", null);
jcrRegistry.searchDeploymentDescription()
List<ServiceDescription> simpleEcho = jcrRegistry.searchServiceDescription("SimpleEchoSearch");
if(simpleEcho.size() == 0){
Assert.assertTrue(false);
}else{
Assert.assertEquals("SimpleEchoSearch",simpleEcho.get(0).getType().getName());
}
} catch (RepositoryException e) {
e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
} catch (RegistryException e) {
e.printStackTrace(); //To change body of catch statement use File | Settings | File Templates.
}
Assert.assertTrue(true);
}
}
| fixing compilation error.
git-svn-id: 64c7115bac0e45f25b6ef7317621bf38f6d5f89e@1221988 13f79535-47bb-0310-9956-ffa450edef68
| modules/commons/registry-api/src/test/java/org/apache/airavata/registry/api/impl/JCRRegistrySearchTest.java | fixing compilation error. |
|
Java | apache-2.0 | f43b9c395f2b80b86cd7d55b084b6db3627667f1 | 0 | apache/activemq-artemis,kjniemi/activemq-artemis,jbertram/activemq-artemis,clebertsuconic/activemq-artemis,apache/activemq-artemis,andytaylor/activemq-artemis,clebertsuconic/activemq-artemis,graben/activemq-artemis,clebertsuconic/activemq-artemis,tabish121/activemq-artemis,kjniemi/activemq-artemis,kjniemi/activemq-artemis,andytaylor/activemq-artemis,apache/activemq-artemis,tabish121/activemq-artemis,andytaylor/activemq-artemis,tabish121/activemq-artemis,graben/activemq-artemis,clebertsuconic/activemq-artemis,graben/activemq-artemis,tabish121/activemq-artemis,TomRoss/activemq-artemis,jbertram/activemq-artemis,TomRoss/activemq-artemis,kjniemi/activemq-artemis,TomRoss/activemq-artemis,jbertram/activemq-artemis,kjniemi/activemq-artemis,andytaylor/activemq-artemis,apache/activemq-artemis,TomRoss/activemq-artemis,graben/activemq-artemis,jbertram/activemq-artemis | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.jdbc.store.drivers;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement;
import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.Executor;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Stream;
import org.apache.activemq.artemis.jdbc.store.logging.LoggingConnection;
import org.apache.activemq.artemis.jdbc.store.sql.SQLProvider;
import org.apache.activemq.artemis.journal.ActiveMQJournalLogger;
import org.jboss.logging.Logger;
/**
* Class to hold common database functionality such as drivers and connections
*/
@SuppressWarnings("SynchronizeOnNonFinalField")
public abstract class AbstractJDBCDriver {
private static final Logger logger = Logger.getLogger(AbstractJDBCDriver.class);
protected Connection connection;
protected SQLProvider sqlProvider;
private String jdbcConnectionUrl;
private String jdbcDriverClass;
private DataSource dataSource;
private Executor networkTimeoutExecutor;
private int networkTimeoutMillis;
private String user;
private String password;
public AbstractJDBCDriver() {
this.networkTimeoutExecutor = null;
this.networkTimeoutMillis = -1;
}
public AbstractJDBCDriver(SQLProvider sqlProvider, String jdbcConnectionUrl, String user, String password, String jdbcDriverClass) {
this.jdbcConnectionUrl = jdbcConnectionUrl;
this.user = user;
this.password = password;
this.jdbcDriverClass = jdbcDriverClass;
this.sqlProvider = sqlProvider;
this.networkTimeoutExecutor = null;
this.networkTimeoutMillis = -1;
}
public AbstractJDBCDriver(DataSource dataSource, SQLProvider provider) {
this.dataSource = dataSource;
this.sqlProvider = provider;
this.networkTimeoutExecutor = null;
this.networkTimeoutMillis = -1;
}
public void start() throws SQLException {
connect();
synchronized (connection) {
createSchema();
prepareStatements();
}
}
public AbstractJDBCDriver(Connection connection, SQLProvider sqlProvider) {
if (logger.isTraceEnabled() && !(connection instanceof LoggingConnection)) {
this.connection = new LoggingConnection(connection, logger);
} else {
this.connection = connection;
}
this.sqlProvider = sqlProvider;
this.networkTimeoutExecutor = null;
this.networkTimeoutMillis = -1;
}
public void stop() throws SQLException {
synchronized (connection) {
if (sqlProvider.closeConnectionOnShutdown()) {
try {
connection.setAutoCommit(true);
connection.close();
} catch (SQLException e) {
logger.error(JDBCUtils.appendSQLExceptionDetails(new StringBuilder(), e));
throw e;
}
}
}
}
protected abstract void prepareStatements() throws SQLException;
protected abstract void createSchema() throws SQLException;
protected final void createTable(String... schemaSqls) throws SQLException {
createTableIfNotExists(sqlProvider.getTableName(), schemaSqls);
}
private void connect() throws SQLException {
if (connection == null) {
if (dataSource != null) {
try {
connection = dataSource.getConnection();
if (logger.isTraceEnabled() && !(connection instanceof LoggingConnection)) {
this.connection = new LoggingConnection(connection, logger);
}
} catch (SQLException e) {
logger.error(JDBCUtils.appendSQLExceptionDetails(new StringBuilder(), e));
throw e;
}
} else {
try {
if (jdbcDriverClass == null || jdbcDriverClass.isEmpty()) {
throw new IllegalStateException("jdbcDriverClass is null or empty!");
}
if (jdbcConnectionUrl == null || jdbcConnectionUrl.isEmpty()) {
throw new IllegalStateException("jdbcConnectionUrl is null or empty!");
}
final Driver dbDriver = getDriver(jdbcDriverClass);
Properties properties = new Properties();
if (user != null) {
properties.setProperty("user", user);
properties.setProperty("password", password);
}
connection = dbDriver.connect(jdbcConnectionUrl, properties);
if (logger.isTraceEnabled() && !(connection instanceof LoggingConnection)) {
this.connection = new LoggingConnection(connection, logger);
}
if (connection == null) {
throw new IllegalStateException("the driver: " + jdbcDriverClass + " isn't able to connect to the requested url: " + jdbcConnectionUrl);
}
} catch (SQLException e) {
logger.error(JDBCUtils.appendSQLExceptionDetails(new StringBuilder(), e));
ActiveMQJournalLogger.LOGGER.error("Unable to connect to database using URL: " + jdbcConnectionUrl);
throw e;
}
}
if (this.networkTimeoutMillis >= 0 && this.networkTimeoutExecutor == null) {
logger.warn("Unable to set a network timeout on the JDBC connection: networkTimeoutExecutor is null");
}
if (this.networkTimeoutMillis >= 0 && this.networkTimeoutExecutor != null) {
try {
connection.setNetworkTimeout(this.networkTimeoutExecutor, this.networkTimeoutMillis);
} catch (SQLException e) {
logger.warn(JDBCUtils.appendSQLExceptionDetails(new StringBuilder(), e));
ActiveMQJournalLogger.LOGGER.warn("Unable to set a network timeout on the JDBC connection");
} catch (Throwable throwable) {
            // this includes SecurityException and UnsupportedOperationException
logger.warn("Unable to set a network timeout on the JDBC connection", throwable);
}
}
}
}
public void destroy() throws Exception {
final String dropTableSql = "DROP TABLE " + sqlProvider.getTableName();
try {
connection.setAutoCommit(false);
try (Statement statement = connection.createStatement()) {
statement.executeUpdate(dropTableSql);
}
connection.commit();
} catch (SQLException e) {
logger.error(JDBCUtils.appendSQLExceptionDetails(new StringBuilder(), e, dropTableSql));
try {
connection.rollback();
} catch (SQLException rollbackEx) {
logger.error(JDBCUtils.appendSQLExceptionDetails(new StringBuilder(), rollbackEx, dropTableSql));
throw rollbackEx;
}
throw e;
}
}
private void createTableIfNotExists(String tableName, String... sqls) throws SQLException {
logger.tracef("Validating if table %s didn't exist before creating", tableName);
try {
connection.setAutoCommit(false);
final boolean tableExists;
try (ResultSet rs = connection.getMetaData().getTables(null, null, tableName, null)) {
if (rs == null || !rs.next()) {
tableExists = false;
if (logger.isTraceEnabled()) {
logger.tracef("Table %s did not exist, creating it with SQL=%s", tableName, Arrays.toString(sqls));
}
if (rs != null) {
final SQLWarning sqlWarning = rs.getWarnings();
if (sqlWarning != null) {
logger.warn(JDBCUtils.appendSQLExceptionDetails(new StringBuilder(), sqlWarning));
}
}
} else {
tableExists = true;
}
}
if (tableExists) {
logger.tracef("Validating if the existing table %s is initialized or not", tableName);
try (Statement statement = connection.createStatement();
ResultSet cntRs = statement.executeQuery(sqlProvider.getCountJournalRecordsSQL())) {
logger.tracef("Validation of the existing table %s initialization is started", tableName);
int rows;
if (cntRs.next() && (rows = cntRs.getInt(1)) > 0) {
logger.tracef("Table %s did exist but is not empty. Skipping initialization. Found %d rows.", tableName, rows);
if (logger.isDebugEnabled()) {
final long expectedRows = Stream.of(sqls).map(String::toUpperCase).filter(sql -> sql.contains("INSERT INTO")).count();
if (rows < expectedRows) {
logger.debug("Table " + tableName + " was expected to contain " + expectedRows + " rows while it has " + rows + " rows.");
}
}
connection.commit();
return;
} else {
sqls = Stream.of(sqls).filter(sql -> {
final String upperCaseSql = sql.toUpperCase();
return !(upperCaseSql.contains("CREATE TABLE") || upperCaseSql.contains("CREATE INDEX"));
}).toArray(String[]::new);
if (sqls.length > 0) {
logger.tracef("Table %s did exist but is empty. Starting initialization.", tableName);
} else {
logger.tracef("Table %s did exist but is empty. Initialization completed: no initialization statements left.", tableName);
}
}
} catch (SQLException e) {
//that's not a real issue and do not deserve any user-level log:
//some DBMS just return stale information about table existence
//and can fail on later attempts to access them
if (logger.isTraceEnabled()) {
logger.trace(JDBCUtils.appendSQLExceptionDetails(new StringBuilder("Can't verify the initialization of table ").append(tableName).append(" due to:"), e, sqlProvider.getCountJournalRecordsSQL()));
}
try {
connection.rollback();
} catch (SQLException rollbackEx) {
logger.debug("Rollback failed while validating initialization of a table", rollbackEx);
}
connection.setAutoCommit(false);
logger.tracef("Table %s seems to exist, but we can't verify the initialization. Keep trying to create and initialize.", tableName);
}
}
if (sqls.length > 0) {
try (Statement statement = connection.createStatement()) {
for (String sql : sqls) {
statement.executeUpdate(sql);
final SQLWarning statementSqlWarning = statement.getWarnings();
if (statementSqlWarning != null) {
logger.warn(JDBCUtils.appendSQLExceptionDetails(new StringBuilder(), statementSqlWarning, sql));
}
}
}
connection.commit();
}
} catch (SQLException e) {
final String sqlStatements = String.join("\n", sqls);
logger.error(JDBCUtils.appendSQLExceptionDetails(new StringBuilder(), e, sqlStatements));
try {
connection.rollback();
} catch (SQLException rollbackEx) {
logger.error(JDBCUtils.appendSQLExceptionDetails(new StringBuilder(), rollbackEx, sqlStatements));
throw rollbackEx;
}
throw e;
}
}
   private static final AtomicBoolean shutAdded = new AtomicBoolean(false);
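   // ShutdownDerby is a static nested class (rather than an anonymous Thread) so that
   // registering the shutdown hook does not capture the enclosing AbstractJDBCDriver
   // instance; the AtomicBoolean above ensures the hook is registered at most once per JVM.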
private static class ShutdownDerby extends Thread {
@Override
public void run() {
try {
DriverManager.getConnection("jdbc:derby:;shutdown=true");
} catch (Exception e) {
}
}
}
private Driver getDriver(String className) {
try {
Driver driver = (Driver) Class.forName(className).newInstance();
// Shutdown the derby if using the derby embedded driver.
if (className.equals("org.apache.derby.jdbc.EmbeddedDriver")) {
if (shutAdded.compareAndSet(false, true)) {
Runtime.getRuntime().addShutdownHook(new ShutdownDerby());
}
}
return driver;
} catch (ClassNotFoundException cnfe) {
throw new RuntimeException("Could not find class: " + className);
} catch (Exception e) {
throw new RuntimeException("Unable to instantiate driver class: ", e);
}
}
public Connection getConnection() {
return connection;
}
public final void setConnection(Connection connection) {
if (this.connection == null) {
if (logger.isTraceEnabled() && !(connection instanceof LoggingConnection)) {
this.connection = new LoggingConnection(connection, logger);
} else {
this.connection = connection;
}
}
}
public void setSqlProvider(SQLProvider sqlProvider) {
this.sqlProvider = sqlProvider;
}
public void setJdbcConnectionUrl(String jdbcConnectionUrl) {
this.jdbcConnectionUrl = jdbcConnectionUrl;
}
public String getUser() {
return user;
}
public void setUser(String user) {
this.user = user;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public void setJdbcDriverClass(String jdbcDriverClass) {
this.jdbcDriverClass = jdbcDriverClass;
}
public void setDataSource(DataSource dataSource) {
this.dataSource = dataSource;
}
public void setNetworkTimeout(Executor executor, int milliseconds) {
this.networkTimeoutExecutor = executor;
this.networkTimeoutMillis = milliseconds;
}
}
| artemis-jdbc-store/src/main/java/org/apache/activemq/artemis/jdbc/store/drivers/AbstractJDBCDriver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.jdbc.store.drivers;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.Driver;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.SQLWarning;
import java.sql.Statement;
import java.util.Arrays;
import java.util.Properties;
import java.util.concurrent.Executor;
import java.util.stream.Stream;
import org.apache.activemq.artemis.jdbc.store.logging.LoggingConnection;
import org.apache.activemq.artemis.jdbc.store.sql.SQLProvider;
import org.apache.activemq.artemis.journal.ActiveMQJournalLogger;
import org.jboss.logging.Logger;
/**
* Class to hold common database functionality such as drivers and connections
*/
@SuppressWarnings("SynchronizeOnNonFinalField")
public abstract class AbstractJDBCDriver {
private static final Logger logger = Logger.getLogger(AbstractJDBCDriver.class);
protected Connection connection;
protected SQLProvider sqlProvider;
private String jdbcConnectionUrl;
private String jdbcDriverClass;
private DataSource dataSource;
private Executor networkTimeoutExecutor;
private int networkTimeoutMillis;
private String user;
private String password;
public AbstractJDBCDriver() {
this.networkTimeoutExecutor = null;
this.networkTimeoutMillis = -1;
}
public AbstractJDBCDriver(SQLProvider sqlProvider, String jdbcConnectionUrl, String user, String password, String jdbcDriverClass) {
this.jdbcConnectionUrl = jdbcConnectionUrl;
this.user = user;
this.password = password;
this.jdbcDriverClass = jdbcDriverClass;
this.sqlProvider = sqlProvider;
this.networkTimeoutExecutor = null;
this.networkTimeoutMillis = -1;
}
public AbstractJDBCDriver(DataSource dataSource, SQLProvider provider) {
this.dataSource = dataSource;
this.sqlProvider = provider;
this.networkTimeoutExecutor = null;
this.networkTimeoutMillis = -1;
}
public void start() throws SQLException {
connect();
synchronized (connection) {
createSchema();
prepareStatements();
}
}
public AbstractJDBCDriver(Connection connection, SQLProvider sqlProvider) {
if (logger.isTraceEnabled() && !(connection instanceof LoggingConnection)) {
this.connection = new LoggingConnection(connection, logger);
} else {
this.connection = connection;
}
this.sqlProvider = sqlProvider;
this.networkTimeoutExecutor = null;
this.networkTimeoutMillis = -1;
}
public void stop() throws SQLException {
synchronized (connection) {
if (sqlProvider.closeConnectionOnShutdown()) {
try {
connection.setAutoCommit(true);
connection.close();
} catch (SQLException e) {
logger.error(JDBCUtils.appendSQLExceptionDetails(new StringBuilder(), e));
throw e;
}
}
}
}
protected abstract void prepareStatements() throws SQLException;
protected abstract void createSchema() throws SQLException;
protected final void createTable(String... schemaSqls) throws SQLException {
createTableIfNotExists(sqlProvider.getTableName(), schemaSqls);
}
private void connect() throws SQLException {
if (connection == null) {
if (dataSource != null) {
try {
connection = dataSource.getConnection();
if (logger.isTraceEnabled() && !(connection instanceof LoggingConnection)) {
this.connection = new LoggingConnection(connection, logger);
}
} catch (SQLException e) {
logger.error(JDBCUtils.appendSQLExceptionDetails(new StringBuilder(), e));
throw e;
}
} else {
try {
if (jdbcDriverClass == null || jdbcDriverClass.isEmpty()) {
throw new IllegalStateException("jdbcDriverClass is null or empty!");
}
if (jdbcConnectionUrl == null || jdbcConnectionUrl.isEmpty()) {
throw new IllegalStateException("jdbcConnectionUrl is null or empty!");
}
final Driver dbDriver = getDriver(jdbcDriverClass);
Properties properties = new Properties();
if (user != null) {
properties.setProperty("user", user);
properties.setProperty("password", password);
}
connection = dbDriver.connect(jdbcConnectionUrl, properties);
if (logger.isTraceEnabled() && !(connection instanceof LoggingConnection)) {
this.connection = new LoggingConnection(connection, logger);
}
if (connection == null) {
throw new IllegalStateException("the driver: " + jdbcDriverClass + " isn't able to connect to the requested url: " + jdbcConnectionUrl);
}
} catch (SQLException e) {
logger.error(JDBCUtils.appendSQLExceptionDetails(new StringBuilder(), e));
ActiveMQJournalLogger.LOGGER.error("Unable to connect to database using URL: " + jdbcConnectionUrl);
throw e;
}
}
if (this.networkTimeoutMillis >= 0 && this.networkTimeoutExecutor == null) {
logger.warn("Unable to set a network timeout on the JDBC connection: networkTimeoutExecutor is null");
}
if (this.networkTimeoutMillis >= 0 && this.networkTimeoutExecutor != null) {
try {
connection.setNetworkTimeout(this.networkTimeoutExecutor, this.networkTimeoutMillis);
} catch (SQLException e) {
logger.warn(JDBCUtils.appendSQLExceptionDetails(new StringBuilder(), e));
ActiveMQJournalLogger.LOGGER.warn("Unable to set a network timeout on the JDBC connection");
} catch (Throwable throwable) {
            // this includes SecurityException and UnsupportedOperationException
logger.warn("Unable to set a network timeout on the JDBC connection", throwable);
}
}
}
}
public void destroy() throws Exception {
final String dropTableSql = "DROP TABLE " + sqlProvider.getTableName();
try {
connection.setAutoCommit(false);
try (Statement statement = connection.createStatement()) {
statement.executeUpdate(dropTableSql);
}
connection.commit();
} catch (SQLException e) {
logger.error(JDBCUtils.appendSQLExceptionDetails(new StringBuilder(), e, dropTableSql));
try {
connection.rollback();
} catch (SQLException rollbackEx) {
logger.error(JDBCUtils.appendSQLExceptionDetails(new StringBuilder(), rollbackEx, dropTableSql));
throw rollbackEx;
}
throw e;
}
}
private void createTableIfNotExists(String tableName, String... sqls) throws SQLException {
logger.tracef("Validating if table %s didn't exist before creating", tableName);
try {
connection.setAutoCommit(false);
final boolean tableExists;
try (ResultSet rs = connection.getMetaData().getTables(null, null, tableName, null)) {
if (rs == null || !rs.next()) {
tableExists = false;
if (logger.isTraceEnabled()) {
logger.tracef("Table %s did not exist, creating it with SQL=%s", tableName, Arrays.toString(sqls));
}
if (rs != null) {
final SQLWarning sqlWarning = rs.getWarnings();
if (sqlWarning != null) {
logger.warn(JDBCUtils.appendSQLExceptionDetails(new StringBuilder(), sqlWarning));
}
}
} else {
tableExists = true;
}
}
if (tableExists) {
logger.tracef("Validating if the existing table %s is initialized or not", tableName);
try (Statement statement = connection.createStatement();
ResultSet cntRs = statement.executeQuery(sqlProvider.getCountJournalRecordsSQL())) {
logger.tracef("Validation of the existing table %s initialization is started", tableName);
int rows;
if (cntRs.next() && (rows = cntRs.getInt(1)) > 0) {
logger.tracef("Table %s did exist but is not empty. Skipping initialization. Found %d rows.", tableName, rows);
if (logger.isDebugEnabled()) {
final long expectedRows = Stream.of(sqls).map(String::toUpperCase).filter(sql -> sql.contains("INSERT INTO")).count();
if (rows < expectedRows) {
logger.debug("Table " + tableName + " was expected to contain " + expectedRows + " rows while it has " + rows + " rows.");
}
}
connection.commit();
return;
} else {
sqls = Stream.of(sqls).filter(sql -> {
final String upperCaseSql = sql.toUpperCase();
return !(upperCaseSql.contains("CREATE TABLE") || upperCaseSql.contains("CREATE INDEX"));
}).toArray(String[]::new);
if (sqls.length > 0) {
logger.tracef("Table %s did exist but is empty. Starting initialization.", tableName);
} else {
logger.tracef("Table %s did exist but is empty. Initialization completed: no initialization statements left.", tableName);
}
}
} catch (SQLException e) {
//that's not a real issue and do not deserve any user-level log:
//some DBMS just return stale information about table existence
//and can fail on later attempts to access them
if (logger.isTraceEnabled()) {
logger.trace(JDBCUtils.appendSQLExceptionDetails(new StringBuilder("Can't verify the initialization of table ").append(tableName).append(" due to:"), e, sqlProvider.getCountJournalRecordsSQL()));
}
try {
connection.rollback();
} catch (SQLException rollbackEx) {
logger.debug("Rollback failed while validating initialization of a table", rollbackEx);
}
connection.setAutoCommit(false);
logger.tracef("Table %s seems to exist, but we can't verify the initialization. Keep trying to create and initialize.", tableName);
}
}
if (sqls.length > 0) {
try (Statement statement = connection.createStatement()) {
for (String sql : sqls) {
statement.executeUpdate(sql);
final SQLWarning statementSqlWarning = statement.getWarnings();
if (statementSqlWarning != null) {
logger.warn(JDBCUtils.appendSQLExceptionDetails(new StringBuilder(), statementSqlWarning, sql));
}
}
}
connection.commit();
}
} catch (SQLException e) {
final String sqlStatements = String.join("\n", sqls);
logger.error(JDBCUtils.appendSQLExceptionDetails(new StringBuilder(), e, sqlStatements));
try {
connection.rollback();
} catch (SQLException rollbackEx) {
logger.error(JDBCUtils.appendSQLExceptionDetails(new StringBuilder(), rollbackEx, sqlStatements));
throw rollbackEx;
}
throw e;
}
}
private Driver getDriver(String className) {
try {
Driver driver = (Driver) Class.forName(className).newInstance();
// Shutdown the derby if using the derby embedded driver.
if (className.equals("org.apache.derby.jdbc.EmbeddedDriver")) {
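            // Note: this anonymous Thread is an inner class, so once registered as a
            // shutdown hook it keeps the enclosing AbstractJDBCDriver instance reachable
            // for the life of the JVM; this is the leak addressed by the new version above.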
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
try {
DriverManager.getConnection("jdbc:derby:;shutdown=true");
} catch (Exception e) {
}
}
});
}
return driver;
} catch (ClassNotFoundException cnfe) {
throw new RuntimeException("Could not find class: " + className);
} catch (Exception e) {
throw new RuntimeException("Unable to instantiate driver class: ", e);
}
}
public Connection getConnection() {
return connection;
}
public final void setConnection(Connection connection) {
if (this.connection == null) {
if (logger.isTraceEnabled() && !(connection instanceof LoggingConnection)) {
this.connection = new LoggingConnection(connection, logger);
} else {
this.connection = connection;
}
}
}
public void setSqlProvider(SQLProvider sqlProvider) {
this.sqlProvider = sqlProvider;
}
public void setJdbcConnectionUrl(String jdbcConnectionUrl) {
this.jdbcConnectionUrl = jdbcConnectionUrl;
}
public String getUser() {
return user;
}
public void setUser(String user) {
this.user = user;
}
public String getPassword() {
return password;
}
public void setPassword(String password) {
this.password = password;
}
public void setJdbcDriverClass(String jdbcDriverClass) {
this.jdbcDriverClass = jdbcDriverClass;
}
public void setDataSource(DataSource dataSource) {
this.dataSource = dataSource;
}
public void setNetworkTimeout(Executor executor, int milliseconds) {
this.networkTimeoutExecutor = executor;
this.networkTimeoutMillis = milliseconds;
}
}
| NO-JIRA Fixing memory leak on testsuite
AbstractJDBCDriver would be kept reachable through an anonymous inner class (the Derby shutdown hook),
and the driver in turn would hold an ActiveMQServerImpl.
That means servers would be leaking for the entire duration of the testsuite when using JDBC.
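In a minimal sketch (class names here are illustrative, not the actual broker types), the difference is:

// Leaky shape: the anonymous Thread is an inner class, so it captures a reference
// to the enclosing driver instance, and Runtime keeps every registered shutdown
// hook strongly reachable until the JVM exits.
class LeakyDriver {
    void registerHook() {
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                // implicitly holds LeakyDriver.this
            }
        });
    }
}

// Fixed shape: a static nested class captures no enclosing instance, so only the
// small hook object stays reachable for the life of the JVM.
class FixedDriver {
    private static class Hook extends Thread {
        @Override
        public void run() {
            // no reference to any FixedDriver instance
        }
    }
    void registerHook() {
        Runtime.getRuntime().addShutdownHook(new Hook());
    }
}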
| artemis-jdbc-store/src/main/java/org/apache/activemq/artemis/jdbc/store/drivers/AbstractJDBCDriver.java | NO-JIRA Fixing memory leak on testsuite |
|
Java | apache-2.0 | 8866c6429f057dbd2909051622b966c80d2bc05b | 0 | DescartesResearch/Pet-Supply-Store,DescartesResearch/Pet-Supply-Store,DescartesResearch/Pet-Supply-Store | /**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tools.descartes.petstore.image.setup;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLDecoder;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import javax.imageio.ImageIO;
import tools.descartes.petstore.entities.ImageSize;
import tools.descartes.petstore.entities.Product;
import tools.descartes.petstore.image.ImageDB;
import tools.descartes.petstore.image.ImageProvider;
import tools.descartes.petstore.image.StoreImage;
import tools.descartes.petstore.image.cache.FirstInFirstOut;
import tools.descartes.petstore.image.cache.IDataCache;
import tools.descartes.petstore.image.cache.LastInFirstOut;
import tools.descartes.petstore.image.cache.LeastFrequentlyUsed;
import tools.descartes.petstore.image.cache.LeastRecentlyUsed;
import tools.descartes.petstore.image.cache.MostRecentlyUsed;
import tools.descartes.petstore.image.cache.RandomReplacement;
import tools.descartes.petstore.image.cache.rules.CacheAll;
import tools.descartes.petstore.image.storage.DriveStorage;
import tools.descartes.petstore.image.storage.IDataStorage;
import tools.descartes.petstore.image.storage.LimitedDriveStorage;
import tools.descartes.petstore.image.storage.rules.StoreAll;
import tools.descartes.petstore.image.storage.rules.StoreLargeImages;
import tools.descartes.petstore.registryclient.Service;
import tools.descartes.petstore.registryclient.rest.LoadBalancedCRUDOperations;
public class SetupController {
public final static Path STD_WORKING_DIR = Paths.get("images");
private static SetupController instance = new SetupController();
private List<Long> productIDs = new ArrayList<>();
private StorageRule storageRule = StorageRule.STD_STORAGE_RULE;
private CachingRule cachingRule = CachingRule.STD_CACHING_RULE;
private Path workingDir = STD_WORKING_DIR;
private long cacheSize = IDataCache.STD_MAX_CACHE_SIZE;
private StorageMode storageMode = StorageMode.STD_STORAGE_MODE;
private CachingMode cachingMode = CachingMode.STD_CACHING_MODE;
private int nrOfImagesToGenerate = 0;
private int nrOfImagesPreExisting = 0;
private ImageDB imgDB = new ImageDB();
private List<StoreImage> preCacheImg = new ArrayList<>();
private ImageCreatorRunner imgCreatorRunner;
private Thread imgCreatorThread;
private SetupController() {
createWorkingDir(workingDir);
}
public void setWorkingDir(Path path) {
if (path != null)
workingDir = path;
}
public static SetupController getInstance() {
return instance;
}
private List<Product> fetchProducts() {
return LoadBalancedCRUDOperations.getEntities(Service.PERSISTENCE, "products",
Product.class, -1, -1);
}
private List<Long> convertToIDs(List<Product> products) {
return products.stream().map(product -> product.getId()).collect(Collectors.toList());
}
public void generateImages() {
List<Product> products = fetchProducts();
if (products == null)
return;
List<Long> productIDs = convertToIDs(products);
generateImages(productIDs, productIDs.size());
}
public void generateImages(int nrOfImagesToGenerate) {
List<Product> products = fetchProducts();
if (products == null)
return;
List<Long> productIDs = convertToIDs(products);
generateImages(productIDs, nrOfImagesToGenerate);
}
public void generateImages(List<Long> productIDs, int nrOfImagesToGenerate) {
if (productIDs == null || nrOfImagesToGenerate <= 0)
return;
this.productIDs = productIDs;
this.nrOfImagesToGenerate = nrOfImagesToGenerate;
// Create images
imgCreatorRunner = new ImageCreatorRunner(this.productIDs, STD_WORKING_DIR, imgDB,
ImageCreator.STD_NR_OF_SHAPES_PER_IMAGE, ImageCreator.STD_SEED, ImageSize.STD_IMAGE_SIZE,
nrOfImagesToGenerate);
imgCreatorThread = new Thread(imgCreatorRunner);
imgCreatorThread.start();
}
private void createWorkingDir(Path directory) {
if (!directory.toFile().exists())
if (!directory.toFile().mkdir())
throw new IllegalArgumentException("Standard working directory \""
+ directory.toAbsolutePath() + "\" could not be created.");
}
public void detectPreExistingImages() {
detectPreExistingImages(imgDB, workingDir);
}
public void detectPreExistingImages(Path directory) {
detectPreExistingImages(imgDB, directory);
}
public void detectPreExistingImages(ImageDB db, Path directory) {
if (db == null)
throw new NullPointerException("Image database is null.");
if (directory == null)
throw new NullPointerException("Working directory is null.");
createWorkingDir(directory);
ImageIDFactory idFactory = ImageIDFactory.getInstance();
URL url = this.getClass().getResource("front.png");
Path dir = null;
try {
String path = URLDecoder.decode(url.getPath(), "UTF-8");
if (path.contains(":"))
path = path.substring(3);
dir = Paths.get(path).getParent();
} catch (UnsupportedEncodingException e) {
return;
}
File currentDir = dir.toFile();
if (currentDir.isDirectory()) {
for (File file : currentDir.listFiles()) {
if (file.isFile() && file.getName().endsWith(StoreImage.STORE_IMAGE_FORMAT)) {
long imageID = idFactory.getNextImageID();
// Copy files to correct file with the image id number
try {
BufferedImage buffImg = ImageIO.read(file);
if (buffImg == null)
continue;
db.setImageMapping(file.getName().substring(0,
file.getName().length() - StoreImage.STORE_IMAGE_FORMAT.length() - 1),
imageID, ImageSize.FULL);
StoreImage img = new StoreImage(imageID, buffImg, ImageSize.FULL);
preCacheImg.add(img);
Files.write(directory.resolve(String.valueOf(imageID)), img.getByteArray(),
StandardOpenOption.CREATE,
StandardOpenOption.WRITE,
StandardOpenOption.TRUNCATE_EXISTING);
} catch (IOException e) {
}
// Increment to have correct number of images for the limited drive storage
nrOfImagesPreExisting++;
}
}
}
}
public void setCachingMode(String cachingMode) {
this.cachingMode = CachingMode.getCachingModeFromString(cachingMode);
}
public void setCachingRule(String cachingRule) {
this.cachingRule = CachingRule.getCachingRuleFromString(cachingRule);
}
public void setCacheSize(long cacheSize) {
this.cacheSize = cacheSize;
}
public void setStorageMode(String storageMode) {
this.storageMode = StorageMode.getStorageModeFromString(storageMode);
}
public void setStorageRule(String storageRule) {
this.storageRule = StorageRule.getStorageRuleFromString(storageRule);
}
public void deleteAllCreatedData() {
deleteUnusedImages(new ArrayList<>());
}
public void finalizeSetup() {
Predicate<StoreImage> storagePredicate = null;
switch (storageRule) {
case ALL: storagePredicate = new StoreAll<StoreImage>(); break;
case FULL_SIZE_IMG: storagePredicate = new StoreLargeImages(); break;
default: storagePredicate = new StoreAll<StoreImage>(); break;
}
IDataStorage<StoreImage> storage = null;
switch (storageMode) {
case DRIVE: storage = new DriveStorage(workingDir, imgDB, storagePredicate); break;
case DRIVE_LIMITED: storage = new LimitedDriveStorage(workingDir, imgDB,
storagePredicate, nrOfImagesToGenerate + nrOfImagesPreExisting); break;
default: storage = new DriveStorage(workingDir, imgDB, storagePredicate); break;
}
Predicate<StoreImage> cachePredicate = null;
switch (cachingRule) {
case ALL: cachePredicate = new CacheAll<StoreImage>(); break;
default: cachePredicate = new CacheAll<StoreImage>(); break;
}
IDataCache<StoreImage> cache = null;
switch (cachingMode) {
case FIFO: cache = new FirstInFirstOut<StoreImage>(storage, cacheSize, cachePredicate); break;
case LIFO: cache = new LastInFirstOut<StoreImage>(storage, cacheSize, cachePredicate); break;
case RR: cache = new RandomReplacement<StoreImage>(storage, cacheSize, cachePredicate); break;
case LFU: cache = new LeastFrequentlyUsed<StoreImage>(storage, cacheSize, cachePredicate); break;
case LRU: cache = new LeastRecentlyUsed<StoreImage>(storage, cacheSize, cachePredicate); break;
case MRU: cache = new MostRecentlyUsed<StoreImage>(storage, cacheSize, cachePredicate); break;
case NONE: break;
default: break;
}
ImageProvider provider = ImageProvider.getInstance();
provider.setImageDB(imgDB);
provider.setImageCreatorRunner(imgCreatorRunner);
if (cache == null) {
provider.setStorage(storage);
} else {
for (StoreImage i : preCacheImg)
cache.cacheData(i);
provider.setStorage(cache);
}
}
public Path getWorkingDir() {
return workingDir;
}
private void deleteUnusedImages(List<Long> imagesToKeep) {
File currentDir = workingDir.toFile();
if (currentDir.isDirectory()) {
for (File file : currentDir.listFiles()) {
if (file.isFile() && !imagesToKeep.contains(Long.parseLong(file.getName()))) {
file.delete();
}
}
currentDir.delete();
}
}
public void reconfiguration() {
// Stop image creation to have sort of a steady state to work on
imgCreatorRunner.stopCreation();
while (imgCreatorRunner.isRunning()) {
try {
Thread.sleep(imgCreatorRunner.getAvgCreationTime());
} catch (InterruptedException e) {
}
}
// Get all the new products, compare them with the current database and determine which product is known and
// which is not
ImageSize biggest = ImageSize.getBiggestSize();
List<Long> productIDs = convertToIDs(fetchProducts());
List<Long> productsNotInDB = productIDs.stream()
.filter(p -> !imgDB.hasImageID(p, biggest))
.collect(Collectors.toList());
List<Long> productsInDB = productIDs.stream()
.filter(p -> imgDB.hasImageID(p, biggest))
.collect(Collectors.toList());
// Make a copy of our database, remove all products from the old one
ImageDB copy = new ImageDB(imgDB);
imgDB.removeProductImages();
// Add all known products with their original image id to the empty database and delete files on the disk
// that are not used anymore
productsInDB.forEach(p -> imgDB.setImageMapping(p, copy.getImageID(p, biggest), biggest));
List<Long> imagesToKeep = productsInDB.stream()
.map(p -> copy.getImageID(p, biggest))
.collect(Collectors.toList());
imagesToKeep.addAll(copy.getAllWebImageIDs(biggest));
deleteUnusedImages(imagesToKeep);
// Start our new image creator thread and finish the reconfiguration
generateImages(productsNotInDB, productsNotInDB.size());
finalizeSetup();
}
}
| services/tools.descartes.petstore.image/src/main/java/tools/descartes/petstore/image/setup/SetupController.java | /**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package tools.descartes.petstore.image.setup;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLDecoder;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.List;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import javax.imageio.ImageIO;
import tools.descartes.petstore.entities.ImageSize;
import tools.descartes.petstore.entities.Product;
import tools.descartes.petstore.image.ImageDB;
import tools.descartes.petstore.image.ImageProvider;
import tools.descartes.petstore.image.StoreImage;
import tools.descartes.petstore.image.cache.FirstInFirstOut;
import tools.descartes.petstore.image.cache.IDataCache;
import tools.descartes.petstore.image.cache.LastInFirstOut;
import tools.descartes.petstore.image.cache.LeastFrequentlyUsed;
import tools.descartes.petstore.image.cache.LeastRecentlyUsed;
import tools.descartes.petstore.image.cache.MostRecentlyUsed;
import tools.descartes.petstore.image.cache.RandomReplacement;
import tools.descartes.petstore.image.cache.rules.CacheAll;
import tools.descartes.petstore.image.storage.DriveStorage;
import tools.descartes.petstore.image.storage.IDataStorage;
import tools.descartes.petstore.image.storage.LimitedDriveStorage;
import tools.descartes.petstore.image.storage.rules.StoreAll;
import tools.descartes.petstore.image.storage.rules.StoreLargeImages;
import tools.descartes.petstore.registryclient.Service;
import tools.descartes.petstore.registryclient.rest.LoadBalancedCRUDOperations;
public class SetupController {
public final static Path STD_WORKING_DIR = Paths.get("images");
private static SetupController instance = new SetupController();
private List<Long> productIDs = new ArrayList<>();
private StorageRule storageRule = StorageRule.STD_STORAGE_RULE;
private CachingRule cachingRule = CachingRule.STD_CACHING_RULE;
private Path workingDir = STD_WORKING_DIR;
private long cacheSize = IDataCache.STD_MAX_CACHE_SIZE;
private StorageMode storageMode = StorageMode.STD_STORAGE_MODE;
private CachingMode cachingMode = CachingMode.STD_CACHING_MODE;
private int nrOfImagesToGenerate = 0;
private int nrOfImagesPreExisting = 0;
private ImageDB imgDB = new ImageDB();
private List<StoreImage> preCacheImg = new ArrayList<>();
private ImageCreatorRunner imgCreatorRunner;
private Thread imgCreatorThread;
private SetupController() {
createWorkingDir(workingDir);
}
public void setWorkingDir(Path path) {
if (path != null)
workingDir = path;
}
public static SetupController getInstance() {
return instance;
}
private List<Product> fetchProducts() {
return LoadBalancedCRUDOperations.getEntities(Service.PERSISTENCE, "products",
Product.class, -1, -1);
}
private List<Long> convertToIDs(List<Product> products) {
return products.stream().map(product -> product.getId()).collect(Collectors.toList());
}
public void generateImages() {
List<Product> products = fetchProducts();
if (products == null)
return;
List<Long> productIDs = convertToIDs(products);
generateImages(productIDs, productIDs.size());
}
public void generateImages(int nrOfImagesToGenerate) {
List<Product> products = fetchProducts();
if (products == null)
return;
List<Long> productIDs = convertToIDs(products);
generateImages(productIDs, nrOfImagesToGenerate);
}
public void generateImages(List<Long> productIDs, int nrOfImagesToGenerate) {
if (productIDs == null || nrOfImagesToGenerate <= 0)
return;
this.productIDs = productIDs;
this.nrOfImagesToGenerate = nrOfImagesToGenerate;
// Create images
imgCreatorRunner = new ImageCreatorRunner(this.productIDs, STD_WORKING_DIR, imgDB,
ImageCreator.STD_NR_OF_SHAPES_PER_IMAGE, ImageCreator.STD_SEED, ImageSize.STD_IMAGE_SIZE,
nrOfImagesToGenerate);
imgCreatorThread = new Thread(imgCreatorRunner);
imgCreatorThread.start();
}
private void createWorkingDir(Path directory) {
if (!directory.toFile().exists())
if (!directory.toFile().mkdir())
throw new IllegalArgumentException("Standard working directory \""
+ directory.toAbsolutePath() + "\" could not be created.");
}
public void detectPreExistingImages() {
detectPreExistingImages(imgDB, workingDir);
}
public void detectPreExistingImages(Path directory) {
detectPreExistingImages(imgDB, directory);
}
public void detectPreExistingImages(ImageDB db, Path directory) {
if (db == null)
throw new NullPointerException("Image database is null.");
if (directory == null)
throw new NullPointerException("Working directory is null.");
createWorkingDir(directory);
ImageIDFactory idFactory = ImageIDFactory.getInstance();
URL url = this.getClass().getResource("front.png");
Path dir = null;
try {
String path = URLDecoder.decode(url.getPath(), "UTF-8");
if (path.contains(":"))
path = path.substring(3);
dir = Paths.get(path).getParent();
} catch (UnsupportedEncodingException e) {
return;
}
File currentDir = dir.toFile();
if (currentDir.isDirectory()) {
for (File file : currentDir.listFiles()) {
if (file.isFile() && file.getName().endsWith(StoreImage.STORE_IMAGE_FORMAT)) {
long imageID = idFactory.getNextImageID();
// Copy files to correct file with the image id number
try {
BufferedImage buffImg = ImageIO.read(file);
if (buffImg == null)
continue;
db.setImageMapping(file.getName().substring(0,
file.getName().length() - StoreImage.STORE_IMAGE_FORMAT.length() - 1),
imageID, ImageSize.FULL);
StoreImage img = new StoreImage(imageID, buffImg, ImageSize.FULL);
preCacheImg.add(img);
Files.write(directory.resolve(String.valueOf(imageID)), img.getByteArray(),
StandardOpenOption.CREATE,
StandardOpenOption.WRITE,
StandardOpenOption.TRUNCATE_EXISTING);
} catch (IOException e) {
}
// Increment to have correct number of images for the limited drive storage
nrOfImagesPreExisting++;
}
}
}
}
public void setCachingMode(String cachingMode) {
this.cachingMode = CachingMode.getCachingModeFromString(cachingMode);
}
public void setCachingRule(String cachingRule) {
this.cachingRule = CachingRule.getCachingRuleFromString(cachingRule);
}
public void setCacheSize(long cacheSize) {
this.cacheSize = cacheSize;
}
public void setStorageMode(String storageMode) {
this.storageMode = StorageMode.getStorageModeFromString(storageMode);
}
public void setStorageRule(String storageRule) {
this.storageRule = StorageRule.getStorageRuleFromString(storageRule);
}
public void deleteAllCreatedData() {
deleteUnusedImages(new ArrayList<>());
}
public void finalizeSetup() {
Predicate<StoreImage> storagePredicate = null;
switch (storageRule) {
case ALL: storagePredicate = new StoreAll<StoreImage>(); break;
case FULL_SIZE_IMG: storagePredicate = new StoreLargeImages(); break;
default: storagePredicate = new StoreAll<StoreImage>(); break;
}
IDataStorage<StoreImage> storage = null;
switch (storageMode) {
case DRIVE: storage = new DriveStorage(workingDir, imgDB, storagePredicate); break;
case DRIVE_LIMITED: storage = new LimitedDriveStorage(workingDir, imgDB,
storagePredicate, nrOfImagesToGenerate + nrOfImagesPreExisting); break;
default: storage = new DriveStorage(workingDir, imgDB, storagePredicate); break;
}
Predicate<StoreImage> cachePredicate = null;
switch (cachingRule) {
case ALL: cachePredicate = new CacheAll<StoreImage>(); break;
default: cachePredicate = new CacheAll<StoreImage>(); break;
}
IDataCache<StoreImage> cache = null;
switch (cachingMode) {
case FIFO: cache = new FirstInFirstOut<StoreImage>(storage, cacheSize, cachePredicate); break;
case LIFO: cache = new LastInFirstOut<StoreImage>(storage, cacheSize, cachePredicate); break;
case RR: cache = new RandomReplacement<StoreImage>(storage, cacheSize, cachePredicate); break;
case LFU: cache = new LeastFrequentlyUsed<StoreImage>(storage, cacheSize, cachePredicate); break;
case LRU: cache = new LeastRecentlyUsed<StoreImage>(storage, cacheSize, cachePredicate); break;
case MRU: cache = new MostRecentlyUsed<StoreImage>(storage, cacheSize, cachePredicate); break;
case NONE: break;
default: break;
}
ImageProvider provider = ImageProvider.getInstance();
provider.setImageDB(imgDB);
provider.setImageCreatorRunner(imgCreatorRunner);
if (cache == null) {
provider.setStorage(storage);
} else {
for (StoreImage i : preCacheImg)
cache.cacheData(i);
provider.setStorage(cache);
}
}
public Path getWorkingDir() {
return workingDir;
}
private void deleteUnusedImages(List<Long> imagesToKeep) {
File currentDir = workingDir.toFile();
System.out.println("Removing directory: " + currentDir.toPath().toAbsolutePath().toString() + " with Thread " + Thread.currentThread().getName());
if (currentDir.isDirectory()) {
for (File file : currentDir.listFiles()) {
if (file.isFile() && !imagesToKeep.contains(Long.parseLong(file.getName()))) {
file.delete();
}
}
currentDir.delete();
}
}
public void reconfiguration() {
// Stop image creation to have sort of a steady state to work on
imgCreatorRunner.stopCreation();
while (imgCreatorRunner.isRunning()) {
try {
Thread.sleep(imgCreatorRunner.getAvgCreationTime());
} catch (InterruptedException e) {
}
}
// Get all the new products, compare them with the current database and determine which product is known and
// which is not
ImageSize biggest = ImageSize.getBiggestSize();
List<Long> productIDs = convertToIDs(fetchProducts());
List<Long> productsNotInDB = productIDs.stream()
.filter(p -> !imgDB.hasImageID(p, biggest))
.collect(Collectors.toList());
List<Long> productsInDB = productIDs.stream()
.filter(p -> imgDB.hasImageID(p, biggest))
.collect(Collectors.toList());
// Make a copy of our database, remove all products from the old one
ImageDB copy = new ImageDB(imgDB);
imgDB.removeProductImages();
// Add all known products with their original image id to the empty database and delete files on the disk
// that are not used anymore
productsInDB.forEach(p -> imgDB.setImageMapping(p, copy.getImageID(p, biggest), biggest));
List<Long> imagesToKeep = productsInDB.stream()
.map(p -> copy.getImageID(p, biggest))
.collect(Collectors.toList());
imagesToKeep.addAll(copy.getAllWebImageIDs(biggest));
deleteUnusedImages(imagesToKeep);
// Start our new image creator thread and finish the reconfiguration
generateImages(productsNotInDB, productsNotInDB.size());
finalizeSetup();
}
}
| Removed debugging output from SetupController
| services/tools.descartes.petstore.image/src/main/java/tools/descartes/petstore/image/setup/SetupController.java | Removed debugging output from SetupController |
|
Java | apache-2.0 | f95a74c5e090469e541c205e18b759e5e94d8339 | 0 | inventiLT/inventi-wicket,inventiLT/inventi-wicket,inventiLT/inventi-wicket | package lt.inventi.wicket.component.bootstrap.form;
import java.util.Arrays;
import org.apache.wicket.markup.html.form.IChoiceRenderer;
import org.apache.wicket.markup.html.form.RadioChoice;
import org.apache.wicket.model.IModel;
/**
* Radio choice which creates two values for a boolean-backed model.
* <p>
* This control serves as an alternative to checkboxes, especially in
* bootstrap's horizontal forms. See <a href=
* "http://ux.stackexchange.com/questions/22532/laying-out-checkboxes-in-forms"
* >this UX Stackexchange question</a> for more information and supporting
* ideas.
* <p>
* You can localize the labels for true/false options by providing localization
* properties for:
* <ol>
* <li>BooleanRadioChoice.TRUE</li>
* <li>BooleanRadioChoice.FALSE</li>
* </ol>
*
* @author vplatonov
*
*/
public class BootstrapBooleanRadioChoice extends RadioChoice<Boolean> {
public BootstrapBooleanRadioChoice(String id) {
super(id, Arrays.asList(Boolean.TRUE, Boolean.FALSE), new BooleanRadioChoiceRenderer());
}
public BootstrapBooleanRadioChoice(String id, IModel<Boolean> model) {
super(id, model, Arrays.asList(Boolean.TRUE, Boolean.FALSE), new BooleanRadioChoiceRenderer());
}
private static class BooleanRadioChoiceRenderer implements IChoiceRenderer<Boolean> {
@Override
public Object getDisplayValue(Boolean object) {
return Boolean.TRUE.equals(object) ? "BooleanRadioChoice.TRUE" : "BooleanRadioChoice.FALSE";
}
@Override
public String getIdValue(Boolean object, int index) {
return String.valueOf(object);
}
}
public String getSuffix(){
return "";
}
@Override
protected boolean localizeDisplayValues() {
return true;
}
}
| inventi-wicket-bootstrap/src/main/java/lt/inventi/wicket/component/bootstrap/form/BootstrapBooleanRadioChoice.java | package lt.inventi.wicket.component.bootstrap.form;
import java.util.Arrays;
import org.apache.wicket.markup.html.form.IChoiceRenderer;
import org.apache.wicket.model.IModel;
/**
* Radio choice which creates two values for a boolean-backed model.
* <p>
* This control serves as an alternative to checkboxes, especially in
* bootstrap's horizontal forms. See <a href=
* "http://ux.stackexchange.com/questions/22532/laying-out-checkboxes-in-forms"
* >this UX Stackexchange question</a> for more information and supporting
* ideas.
* <p>
* You can localize the labels for true/false options by providing localization
* properties for:
* <ol>
* <li>BooleanRadioChoice.TRUE</li>
* <li>BooleanRadioChoice.FALSE</li>
* </ol>
*
* @author vplatonov
*
*/
public class BootstrapBooleanRadioChoice extends BootstrapRadioChoice<Boolean> {
public BootstrapBooleanRadioChoice(String id) {
super(id, Arrays.asList(Boolean.TRUE, Boolean.FALSE), new BooleanRadioChoiceRenderer());
}
public BootstrapBooleanRadioChoice(String id, IModel<Boolean> model) {
super(id, model, Arrays.asList(Boolean.TRUE, Boolean.FALSE), new BooleanRadioChoiceRenderer());
}
private static class BooleanRadioChoiceRenderer implements IChoiceRenderer<Boolean> {
@Override
public Object getDisplayValue(Boolean object) {
return Boolean.TRUE.equals(object) ? "BooleanRadioChoice.TRUE" : "BooleanRadioChoice.FALSE";
}
@Override
public String getIdValue(Boolean object, int index) {
return String.valueOf(object);
}
}
@Override
protected boolean localizeDisplayValues() {
return true;
}
}
| Do not nest inputs into labels
| inventi-wicket-bootstrap/src/main/java/lt/inventi/wicket/component/bootstrap/form/BootstrapBooleanRadioChoice.java | Do not nest inputs into labels |
|
Java | apache-2.0 | 05b54dadc163f331f29700e3d6ac798160415b13 | 0 | b2ihealthcare/snow-owl,IHTSDO/snow-owl,b2ihealthcare/snow-owl,b2ihealthcare/snow-owl,IHTSDO/snow-owl,IHTSDO/snow-owl,IHTSDO/snow-owl,b2ihealthcare/snow-owl | /*
* Copyright 2011-2015 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.api.impl.codesystem.domain;
import org.hibernate.validator.constraints.NotEmpty;
import com.b2international.snowowl.api.codesystem.domain.ICodeSystem;
/**
*/
public class CodeSystem implements ICodeSystem {
public static Builder builder() {
return new Builder();
}
public static Builder builder(final com.b2international.snowowl.datastore.ICodeSystem input) {
return builder()
.oid(input.getOid())
.name(input.getName())
.shortName(input.getShortName())
.link(input.getOrgLink())
.language(input.getLanguage())
.citation(input.getCitation())
.branchPath(input.getBranchPath())
.iconPath(input.getIconPath())
.terminologyId(input.getSnowOwlId())
.repositoryId(input.getRepositoryUuid())
.extensionOf(input.getExtensionOf());
}
public static class Builder {
private String oid;
private String name;
private String shortName;
private String link;
private String language;
private String citation;
private String branchPath;
private String iconPath;
private String terminologyId;
private String repositoryId;
private String extensionOf;
private Builder() {}
public Builder oid(final String oid) {
this.oid = oid;
return getSelf();
}
public Builder name(final String name) {
this.name = name;
return getSelf();
}
public Builder shortName(final String shortName) {
this.shortName = shortName;
return getSelf();
}
public Builder link(final String link) {
this.link = link;
return getSelf();
}
public Builder language(final String language) {
this.language = language;
return getSelf();
}
public Builder citation(final String citation) {
this.citation = citation;
return getSelf();
}
public Builder branchPath(final String branchPath) {
this.branchPath = branchPath;
return getSelf();
}
public Builder iconPath(final String iconPath) {
this.iconPath = iconPath;
return getSelf();
}
public Builder terminologyId(final String terminologyId) {
this.terminologyId = terminologyId;
return getSelf();
}
public Builder repositoryId(final String repositoryId) {
this.repositoryId = repositoryId;
return getSelf();
}
public Builder extensionOf(final String extensionOf) {
this.extensionOf = extensionOf;
return getSelf();
}
public CodeSystem build() {
return new CodeSystem(
oid,
name,
shortName,
link,
language,
citation,
branchPath,
iconPath,
terminologyId,
repositoryId,
extensionOf);
}
private Builder getSelf() {
return this;
}
}
public CodeSystem() {
}
private CodeSystem(final String oid, final String name, final String shortName, final String link, final String language,
final String citation, final String branchPath, final String iconPath, final String terminologyId, final String repositoryId,
final String extensionOf) {
this.oid = oid;
this.name = name;
this.shortName = shortName;
this.organizationLink = link;
this.primaryLanguage = language;
this.citation = citation;
this.branchPath = branchPath;
this.iconPath = iconPath;
this.terminologyId = terminologyId;
this.repositoryUuid = repositoryId;
this.extensionOf = extensionOf;
}
private String oid;
@NotEmpty
private String name;
@NotEmpty
private String shortName;
private String organizationLink;
@NotEmpty
private String primaryLanguage;
@NotEmpty
private String citation;
@NotEmpty
private String branchPath;
@NotEmpty
private String iconPath;
@NotEmpty
private String terminologyId;
@NotEmpty
private String repositoryUuid;
private String extensionOf;
@Override
public String getOid() {
return oid;
}
@Override
public String getName() {
return name;
}
@Override
public String getShortName() {
return shortName;
}
@Override
public String getOrganizationLink() {
return organizationLink;
}
@Override
public String getPrimaryLanguage() {
return primaryLanguage;
}
@Override
public String getCitation() {
return citation;
}
@Override
public String getBranchPath() {
return branchPath;
}
@Override
public String getIconPath() {
return iconPath;
}
@Override
public String getTerminologyId() {
return terminologyId;
}
@Override
public String getRepositoryUuid() {
return repositoryUuid;
}
@Override
public String getExtensionOf() {
return extensionOf;
}
public void setOid(final String oid) {
this.oid = oid;
}
public void setName(final String name) {
this.name = name;
}
public void setShortName(final String shortName) {
this.shortName = shortName;
}
public void setOrganizationLink(final String organizationLink) {
this.organizationLink = organizationLink;
}
public void setPrimaryLanguage(final String primaryLanguage) {
this.primaryLanguage = primaryLanguage;
}
public void setCitation(final String citation) {
this.citation = citation;
}
public void setBranchPath(String branchPath) {
this.branchPath = branchPath;
}
public void setIconPath(String iconPath) {
this.iconPath = iconPath;
}
public void setTerminologyId(String terminologyId) {
this.terminologyId = terminologyId;
}
public void setRepositoryUuid(String repositoryUuid) {
this.repositoryUuid = repositoryUuid;
}
public void setExtensionOf(String extensionOf) {
this.extensionOf = extensionOf;
}
@Override
public String toString() {
final StringBuilder builder = new StringBuilder();
builder.append("CodeSystem [oid=");
builder.append(oid);
builder.append(", name=");
builder.append(name);
builder.append(", shortName=");
builder.append(shortName);
builder.append(", organizationLink=");
builder.append(organizationLink);
builder.append(", primaryLanguage=");
builder.append(primaryLanguage);
builder.append(", citation=");
builder.append(citation);
builder.append(", branchPath=");
builder.append(branchPath);
builder.append(", iconPath=");
builder.append(iconPath);
builder.append(", repositoryUuid=");
builder.append(repositoryUuid);
builder.append(", extensionOf=");
builder.append(extensionOf);
builder.append("]");
return builder.toString();
}
} | core/com.b2international.snowowl.api.impl/src/com/b2international/snowowl/api/impl/codesystem/domain/CodeSystem.java | /*
* Copyright 2011-2015 B2i Healthcare Pte Ltd, http://b2i.sg
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.b2international.snowowl.api.impl.codesystem.domain;
import org.hibernate.validator.constraints.NotEmpty;
import com.b2international.snowowl.api.codesystem.domain.ICodeSystem;
/**
*/
public class CodeSystem implements ICodeSystem {
private String oid;
@NotEmpty
private String name;
@NotEmpty
private String shortName;
private String organizationLink;
@NotEmpty
private String primaryLanguage;
@NotEmpty
private String citation;
@NotEmpty
private String branchPath;
@NotEmpty
private String iconPath;
@NotEmpty
private String terminologyId;
@NotEmpty
private String repositoryUuid;
private String extensionOf;
@Override
public String getOid() {
return oid;
}
@Override
public String getName() {
return name;
}
@Override
public String getShortName() {
return shortName;
}
@Override
public String getOrganizationLink() {
return organizationLink;
}
@Override
public String getPrimaryLanguage() {
return primaryLanguage;
}
@Override
public String getCitation() {
return citation;
}
@Override
public String getBranchPath() {
return branchPath;
}
@Override
public String getIconPath() {
return iconPath;
}
@Override
public String getTerminologyId() {
return terminologyId;
}
@Override
public String getRepositoryUuid() {
return repositoryUuid;
}
@Override
public String getExtensionOf() {
return extensionOf;
}
public void setOid(final String oid) {
this.oid = oid;
}
public void setName(final String name) {
this.name = name;
}
public void setShortName(final String shortName) {
this.shortName = shortName;
}
public void setOrganizationLink(final String organizationLink) {
this.organizationLink = organizationLink;
}
public void setPrimaryLanguage(final String primaryLanguage) {
this.primaryLanguage = primaryLanguage;
}
public void setCitation(final String citation) {
this.citation = citation;
}
public void setBranchPath(String branchPath) {
this.branchPath = branchPath;
}
public void setIconPath(String iconPath) {
this.iconPath = iconPath;
}
public void setTerminologyId(String terminologyId) {
this.terminologyId = terminologyId;
}
public void setRepositoryUuid(String repositoryUuid) {
this.repositoryUuid = repositoryUuid;
}
public void setExtensionOf(String extensionOf) {
this.extensionOf = extensionOf;
}
@Override
public String toString() {
final StringBuilder builder = new StringBuilder();
builder.append("CodeSystem [oid=");
builder.append(oid);
builder.append(", name=");
builder.append(name);
builder.append(", shortName=");
builder.append(shortName);
builder.append(", organizationLink=");
builder.append(organizationLink);
builder.append(", primaryLanguage=");
builder.append(primaryLanguage);
builder.append(", citation=");
builder.append(citation);
builder.append(", branchPath=");
builder.append(branchPath);
builder.append(", iconPath=");
builder.append(iconPath);
builder.append(", repositoryUuid=");
builder.append(repositoryUuid);
builder.append(", extensionOf=");
builder.append(extensionOf);
builder.append("]");
return builder.toString();
}
} | Builder to build api code systems. | core/com.b2international.snowowl.api.impl/src/com/b2international/snowowl/api/impl/codesystem/domain/CodeSystem.java | Builder to build api code systems. |
|
Java | apache-2.0 | d4166db8fc12b0ac93bcf4498feffc054dbb0c4f | 0 | SlavaPihidko/java_lessons | package qa.paket1;
public class Hello {
public static void main(String[] args) {
hello("world");
}
    public static void hello(String somebody) {
        System.out.println("Hello " + somebody + "!");
}
} | lesson_1/src/main/java/qa/paket1/Hello.java | package qa.paket1;
public class Hello {
public static void main(String[] args) {
System.out.println("Hello world!");
}
} | created the hello method
| lesson_1/src/main/java/qa/paket1/Hello.java | создали метод hello |
|
Java | apache-2.0 | 93e40c9b36721d501e5ec8cf3cdc3c8590b1fc66 | 0 | eayun/ovirt-engine,walteryang47/ovirt-engine,zerodengxinchao/ovirt-engine,OpenUniversity/ovirt-engine,walteryang47/ovirt-engine,zerodengxinchao/ovirt-engine,eayun/ovirt-engine,zerodengxinchao/ovirt-engine,walteryang47/ovirt-engine,yingyun001/ovirt-engine,eayun/ovirt-engine,yapengsong/ovirt-engine,yingyun001/ovirt-engine,OpenUniversity/ovirt-engine,walteryang47/ovirt-engine,zerodengxinchao/ovirt-engine,yapengsong/ovirt-engine,OpenUniversity/ovirt-engine,eayun/ovirt-engine,yingyun001/ovirt-engine,OpenUniversity/ovirt-engine,yingyun001/ovirt-engine,eayun/ovirt-engine,yapengsong/ovirt-engine,walteryang47/ovirt-engine,yapengsong/ovirt-engine,yingyun001/ovirt-engine,yapengsong/ovirt-engine,OpenUniversity/ovirt-engine,zerodengxinchao/ovirt-engine | package org.ovirt.engine.core.vdsbroker.vdsbroker;
import java.nio.file.Paths;
import java.text.DateFormat;
import java.text.DecimalFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang.StringUtils;
import org.ovirt.engine.core.common.AuditLogType;
import org.ovirt.engine.core.common.FeatureSupported;
import org.ovirt.engine.core.common.businessentities.AutoNumaBalanceStatus;
import org.ovirt.engine.core.common.businessentities.CpuStatistics;
import org.ovirt.engine.core.common.businessentities.DiskImageDynamic;
import org.ovirt.engine.core.common.businessentities.DisplayType;
import org.ovirt.engine.core.common.businessentities.Entities;
import org.ovirt.engine.core.common.businessentities.KdumpStatus;
import org.ovirt.engine.core.common.businessentities.LUNs;
import org.ovirt.engine.core.common.businessentities.NumaNodeStatistics;
import org.ovirt.engine.core.common.businessentities.SessionState;
import org.ovirt.engine.core.common.businessentities.StoragePool;
import org.ovirt.engine.core.common.businessentities.StorageType;
import org.ovirt.engine.core.common.businessentities.VDS;
import org.ovirt.engine.core.common.businessentities.VDSDomainsData;
import org.ovirt.engine.core.common.businessentities.VMStatus;
import org.ovirt.engine.core.common.businessentities.VdsNumaNode;
import org.ovirt.engine.core.common.businessentities.VdsTransparentHugePagesState;
import org.ovirt.engine.core.common.businessentities.VmBalloonInfo;
import org.ovirt.engine.core.common.businessentities.VmBlockJob;
import org.ovirt.engine.core.common.businessentities.VmBlockJobType;
import org.ovirt.engine.core.common.businessentities.VmDynamic;
import org.ovirt.engine.core.common.businessentities.VmExitReason;
import org.ovirt.engine.core.common.businessentities.VmExitStatus;
import org.ovirt.engine.core.common.businessentities.VmGuestAgentInterface;
import org.ovirt.engine.core.common.businessentities.VmJob;
import org.ovirt.engine.core.common.businessentities.VmJobState;
import org.ovirt.engine.core.common.businessentities.VmJobType;
import org.ovirt.engine.core.common.businessentities.VmPauseStatus;
import org.ovirt.engine.core.common.businessentities.VmRngDevice;
import org.ovirt.engine.core.common.businessentities.VmStatistics;
import org.ovirt.engine.core.common.businessentities.network.InterfaceStatus;
import org.ovirt.engine.core.common.businessentities.network.Network;
import org.ovirt.engine.core.common.businessentities.network.NetworkBootProtocol;
import org.ovirt.engine.core.common.businessentities.network.VdsInterfaceType;
import org.ovirt.engine.core.common.businessentities.network.VdsNetworkInterface;
import org.ovirt.engine.core.common.businessentities.network.VdsNetworkStatistics;
import org.ovirt.engine.core.common.businessentities.network.VmNetworkInterface;
import org.ovirt.engine.core.common.config.Config;
import org.ovirt.engine.core.common.config.ConfigValues;
import org.ovirt.engine.core.common.utils.EnumUtils;
import org.ovirt.engine.core.common.utils.SizeConverter;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.compat.RpmVersion;
import org.ovirt.engine.core.dal.dbbroker.DbFacade;
import org.ovirt.engine.core.dal.dbbroker.auditloghandling.AuditLogDirector;
import org.ovirt.engine.core.dal.dbbroker.auditloghandling.AuditLogableBase;
import org.ovirt.engine.core.utils.NetworkUtils;
import org.ovirt.engine.core.utils.NumaUtils;
import org.ovirt.engine.core.utils.SerializationFactory;
import org.ovirt.engine.core.utils.log.Log;
import org.ovirt.engine.core.utils.log.LogFactory;
/**
 * This class encapsulates the knowledge of how to create objects from the VDS RPC protocol response.
 * It has methods that receive an XmlRpcStruct and construct the following classes: VmDynamic, VdsDynamic and VdsStatic.
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
public class VdsBrokerObjectsBuilder {
private final static int VNC_START_PORT = 5900;
private final static double NANO_SECONDS = 1000000000;
public static VmDynamic buildVMDynamicDataFromList(Map<String, Object> xmlRpcStruct) {
VmDynamic vmdynamic = new VmDynamic();
if (xmlRpcStruct.containsKey(VdsProperties.vm_guid)) {
vmdynamic.setId(new Guid((String) xmlRpcStruct.get(VdsProperties.vm_guid)));
}
if (xmlRpcStruct.containsKey(VdsProperties.status)) {
vmdynamic.setStatus(convertToVmStatus((String) xmlRpcStruct.get(VdsProperties.status)));
}
return vmdynamic;
}
public static VmDynamic buildVMDynamicData(Map<String, Object> xmlRpcStruct) {
VmDynamic vmdynamic = new VmDynamic();
updateVMDynamicData(vmdynamic, xmlRpcStruct);
return vmdynamic;
}
public static StoragePool buildStoragePool(Map<String, Object> xmlRpcStruct) {
StoragePool sPool = new StoragePool();
if (xmlRpcStruct.containsKey("type")) {
sPool.setIsLocal(StorageType.valueOf(xmlRpcStruct.get("type").toString()).isLocal());
}
sPool.setName(AssignStringValue(xmlRpcStruct, "name"));
Integer masterVersion = AssignIntValue(xmlRpcStruct, "master_ver");
if (masterVersion != null) {
sPool.setmaster_domain_version(masterVersion);
}
return sPool;
}
public static VmStatistics buildVMStatisticsData(Map<String, Object> xmlRpcStruct) {
VmStatistics vmStatistics = new VmStatistics();
updateVMStatisticsData(vmStatistics, xmlRpcStruct);
return vmStatistics;
}
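    /**
     * Builds a map from LUN id to a {@link LUNs} object out of the VM disks reported by VDSM,
     * setting the device size (in GB) when the true size is reported.
     */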
public static Map<String, LUNs> buildVmLunDisksData(Map<String, Object> xmlRpcStruct) {
Map<String, Object> disks = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.vm_disks);
Map<String, LUNs> lunsMap = new HashMap<>();
if (disks != null) {
for (Object diskAsObj : disks.values()) {
Map<String, Object> disk = (Map<String, Object>) diskAsObj;
String lunGuidString = AssignStringValue(disk, VdsProperties.lun_guid);
if (!StringUtils.isEmpty(lunGuidString)) {
LUNs lun = new LUNs();
lun.setLUN_id(lunGuidString);
if (disk.containsKey(VdsProperties.disk_true_size)) {
long sizeInBytes = AssignLongValue(disk, VdsProperties.disk_true_size);
int sizeInGB = SizeConverter.convert(
sizeInBytes, SizeConverter.SizeUnit.BYTES, SizeConverter.SizeUnit.GB).intValue();
lun.setDeviceSize(sizeInGB);
}
lunsMap.put(lunGuidString, lun);
}
}
}
return lunsMap;
}
public static void updateVMDynamicData(VmDynamic vm, Map<String, Object> xmlRpcStruct) {
if (xmlRpcStruct.containsKey(VdsProperties.vm_guid)) {
vm.setId(new Guid((String) xmlRpcStruct.get(VdsProperties.vm_guid)));
}
if (xmlRpcStruct.containsKey(VdsProperties.session)) {
String session = (String) xmlRpcStruct.get(VdsProperties.session);
try {
vm.setSession(SessionState.valueOf(session));
} catch (Exception e) {
log.errorFormat("vm session value illegal : {0}", session);
}
}
if (xmlRpcStruct.containsKey(VdsProperties.kvmEnable)) {
vm.setKvmEnable(Boolean.parseBoolean((String) xmlRpcStruct.get(VdsProperties.kvmEnable)));
}
if (xmlRpcStruct.containsKey(VdsProperties.acpiEnable)) {
vm.setAcpiEnable(Boolean.parseBoolean((String) xmlRpcStruct.get(VdsProperties.acpiEnable)));
}
if (xmlRpcStruct.containsKey(VdsProperties.win2kHackEnable)) {
vm.setWin2kHackEnable(Boolean.parseBoolean((String) xmlRpcStruct.get(VdsProperties.win2kHackEnable)));
}
if (xmlRpcStruct.containsKey(VdsProperties.status)) {
vm.setStatus(convertToVmStatus((String) xmlRpcStruct.get(VdsProperties.status)));
}
if (xmlRpcStruct.containsKey(VdsProperties.display_port)) {
try {
vm.setDisplay(Integer.parseInt(xmlRpcStruct.get(VdsProperties.display_port).toString()));
} catch (NumberFormatException e) {
log.errorFormat("vm display_port value illegal : {0}", xmlRpcStruct.get(VdsProperties.display_port));
}
} else if (xmlRpcStruct.containsKey(VdsProperties.display)) {
try {
vm.setDisplay(VNC_START_PORT + Integer.parseInt(xmlRpcStruct.get(VdsProperties.display).toString()));
} catch (NumberFormatException e) {
log.errorFormat("vm display value illegal : {0}", xmlRpcStruct.get(VdsProperties.display));
}
}
if (xmlRpcStruct.containsKey(VdsProperties.display_secure_port)) {
try {
vm.setDisplaySecurePort(Integer.parseInt(xmlRpcStruct.get(VdsProperties.display_secure_port)
.toString()));
} catch (NumberFormatException e) {
log.errorFormat("vm display_secure_port value illegal : {0}",
xmlRpcStruct.get(VdsProperties.display_secure_port));
}
}
if (xmlRpcStruct.containsKey((VdsProperties.displayType))) {
String displayType = xmlRpcStruct.get(VdsProperties.displayType).toString();
try {
vm.setDisplayType(DisplayType.valueOf(displayType));
} catch (Exception e2) {
log.errorFormat("vm display type value illegal : {0}", displayType);
}
}
if (xmlRpcStruct.containsKey((VdsProperties.displayIp))) {
vm.setDisplayIp((String) xmlRpcStruct.get(VdsProperties.displayIp));
}
if (xmlRpcStruct.containsKey((VdsProperties.utc_diff))) {
String utc_diff = xmlRpcStruct.get(VdsProperties.utc_diff).toString();
if (utc_diff.startsWith("+")) {
utc_diff = utc_diff.substring(1);
}
try {
vm.setUtcDiff(Integer.parseInt(utc_diff));
} catch (NumberFormatException e) {
log.errorFormat("vm offset (utc_diff) value illegal : {0}", utc_diff);
}
}
if (xmlRpcStruct.containsKey(VdsProperties.hash)) {
String hash = (String) xmlRpcStruct.get(VdsProperties.hash);
try {
vm.setHash(hash);
} catch (Exception e) {
log.errorFormat("vm hash value illegal : {0}", hash);
}
}
        // vm disks
if (xmlRpcStruct.containsKey(VdsProperties.vm_disks)) {
initDisks(xmlRpcStruct, vm);
}
// ------------- vm internal agent data
vm.setGuestLastLoginTime(AssignDateTImeFromEpoch(xmlRpcStruct, VdsProperties.guest_last_login_time));
vm.setVmHost(AssignStringValue(xmlRpcStruct, VdsProperties.vm_host));
String guestUserName = AssignStringValue(xmlRpcStruct, VdsProperties.guest_cur_user_name);
vm.setGuestCurrentUserName(guestUserName);
initAppsList(xmlRpcStruct, vm);
vm.setGuestOs(AssignStringValue(xmlRpcStruct, VdsProperties.guest_os));
if (xmlRpcStruct.containsKey(VdsProperties.VM_FQDN)) {
vm.setVmFQDN(AssignStringValue(xmlRpcStruct, VdsProperties.VM_FQDN));
String fqdn = vm.getVmFQDN().trim();
if ("localhost".equalsIgnoreCase(fqdn) || "localhost.localdomain".equalsIgnoreCase(fqdn)) {
vm.setVmFQDN(null);
}
else {
vm.setVmFQDN(fqdn);
}
}
vm.setVmIp(AssignStringValue(xmlRpcStruct, VdsProperties.VM_IP));
if (vm.getVmIp() != null) {
if (vm.getVmIp().startsWith("127.0.")) {
vm.setVmIp(null);
} else {
vm.setVmIp(vm.getVmIp().trim());
}
}
if (xmlRpcStruct.containsKey(VdsProperties.exit_code)) {
String exitCodeStr = xmlRpcStruct.get(VdsProperties.exit_code).toString();
vm.setExitStatus(VmExitStatus.forValue(Integer.parseInt(exitCodeStr)));
}
if (xmlRpcStruct.containsKey(VdsProperties.exit_message)) {
String exitMsg = (String) xmlRpcStruct.get(VdsProperties.exit_message);
vm.setExitMessage(exitMsg);
}
if (xmlRpcStruct.containsKey(VdsProperties.exit_reason)) {
String exitReasonStr = xmlRpcStruct.get(VdsProperties.exit_reason).toString();
vm.setExitReason(VmExitReason.forValue(Integer.parseInt(exitReasonStr)));
} else {
vm.setExitReason(VmExitReason.Unknown);
}
        // if monitorResponse returns a negative value it means it's erroneous
if (xmlRpcStruct.containsKey(VdsProperties.monitorResponse)) {
int response = Integer.parseInt(xmlRpcStruct.get(VdsProperties.monitorResponse).toString());
if (response < 0) {
vm.setStatus(VMStatus.NotResponding);
}
}
if (xmlRpcStruct.containsKey(VdsProperties.clientIp)) {
vm.setClientIp(xmlRpcStruct.get(VdsProperties.clientIp).toString());
}
VmPauseStatus pauseStatus = VmPauseStatus.NONE;
if (xmlRpcStruct.containsKey(VdsProperties.pauseCode)) {
String pauseCodeStr = (String) xmlRpcStruct.get(VdsProperties.pauseCode);
try {
pauseStatus = VmPauseStatus.valueOf(pauseCodeStr);
} catch (IllegalArgumentException ex) {
log.error("Error in parsing vm pause status. Setting value to NONE");
pauseStatus = VmPauseStatus.NONE;
}
}
vm.setPauseStatus(pauseStatus);
if (xmlRpcStruct.containsKey(VdsProperties.watchdogEvent)) {
Map<String, Object> watchdogStruct = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.watchdogEvent);
double time = Double.parseDouble(watchdogStruct.get(VdsProperties.time).toString());
// vdsm may not send the action http://gerrit.ovirt.org/14134
String action =
watchdogStruct.containsKey(VdsProperties.action) ? watchdogStruct.get(VdsProperties.action)
.toString() : null;
vm.setLastWatchdogEvent((long) time);
vm.setLastWatchdogAction(action);
}
if (xmlRpcStruct.containsKey(VdsProperties.CDRom)) {
String isoName = Paths.get((String) xmlRpcStruct.get(VdsProperties.CDRom)).getFileName().toString();
vm.setCurrentCd(isoName);
}
if (xmlRpcStruct.containsKey(VdsProperties.GUEST_CPU_COUNT)) {
vm.setGuestCpuCount(AssignIntValue(xmlRpcStruct, VdsProperties.GUEST_CPU_COUNT));
}
}
public static void updateVMStatisticsData(VmStatistics vm, Map<String, Object> xmlRpcStruct) {
if (xmlRpcStruct.containsKey(VdsProperties.vm_guid)) {
vm.setId(new Guid((String) xmlRpcStruct.get(VdsProperties.vm_guid)));
}
vm.setelapsed_time(AssignDoubleValue(xmlRpcStruct, VdsProperties.elapsed_time));
// ------------- vm network statistics -----------------------
if (xmlRpcStruct.containsKey(VdsProperties.VM_NETWORK)) {
Map networkStruct = (Map) xmlRpcStruct.get(VdsProperties.VM_NETWORK);
vm.setInterfaceStatistics(new ArrayList<VmNetworkInterface>());
for (Object tempNic : networkStruct.values()) {
Map nic = (Map) tempNic;
VmNetworkInterface stats = new VmNetworkInterface();
vm.getInterfaceStatistics().add(stats);
if (nic.containsKey(VdsProperties.VM_INTERFACE_NAME)) {
stats.setName((String) ((nic.get(VdsProperties.VM_INTERFACE_NAME) instanceof String) ? nic
.get(VdsProperties.VM_INTERFACE_NAME) : null));
}
Double rx_rate = AssignDoubleValue(nic, VdsProperties.rx_rate);
Double rx_dropped = AssignDoubleValue(nic, VdsProperties.rx_dropped);
Double tx_rate = AssignDoubleValue(nic, VdsProperties.tx_rate);
Double tx_dropped = AssignDoubleValue(nic, VdsProperties.tx_dropped);
stats.getStatistics().setReceiveRate(rx_rate != null ? rx_rate : 0);
stats.getStatistics().setReceiveDropRate(rx_dropped != null ? rx_dropped : 0);
stats.getStatistics().setTransmitRate(tx_rate != null ? tx_rate : 0);
stats.getStatistics().setTransmitDropRate(tx_dropped != null ? tx_dropped : 0);
stats.setMacAddress((String) ((nic.get(VdsProperties.MAC_ADDR) instanceof String) ? nic
.get(VdsProperties.MAC_ADDR) : null));
stats.setSpeed(AssignIntValue(nic, VdsProperties.INTERFACE_SPEED));
}
}
if (xmlRpcStruct.containsKey(VdsProperties.VM_DISKS_USAGE)) {
initDisksUsage(xmlRpcStruct, vm);
}
// ------------- vm cpu statistics -----------------------
vm.setcpu_sys(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_sys));
vm.setcpu_user(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_user));
// ------------- vm memory statistics -----------------------
vm.setusage_mem_percent(AssignIntValue(xmlRpcStruct, VdsProperties.vm_usage_mem_percent));
vm.setVmBalloonInfo(getBalloonInfo(xmlRpcStruct));
// ------------- vm migration statistics -----------------------
Integer migrationProgress = AssignIntValue(xmlRpcStruct, VdsProperties.vm_migration_progress_percent);
vm.setMigrationProgressPercent(migrationProgress != null ? migrationProgress : 0);
// ------------- vm jobs -------------
vm.setVmJobs(getVmJobs(vm.getId(), xmlRpcStruct));
}
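    /**
     * Extracts the balloon device data (current, max, target and min memory) from the VM statistics.
     * The balloon device is considered enabled only when all four properties are reported.
     */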
private static VmBalloonInfo getBalloonInfo(Map<String, Object> xmlRpcStruct) {
Map<String, Object> balloonInfo = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.vm_balloonInfo);
VmBalloonInfo vmBalloonInfo = new VmBalloonInfo();
if (balloonInfo != null && balloonInfo.size() > 0) {
vmBalloonInfo.setCurrentMemory(AssignLongValue(balloonInfo, VdsProperties.vm_balloon_cur));
vmBalloonInfo.setBalloonMaxMemory(AssignLongValue(balloonInfo, VdsProperties.vm_balloon_max));
vmBalloonInfo.setBalloonTargetMemory(AssignLongValue(balloonInfo, VdsProperties.vm_balloon_target));
vmBalloonInfo.setBalloonMinMemory(AssignLongValue(balloonInfo, VdsProperties.vm_balloon_min));
            if (balloonInfo.size() >= 4) { // the balloon is considered enabled only if all 4 properties are present (available since 3.3)
vmBalloonInfo.setBalloonDeviceEnabled(true);
}
} else {
vmBalloonInfo.setBalloonDeviceEnabled(false);
}
return vmBalloonInfo;
}
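    /**
     * Builds the list of jobs running on the given VM, or returns null when VDSM did not report any.
     */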
private static List<VmJob> getVmJobs(Guid vmId, Map<String, Object> xmlRpcStruct) {
if (!xmlRpcStruct.containsKey(VdsProperties.vmJobs)) {
return null;
}
List<VmJob> vmJobs = new ArrayList<VmJob>();
for (Object jobMap : ((Map<String, Object>) xmlRpcStruct.get(VdsProperties.vmJobs)).values()) {
VmJob job = buildVmJobData(vmId, (Map<String, Object>) jobMap);
vmJobs.add(job);
}
return vmJobs;
}
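    /**
     * Builds a single {@link VmJob} from its XML/RPC representation; block jobs carry additional
     * cursor, bandwidth and image group details.
     */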
private static VmJob buildVmJobData(Guid vmId, Map<String, Object> xmlRpcStruct) {
VmJob ret;
VmJobType jobType = VmJobType.getByName(AssignStringValue(xmlRpcStruct, VdsProperties.vmJobType));
if (jobType == null) {
jobType = VmJobType.UNKNOWN;
}
switch (jobType) {
case BLOCK:
VmBlockJob blockJob = new VmBlockJob();
blockJob.setBlockJobType(VmBlockJobType.getByName(AssignStringValue(xmlRpcStruct, VdsProperties.vmBlockJobType)));
blockJob.setCursorCur(AssignLongValue(xmlRpcStruct, VdsProperties.vmJobCursorCur));
blockJob.setCursorEnd(AssignLongValue(xmlRpcStruct, VdsProperties.vmJobCursorEnd));
blockJob.setBandwidth(AssignLongValue(xmlRpcStruct, VdsProperties.vmJobBandwidth));
blockJob.setImageGroupId(new Guid(AssignStringValue(xmlRpcStruct, VdsProperties.vmJobImageUUID)));
ret = blockJob;
break;
default:
ret = new VmJob();
break;
}
ret.setVmId(vmId);
ret.setId(new Guid(AssignStringValue(xmlRpcStruct, VdsProperties.vmJobId)));
ret.setJobState(VmJobState.NORMAL);
ret.setJobType(jobType);
return ret;
}
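    /**
     * Updates the host dynamic data (CPU topology, memory, packages, supported features, HBAs,
     * kdump and SELinux state, etc.) from the capabilities reported by VDSM.
     */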
public static void updateVDSDynamicData(VDS vds, Map<String, Object> xmlRpcStruct) {
vds.setSupportedClusterLevels(AssignStringValueFromArray(xmlRpcStruct, VdsProperties.supported_cluster_levels));
updateNetworkData(vds, xmlRpcStruct);
updateNumaNodesData(vds, xmlRpcStruct);
vds.setCpuThreads(AssignIntValue(xmlRpcStruct, VdsProperties.cpuThreads));
vds.setCpuCores(AssignIntValue(xmlRpcStruct, VdsProperties.cpu_cores));
vds.setCpuSockets(AssignIntValue(xmlRpcStruct, VdsProperties.cpu_sockets));
vds.setCpuModel(AssignStringValue(xmlRpcStruct, VdsProperties.cpu_model));
vds.setCpuSpeedMh(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_speed_mh));
vds.setPhysicalMemMb(AssignIntValue(xmlRpcStruct, VdsProperties.physical_mem_mb));
vds.setKvmEnabled(AssignBoolValue(xmlRpcStruct, VdsProperties.kvm_enabled));
vds.setReservedMem(AssignIntValue(xmlRpcStruct, VdsProperties.reservedMem));
Integer guestOverhead = AssignIntValue(xmlRpcStruct, VdsProperties.guestOverhead);
vds.setGuestOverhead(guestOverhead != null ? guestOverhead : 0);
vds.setCpuFlags(AssignStringValue(xmlRpcStruct, VdsProperties.cpu_flags));
UpdatePackagesVersions(vds, xmlRpcStruct);
vds.setSupportedEngines(AssignStringValueFromArray(xmlRpcStruct, VdsProperties.supported_engines));
vds.setIScsiInitiatorName(AssignStringValue(xmlRpcStruct, VdsProperties.iSCSIInitiatorName));
vds.setSupportedEmulatedMachines(AssignStringValueFromArray(xmlRpcStruct, VdsProperties.emulatedMachines));
setRngSupportedSourcesToVds(vds, xmlRpcStruct);
String hooksStr = ""; // default value if hooks is not in the xml rpc struct
if (xmlRpcStruct.containsKey(VdsProperties.hooks)) {
hooksStr = xmlRpcStruct.get(VdsProperties.hooks).toString();
}
vds.setHooksStr(hooksStr);
// parse out the HBAs available in this host
Map<String, List<Map<String, String>>> hbas = new HashMap<>();
for (Map.Entry<String, Object[]> el: ((Map<String, Object[]>)xmlRpcStruct.get(VdsProperties.HBAInventory)).entrySet()) {
List<Map<String, String>> devicesList = new ArrayList<Map<String, String>>();
for (Object device: el.getValue()) {
devicesList.add((Map<String, String>)device);
}
hbas.put(el.getKey(), devicesList);
}
vds.setHBAs(hbas);
vds.setBootTime(AssignLongValue(xmlRpcStruct, VdsProperties.bootTime));
vds.setKdumpStatus(KdumpStatus.valueOfNumber(AssignIntValue(xmlRpcStruct, VdsProperties.KDUMP_STATUS)));
Map<String, Object> selinux = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.selinux);
if (selinux != null) {
vds.setSELinuxEnforceMode(AssignIntValue(selinux, VdsProperties.selinux_mode));
} else {
vds.setSELinuxEnforceMode(null);
}
if (xmlRpcStruct.containsKey(VdsProperties.liveSnapshotSupport)) {
vds.setLiveSnapshotSupport(AssignBoolValue(xmlRpcStruct, VdsProperties.liveSnapshotSupport));
}
if (xmlRpcStruct.containsKey(VdsProperties.liveMergeSupport)) {
vds.setLiveMergeSupport(AssignBoolValue(xmlRpcStruct, VdsProperties.liveMergeSupport));
} else {
vds.setLiveMergeSupport(false);
}
}
private static void setRngSupportedSourcesToVds(VDS vds, Map<String, Object> xmlRpcStruct) {
vds.getSupportedRngSources().clear();
String rngSourcesFromStruct = AssignStringValueFromArray(xmlRpcStruct, VdsProperties.rngSources);
if (rngSourcesFromStruct != null) {
vds.getSupportedRngSources().addAll(VmRngDevice.csvToSourcesSet(rngSourcesFromStruct.toUpperCase()));
}
}
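    /**
     * Compares the host time reported by VDSM with the engine time and raises an audit log alert
     * when the drift exceeds the configured maximum (only if time drift checking is enabled).
     */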
public static void checkTimeDrift(VDS vds, Map<String, Object> xmlRpcStruct) {
Boolean isHostTimeDriftEnabled = Config.getValue(ConfigValues.EnableHostTimeDrift);
if (isHostTimeDriftEnabled) {
Integer maxTimeDriftAllowed = Config.getValue(ConfigValues.HostTimeDriftInSec);
Date hostDate = AssignDatetimeValue(xmlRpcStruct, VdsProperties.hostDatetime);
if (hostDate != null) {
Long timeDrift =
TimeUnit.MILLISECONDS.toSeconds(Math.abs(hostDate.getTime() - System.currentTimeMillis()));
if (timeDrift > maxTimeDriftAllowed) {
AuditLogableBase logable = new AuditLogableBase(vds.getId());
logable.addCustomValue("Actual", timeDrift.toString());
logable.addCustomValue("Max", maxTimeDriftAllowed.toString());
AuditLogDirector.log(logable, AuditLogType.VDS_TIME_DRIFT_ALERT);
}
} else {
log.error("Time Drift validation: failed to get Host or Engine time.");
}
}
}
private static void initDisksUsage(Map<String, Object> vmStruct, VmStatistics vm) {
Object[] vmDisksUsage = (Object[]) vmStruct.get(VdsProperties.VM_DISKS_USAGE);
if (vmDisksUsage != null) {
ArrayList<Object> disksUsageList = new ArrayList<Object>(Arrays.asList(vmDisksUsage));
vm.setDisksUsage(SerializationFactory.getSerializer().serializeUnformattedJson(disksUsageList));
}
}
private static void UpdatePackagesVersions(VDS vds, Map<String, Object> xmlRpcStruct) {
vds.setVersionName(AssignStringValue(xmlRpcStruct, VdsProperties.version_name));
vds.setSoftwareVersion(AssignStringValue(xmlRpcStruct, VdsProperties.software_version));
vds.setBuildName(AssignStringValue(xmlRpcStruct, VdsProperties.build_name));
if (xmlRpcStruct.containsKey(VdsProperties.host_os)) {
vds.setHostOs(GetPackageVersionFormated(
(Map<String, Object>) xmlRpcStruct.get(VdsProperties.host_os), true));
}
if (xmlRpcStruct.containsKey(VdsProperties.packages)) {
            // packages is an array of structs, each holding the name, version,
            // release, etc. of a package
for (Object hostPackageMap : (Object[]) xmlRpcStruct.get(VdsProperties.packages)) {
Map<String, Object> hostPackage = (Map<String, Object>) hostPackageMap;
String packageName = AssignStringValue(hostPackage, VdsProperties.package_name);
if (VdsProperties.kvmPackageName.equals(packageName)) {
vds.setKvmVersion(GetPackageVersionFormated(hostPackage, false));
} else if (VdsProperties.spicePackageName.equals(packageName)) {
vds.setSpiceVersion(GetPackageVersionFormated(hostPackage, false));
} else if (VdsProperties.kernelPackageName.equals(packageName)) {
vds.setKernelVersion(GetPackageVersionFormated(hostPackage, false));
}
}
} else if (xmlRpcStruct.containsKey(VdsProperties.packages2)) {
Map<String, Object> packages = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.packages2);
if (packages.containsKey(VdsProperties.vdsmPackageName)) {
Map<String, Object> vdsm = (Map<String, Object>) packages.get(VdsProperties.vdsmPackageName);
vds.setVersion(getPackageRpmVersion("vdsm", vdsm));
}
if (packages.containsKey(VdsProperties.qemuKvmPackageName)) {
Map<String, Object> kvm = (Map<String, Object>) packages.get(VdsProperties.qemuKvmPackageName);
vds.setKvmVersion(getPackageVersionFormated2(kvm));
}
if (packages.containsKey(VdsProperties.libvirtPackageName)) {
Map<String, Object> libvirt = (Map<String, Object>) packages.get(VdsProperties.libvirtPackageName);
vds.setLibvirtVersion(getPackageRpmVersion("libvirt", libvirt));
}
if (packages.containsKey(VdsProperties.spiceServerPackageName)) {
Map<String, Object> spice = (Map<String, Object>) packages.get(VdsProperties.spiceServerPackageName);
vds.setSpiceVersion(getPackageVersionFormated2(spice));
}
if (packages.containsKey(VdsProperties.kernelPackageName)) {
Map<String, Object> kernel = (Map<String, Object>) packages.get(VdsProperties.kernelPackageName);
vds.setKernelVersion(getPackageVersionFormated2(kernel));
}
if (packages.containsKey(VdsProperties.GLUSTER_PACKAGE_NAME)) {
Map<String, Object> gluster = (Map<String, Object>) packages.get(VdsProperties.GLUSTER_PACKAGE_NAME);
vds.setGlusterVersion(getPackageRpmVersion("glusterfs", gluster));
}
}
}
    // Version 2 of GetPackageVersionFormated:
    // since 2.3 we get a dictionary and not a flat list.
    // From now on the package names (of spice, kernel, qemu and libvirt) are the same for both VDSM and the engine.
    // (VDSM used to report the rpm package names, so when they changed in RHEL6 it broke our interface.)
private static String getPackageVersionFormated2(Map<String, Object> hostPackage) {
String packageVersion = (hostPackage.get(VdsProperties.package_version) != null) ? (String) hostPackage
.get(VdsProperties.package_version) : null;
String packageRelease = (hostPackage.get(VdsProperties.package_release) != null) ? (String) hostPackage
.get(VdsProperties.package_release) : null;
StringBuilder sb = new StringBuilder();
if (!StringUtils.isEmpty(packageVersion)) {
sb.append(packageVersion);
}
if (!StringUtils.isEmpty(packageRelease)) {
if (sb.length() > 0) {
sb.append(String.format(" - %1$s", packageRelease));
} else {
sb.append(packageRelease);
}
}
return sb.toString();
}
private static RpmVersion getPackageRpmVersion(String packageName, Map<String, Object> hostPackage) {
String packageVersion = (hostPackage.get(VdsProperties.package_version) != null) ? (String) hostPackage
.get(VdsProperties.package_version) : null;
String packageRelease = (hostPackage.get(VdsProperties.package_release) != null) ? (String) hostPackage
.get(VdsProperties.package_release) : null;
StringBuilder sb = new StringBuilder();
if (!StringUtils.isEmpty(packageName)) {
sb.append(packageName);
}
        boolean hasPackageVersion = !StringUtils.isEmpty(packageVersion);
        boolean hasPackageRelease = !StringUtils.isEmpty(packageRelease);
        if (hasPackageVersion || hasPackageRelease) {
            sb.append("-");
        }
        if (hasPackageVersion) {
            sb.append(packageVersion);
        }
        if (hasPackageRelease) {
            if (sb.length() > 0) {
                sb.append(String.format("-%1$s", packageRelease));
            } else {
                sb.append(packageRelease);
            }
        }
return new RpmVersion(sb.toString());
}
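    /**
     * Updates the host hardware details (manufacturer, product name, version, serial number,
     * UUID and family) from the reported hardware info.
     */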
public static void updateHardwareSystemInformation(Map<String, Object> hwInfo, VDS vds){
vds.setHardwareManufacturer(AssignStringValue(hwInfo, VdsProperties.hwManufacturer));
vds.setHardwareProductName(AssignStringValue(hwInfo, VdsProperties.hwProductName));
vds.setHardwareVersion(AssignStringValue(hwInfo, VdsProperties.hwVersion));
vds.setHardwareSerialNumber(AssignStringValue(hwInfo, VdsProperties.hwSerialNumber));
vds.setHardwareUUID(AssignStringValue(hwInfo, VdsProperties.hwUUID));
vds.setHardwareFamily(AssignStringValue(hwInfo, VdsProperties.hwFamily));
}
private static String GetPackageVersionFormated(Map<String, Object> hostPackage, boolean getName) {
String packageName = AssignStringValue(hostPackage, VdsProperties.package_name);
String packageVersion = AssignStringValue(hostPackage, VdsProperties.package_version);
String packageRelease = AssignStringValue(hostPackage, VdsProperties.package_release);
StringBuilder sb = new StringBuilder();
if (!StringUtils.isEmpty(packageName) && getName) {
sb.append(packageName);
}
if (!StringUtils.isEmpty(packageVersion)) {
if (sb.length() > 0) {
sb.append(String.format(" - %1$s", packageVersion));
} else {
sb.append(packageVersion);
}
}
if (!StringUtils.isEmpty(packageRelease)) {
if (sb.length() > 0) {
sb.append(String.format(" - %1$s", packageRelease));
} else {
sb.append(packageRelease);
}
}
return sb.toString();
}
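    /**
     * Updates the host statistics (memory and CPU usage, network rates, KSM, swap, storage domains
     * data, hosted engine HA state and NUMA statistics) from the stats reported by VDSM.
     */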
public static void updateVDSStatisticsData(VDS vds, Map<String, Object> xmlRpcStruct) {
// ------------- vds memory usage ---------------------------
vds.setUsageMemPercent(AssignIntValue(xmlRpcStruct, VdsProperties.mem_usage));
// ------------- vds network statistics ---------------------
Map<String, Object> interfaces = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORK);
if (interfaces != null) {
int networkUsage = 0;
Map<String, VdsNetworkInterface> nicsByName = Entities.entitiesByName(vds.getInterfaces());
for (Entry<String, Object> entry : interfaces.entrySet()) {
if (nicsByName.containsKey(entry.getKey())) {
VdsNetworkInterface iface = nicsByName.get(entry.getKey());
iface.setVdsId(vds.getId());
Map<String, Object> dict = (Map<String, Object>) entry.getValue();
Double rx_rate = AssignDoubleValue(dict, VdsProperties.rx_rate);
Double rx_dropped = AssignDoubleValue(dict, VdsProperties.rx_dropped);
Double tx_rate = AssignDoubleValue(dict, VdsProperties.tx_rate);
Double tx_dropped = AssignDoubleValue(dict, VdsProperties.tx_dropped);
iface.getStatistics().setReceiveRate(rx_rate != null ? rx_rate : 0);
iface.getStatistics().setReceiveDropRate(rx_dropped != null ? rx_dropped : 0);
iface.getStatistics().setTransmitRate(tx_rate != null ? tx_rate : 0);
iface.getStatistics().setTransmitDropRate(tx_dropped != null ? tx_dropped : 0);
iface.setSpeed(AssignIntValue(dict, VdsProperties.INTERFACE_SPEED));
iface.getStatistics().setStatus(AssignInterfaceStatusValue(dict, VdsProperties.iface_status));
if (!NetworkUtils.isVlan(iface) && !iface.isBondSlave()) {
networkUsage = (int) Math.max(networkUsage, computeInterfaceUsage(iface));
}
}
}
vds.setUsageNetworkPercent(networkUsage);
}
// ----------- vds cpu statistics info ---------------------
vds.setCpuSys(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_sys));
vds.setCpuUser(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_user));
if (vds.getCpuSys() != null && vds.getCpuUser() != null) {
vds.setUsageCpuPercent((int) (vds.getCpuSys() + vds.getCpuUser()));
}
        // CPU load reported by VDSM is in uptime-style format, i.e. normalized
        // to unity, so that, for example, an 8% load is reported as 0.08
Double d = AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_load);
d = (d != null) ? d : 0;
vds.setCpuLoad(d.doubleValue() * 100.0);
vds.setCpuIdle(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_idle));
vds.setMemAvailable(AssignLongValue(xmlRpcStruct, VdsProperties.mem_available));
vds.setMemFree(AssignLongValue(xmlRpcStruct, VdsProperties.memFree));
vds.setMemShared(AssignLongValue(xmlRpcStruct, VdsProperties.mem_shared));
vds.setSwapFree(AssignLongValue(xmlRpcStruct, VdsProperties.swap_free));
vds.setSwapTotal(AssignLongValue(xmlRpcStruct, VdsProperties.swap_total));
vds.setKsmCpuPercent(AssignIntValue(xmlRpcStruct, VdsProperties.ksm_cpu_percent));
vds.setKsmPages(AssignLongValue(xmlRpcStruct, VdsProperties.ksm_pages));
vds.setKsmState(AssignBoolValue(xmlRpcStruct, VdsProperties.ksm_state));
// dynamic data got from GetVdsStats
if (xmlRpcStruct.containsKey(VdsProperties.transparent_huge_pages_state)) {
vds.setTransparentHugePagesState(EnumUtils.valueOf(VdsTransparentHugePagesState.class, xmlRpcStruct
.get(VdsProperties.transparent_huge_pages_state).toString(), true));
}
if (xmlRpcStruct.containsKey(VdsProperties.anonymous_transparent_huge_pages)) {
vds.setAnonymousHugePages(AssignIntValue(xmlRpcStruct, VdsProperties.anonymous_transparent_huge_pages));
}
vds.setNetConfigDirty(AssignBoolValue(xmlRpcStruct, VdsProperties.netConfigDirty));
vds.setImagesLastCheck(AssignDoubleValue(xmlRpcStruct, VdsProperties.images_last_check));
vds.setImagesLastDelay(AssignDoubleValue(xmlRpcStruct, VdsProperties.images_last_delay));
Integer vm_count = AssignIntValue(xmlRpcStruct, VdsProperties.vm_count);
vds.setVmCount(vm_count == null ? 0 : vm_count);
vds.setVmActive(AssignIntValue(xmlRpcStruct, VdsProperties.vm_active));
vds.setVmMigrating(AssignIntValue(xmlRpcStruct, VdsProperties.vm_migrating));
updateVDSDomainData(vds, xmlRpcStruct);
updateLocalDisksUsage(vds, xmlRpcStruct);
// hosted engine
Integer haScore = null;
Boolean haIsConfigured = null;
Boolean haIsActive = null;
Boolean haGlobalMaint = null;
Boolean haLocalMaint = null;
if (xmlRpcStruct.containsKey(VdsProperties.ha_stats)) {
Map<String, Object> haStats = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.ha_stats);
if (haStats != null) {
haScore = AssignIntValue(haStats, VdsProperties.ha_stats_score);
haIsConfigured = AssignBoolValue(haStats, VdsProperties.ha_stats_is_configured);
haIsActive = AssignBoolValue(haStats, VdsProperties.ha_stats_is_active);
haGlobalMaint = AssignBoolValue(haStats, VdsProperties.ha_stats_global_maintenance);
haLocalMaint = AssignBoolValue(haStats, VdsProperties.ha_stats_local_maintenance);
}
} else {
haScore = AssignIntValue(xmlRpcStruct, VdsProperties.ha_score);
// prior to 3.4, haScore was returned if ha was installed; assume active if > 0
if (haScore != null) {
haIsConfigured = true;
haIsActive = (haScore > 0);
}
}
vds.setHighlyAvailableScore(haScore != null ? haScore : 0);
vds.setHighlyAvailableIsConfigured(haIsConfigured != null ? haIsConfigured : false);
vds.setHighlyAvailableIsActive(haIsActive != null ? haIsActive : false);
vds.setHighlyAvailableGlobalMaintenance(haGlobalMaint != null ? haGlobalMaint : false);
vds.setHighlyAvailableLocalMaintenance(haLocalMaint != null ? haLocalMaint : false);
vds.setBootTime(AssignLongValue(xmlRpcStruct, VdsProperties.bootTime));
updateNumaStatisticsData(vds, xmlRpcStruct);
}
private static double computeInterfaceUsage(VdsNetworkInterface iface) {
return Math.max(truncatePercentage(iface.getStatistics().getReceiveRate()),
truncatePercentage(iface.getStatistics().getTransmitRate()));
}
private static double truncatePercentage(double value) {
return Math.min(100, value);
}
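    /**
     * Updates the per-core CPU statistics and aggregates them into per-NUMA-node statistics,
     * including free memory and memory usage per node when reported.
     */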
public static void updateNumaStatisticsData(VDS vds, Map<String, Object> xmlRpcStruct) {
List<VdsNumaNode> vdsNumaNodes = new ArrayList<>();
List<CpuStatistics> cpuStatsData = new ArrayList<>();
if (xmlRpcStruct.containsKey(VdsProperties.CPU_STATS)) {
Map<String, Map<String, Object>> cpuStats = (Map<String, Map<String, Object>>)
xmlRpcStruct.get(VdsProperties.CPU_STATS);
Map<Integer, List<CpuStatistics>> numaNodeCpuStats = new HashMap<>();
for (Map.Entry<String, Map<String, Object>> item : cpuStats.entrySet()) {
CpuStatistics data = buildVdsCpuStatistics(item);
cpuStatsData.add(data);
int numaNodeIndex = AssignIntValue(item.getValue(), VdsProperties.NUMA_NODE_INDEX);
if (!numaNodeCpuStats.containsKey(numaNodeIndex)) {
numaNodeCpuStats.put(numaNodeIndex, new ArrayList<CpuStatistics>());
}
numaNodeCpuStats.get(numaNodeIndex).add(data);
}
DecimalFormat percentageFormatter = new DecimalFormat("#.##");
for (Map.Entry<Integer, List<CpuStatistics>> item : numaNodeCpuStats.entrySet()) {
VdsNumaNode node = buildVdsNumaNodeStatistics(percentageFormatter, item);
vdsNumaNodes.add(node);
}
}
if (xmlRpcStruct.containsKey(VdsProperties.NUMA_NODE_FREE_MEM_STAT)) {
Map<String, Map<String, Object>> memStats = (Map<String, Map<String, Object>>)
xmlRpcStruct.get(VdsProperties.NUMA_NODE_FREE_MEM_STAT);
for (Map.Entry<String, Map<String, Object>> item : memStats.entrySet()) {
VdsNumaNode node = NumaUtils.getVdsNumaNodeByIndex(vdsNumaNodes, Integer.valueOf(item.getKey()));
if (node != null) {
node.getNumaNodeStatistics().setMemFree(AssignLongValue(item.getValue(),
VdsProperties.NUMA_NODE_FREE_MEM));
node.getNumaNodeStatistics().setMemUsagePercent(AssignIntValue(item.getValue(),
VdsProperties.NUMA_NODE_MEM_PERCENT));
}
}
}
vds.getNumaNodeList().clear();
vds.getNumaNodeList().addAll(vdsNumaNodes);
vds.getStatisticsData().getCpuCoreStatistics().clear();
vds.getStatisticsData().getCpuCoreStatistics().addAll(cpuStatsData);
}
private static VdsNumaNode buildVdsNumaNodeStatistics(DecimalFormat percentageFormatter,
Map.Entry<Integer, List<CpuStatistics>> item) {
VdsNumaNode node = new VdsNumaNode();
NumaNodeStatistics nodeStat = new NumaNodeStatistics();
double nodeCpuUser = 0.0;
double nodeCpuSys = 0.0;
double nodeCpuIdle = 0.0;
for (CpuStatistics cpuStat : item.getValue()) {
nodeCpuUser += cpuStat.getCpuUser();
nodeCpuSys += cpuStat.getCpuSys();
nodeCpuIdle += cpuStat.getCpuIdle();
}
nodeStat.setCpuUser(Double.valueOf(percentageFormatter.format(nodeCpuUser / item.getValue().size())));
nodeStat.setCpuSys(Double.valueOf(percentageFormatter.format(nodeCpuSys / item.getValue().size())));
nodeStat.setCpuIdle(Double.valueOf(percentageFormatter.format(nodeCpuIdle / item.getValue().size())));
nodeStat.setCpuUsagePercent((int) (nodeStat.getCpuSys() + nodeStat.getCpuUser()));
node.setIndex(item.getKey());
node.setNumaNodeStatistics(nodeStat);
return node;
}
private static CpuStatistics buildVdsCpuStatistics(Map.Entry<String, Map<String, Object>> item) {
CpuStatistics data = new CpuStatistics();
data.setCpuId(Integer.valueOf(item.getKey()));
data.setCpuUser(AssignDoubleValue(item.getValue(), VdsProperties.NUMA_CPU_USER));
data.setCpuSys(AssignDoubleValue(item.getValue(), VdsProperties.NUMA_CPU_SYS));
data.setCpuIdle(AssignDoubleValue(item.getValue(), VdsProperties.NUMA_CPU_IDLE));
data.setCpuUsagePercent((int) (data.getCpuSys() + data.getCpuUser()));
return data;
}
/**
     * Update {@link VDS#setLocalDisksUsage(Map)} with a map of path usage extracted from the returned value. The
     * usage is reported in MB.
*
* @param vds
* The VDS object to update.
* @param xmlRpcStruct
* The XML/RPC to extract the usage from.
*/
protected static void updateLocalDisksUsage(VDS vds, Map<String, Object> xmlRpcStruct) {
if (xmlRpcStruct.containsKey(VdsProperties.DISK_STATS)) {
Map<String, Object> diskStatsStruct = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.DISK_STATS);
Map<String, Long> diskStats = new HashMap<String, Long>();
vds.setLocalDisksUsage(diskStats);
for (Entry<String, Object> entry : diskStatsStruct.entrySet()) {
Map<String, Object> pathStatsStruct = (Map<String, Object>) entry.getValue();
diskStats.put(entry.getKey(), AssignLongValue(pathStatsStruct, VdsProperties.DISK_STATS_FREE));
}
}
}
private static void updateVDSDomainData(VDS vds, Map<String, Object> xmlRpcStruct) {
if (xmlRpcStruct.containsKey(VdsProperties.domains)) {
Map<String, Object> domains = (Map<String, Object>)
xmlRpcStruct.get(VdsProperties.domains);
ArrayList<VDSDomainsData> domainsData = new ArrayList<VDSDomainsData>();
for (Map.Entry<String, ?> value : domains.entrySet()) {
try {
VDSDomainsData data = new VDSDomainsData();
data.setDomainId(new Guid(value.getKey().toString()));
Map<String, Object> internalValue = (Map<String, Object>) value.getValue();
double lastCheck = 0;
data.setCode((Integer) (internalValue).get(VdsProperties.code));
if (internalValue.containsKey(VdsProperties.lastCheck)) {
lastCheck = Double.parseDouble((String) internalValue.get(VdsProperties.lastCheck));
}
data.setLastCheck(lastCheck);
double delay = 0;
if (internalValue.containsKey(VdsProperties.delay)) {
delay = Double.parseDouble((String) internalValue.get(VdsProperties.delay));
}
data.setDelay(delay);
domainsData.add(data);
} catch (Exception e) {
log.error("failed building domains", e);
}
}
vds.setDomains(domainsData);
}
}
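    /**
     * Maps the interface status string reported by VDSM to an {@link InterfaceStatus}:
     * "up" is mapped to UP, any other non-empty value to DOWN, and a missing or empty value to NONE.
     */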
private static InterfaceStatus AssignInterfaceStatusValue(Map<String, Object> input, String name) {
InterfaceStatus ifaceStatus = InterfaceStatus.NONE;
if (input.containsKey(name)) {
String stringValue = (String) ((input.get(name) instanceof String) ? input.get(name) : null);
if (!StringUtils.isEmpty(stringValue)) {
if (stringValue.toLowerCase().trim().equals("up")) {
ifaceStatus = InterfaceStatus.UP;
} else {
ifaceStatus = InterfaceStatus.DOWN;
}
}
}
return ifaceStatus;
}
private static Double AssignDoubleValue(Map<String, Object> input, String name) {
Double returnValue = null;
if (input.containsKey(name)) {
String stringValue = (String) ((input.get(name) instanceof String) ? input.get(name) : null);
returnValue = (stringValue == null) ? null : Double.parseDouble(stringValue);
}
return returnValue;
}
/**
     * Does the same as AssignDoubleValue, except that a null result is converted to 0.
     * @param input the input struct
     * @param name the name of the field to cast to double
     * @return the double value
*/
private static Double assignDoubleValueWithNullProtection(Map<String, Object> input, String name) {
Double doubleValue = AssignDoubleValue(input, name);
return (doubleValue == null ? Double.valueOf(0.0) : doubleValue);
}
private static Integer AssignIntValue(Map input, String name) {
if (input.containsKey(name)) {
if (input.get(name) instanceof Integer) {
return (Integer) input.get(name);
}
String stringValue = (String) input.get(name);
            if (!StringUtils.isEmpty(stringValue)) {
                // in case the input is decimal and we need an int
                stringValue = stringValue.split("[.]", -1)[0];
}
try {
int intValue = Integer.parseInt(stringValue);
return intValue;
} catch (NumberFormatException nfe) {
String errMsg = String.format("Failed to parse %1$s value %2$s to integer", name, stringValue);
log.error(errMsg, nfe);
}
}
return null;
}
private static Long AssignLongValue(Map<String, Object> input, String name) {
if (input.containsKey(name)) {
if (input.get(name) instanceof Long || input.get(name) instanceof Integer) {
return Long.parseLong(input.get(name).toString());
}
String stringValue = (String) ((input.get(name) instanceof String) ? input.get(name) : null);
            if (!StringUtils.isEmpty(stringValue)) {
                // in case the input is decimal and we need a long
                stringValue = stringValue.split("[.]", -1)[0];
}
try {
return Long.parseLong(stringValue);
} catch (NumberFormatException e) {
log.errorFormat("Failed to parse {0} value {1} to long", name, stringValue);
}
}
return null;
}
private static String AssignStringValue(Map<String, Object> input, String name) {
if (input.containsKey(name)) {
return (String) ((input.get(name) instanceof String) ? input.get(name) : null);
}
return null;
}
private static String AssignStringValueFromArray(Map<String, Object> input, String name) {
if (input.containsKey(name)) {
String[] arr = (String[]) ((input.get(name) instanceof String[]) ? input.get(name) : null);
if (arr == null) {
Object[] arr2 = (Object[]) ((input.get(name) instanceof Object[]) ? input.get(name) : null);
if (arr2 != null) {
arr = new String[arr2.length];
                    for (int i = 0; i < arr2.length; i++) {
                        arr[i] = arr2[i].toString();
                    }
}
}
if (arr != null) {
return StringUtils.join(arr, ',');
}
}
return null;
}
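    /**
     * Converts an epoch-based timestamp reported by VDSM into a {@link Date}, returning null
     * when the field is missing or the conversion fails.
     */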
private static Date AssignDateTImeFromEpoch(Map<String, Object> input, String name) {
Date retval = null;
try {
if (input.containsKey(name)) {
                Double secsSinceEpoch = (Double) input.get(name);
                Calendar calendar = Calendar.getInstance();
                // the value is reported in seconds since the epoch, while setTimeInMillis expects milliseconds
                calendar.setTimeInMillis((long) (secsSinceEpoch * 1000));
retval = calendar.getTime();
}
} catch (RuntimeException ex) {
String msg = String.format("VdsBroker::AssignDateTImeFromEpoch - failed to convert field %1$s to dateTime",
name);
log.warn(msg, ex);
retval = null;
}
return retval;
}
private static Date AssignDatetimeValue(Map<String, Object> input, String name) {
if (input.containsKey(name)) {
if (input.get(name) instanceof Date) {
return (Date) input.get(name);
}
DateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z");
try {
String dateStr = input.get(name).toString().replaceFirst("T", " ").trim();
return formatter.parse(dateStr);
            } catch (ParseException e) {
                log.error(String.format("Failed to parse %1$s value %2$s to date", name, input.get(name)), e);
            }
}
return null;
}
private static Boolean AssignBoolValue(Map<String, Object> input, String name) {
if (input.containsKey(name)) {
if (input.get(name) instanceof Boolean) {
return (Boolean) input.get(name);
}
return Boolean.parseBoolean(input.get(name).toString());
}
return Boolean.FALSE;
}
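    /**
     * Builds the VM disk statistics (read/write rates, actual size and latencies) from the disks
     * map reported by VDSM. Latencies are converted from nanoseconds to seconds.
     */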
private static void initDisks(Map<String, Object> vmStruct, VmDynamic vm) {
Map<String, Object> disks = (Map<String, Object>) vmStruct.get(VdsProperties.vm_disks);
ArrayList<DiskImageDynamic> disksData = new ArrayList<DiskImageDynamic>();
for (Object diskAsObj : disks.values()) {
Map<String, Object> disk = (Map<String, Object>) diskAsObj;
DiskImageDynamic diskData = new DiskImageDynamic();
String imageGroupIdString = AssignStringValue(disk, VdsProperties.image_group_id);
if (!StringUtils.isEmpty(imageGroupIdString)) {
Guid imageGroupIdGuid = new Guid(imageGroupIdString);
diskData.setId(imageGroupIdGuid);
diskData.setread_rate(AssignIntValue(disk, VdsProperties.vm_disk_read_rate));
diskData.setwrite_rate(AssignIntValue(disk, VdsProperties.vm_disk_write_rate));
if (disk.containsKey(VdsProperties.disk_actual_size)) {
Long size = AssignLongValue(disk, VdsProperties.disk_actual_size);
diskData.setactual_size(size != null ? size * 512 : 0);
} else if (disk.containsKey(VdsProperties.disk_true_size)) {
Long size = AssignLongValue(disk, VdsProperties.disk_true_size);
diskData.setactual_size(size != null ? size : 0);
}
if (disk.containsKey(VdsProperties.vm_disk_read_latency)) {
diskData.setReadLatency(assignDoubleValueWithNullProtection(disk,
VdsProperties.vm_disk_read_latency) / NANO_SECONDS);
}
if (disk.containsKey(VdsProperties.vm_disk_write_latency)) {
diskData.setWriteLatency(assignDoubleValueWithNullProtection(disk,
VdsProperties.vm_disk_write_latency) / NANO_SECONDS);
}
if (disk.containsKey(VdsProperties.vm_disk_flush_latency)) {
diskData.setFlushLatency(assignDoubleValueWithNullProtection(disk,
VdsProperties.vm_disk_flush_latency) / NANO_SECONDS);
}
disksData.add(diskData);
}
}
vm.setDisks(disksData);
}
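    /**
     * Builds a comma-separated list of the applications reported by the guest agent and sets it
     * on the VM dynamic data.
     */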
private static void initAppsList(Map<String, Object> vmStruct, VmDynamic vm) {
if (vmStruct.containsKey(VdsProperties.app_list)) {
Object tempAppsList = vmStruct.get(VdsProperties.app_list);
if (tempAppsList instanceof Object[]) {
Object[] apps = (Object[]) tempAppsList;
StringBuilder builder = new StringBuilder();
boolean firstTime = true;
for (Object app : apps) {
                    String appString = (String) ((app instanceof String) ? app : null);
                    if (appString == null) {
                        log.warnFormat("Failed to convert app: [{0}] to string", app);
                    }
if (!firstTime) {
builder.append(",");
} else {
firstTime = false;
}
builder.append(appString);
}
vm.setAppList(builder.toString());
} else {
vm.setAppList("");
}
}
}
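    /**
     * Converts the VM status string reported by VDSM into a {@link VMStatus} value, handling the
     * migration states and the legacy "Running"/"Unknown" states explicitly.
     */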
private static VMStatus convertToVmStatus(String statusName) {
VMStatus status = VMStatus.Unassigned;
        // TODO: The following condition should be deleted as soon as we drop compatibility with 3.3, since the
        // "Running" state will be replaced by the "Up" state and "Unknown" will no longer exist. The "Up" state
        // will be processed by EnumUtils like the other states below.
if ("Running".equals(statusName) || "Unknown".equals(statusName)) {
status = VMStatus.Up;
}
else if ("Migration Source".equals(statusName)) {
status = VMStatus.MigratingFrom;
}
else if ("Migration Destination".equals(statusName)) {
status = VMStatus.MigratingTo;
} else {
try {
statusName = statusName.replace(" ", "");
status = EnumUtils.valueOf(VMStatus.class, statusName, true);
} catch (Exception e) {
log.errorFormat("Vm status: {0} illegal", statusName);
}
}
return status;
}
/**
* Updates the host network data with the network data reported by the host
*
* @param vds
* The host to update
* @param xmlRpcStruct
* A nested map contains network interfaces data
*/
public static void updateNetworkData(VDS vds, Map<String, Object> xmlRpcStruct) {
vds.setActiveNic(AssignStringValue(xmlRpcStruct, VdsProperties.NETWORK_LAST_CLIENT_INTERFACE));
List<VdsNetworkInterface> oldInterfaces =
DbFacade.getInstance().getInterfaceDao().getAllInterfacesForVds(vds.getId());
vds.getInterfaces().clear();
addHostNetworkInterfaces(vds, xmlRpcStruct);
addHostVlanDevices(vds, xmlRpcStruct);
addHostBondDevices(vds, xmlRpcStruct);
addHostNetworksAndUpdateInterfaces(vds, xmlRpcStruct);
// set bonding options
setBondingOptions(vds, oldInterfaces);
// This information was added in 3.1, so don't use it if it's not there.
if (xmlRpcStruct.containsKey(VdsProperties.netConfigDirty)) {
vds.setNetConfigDirty(AssignBoolValue(xmlRpcStruct, VdsProperties.netConfigDirty));
}
}
private static void addHostNetworksAndUpdateInterfaces(VDS vds,
Map<String, Object> xmlRpcStruct) {
        // Networks collection (each name points to a list of nics or bonds)
Map<String, Object> networks = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORKS);
if (networks != null) {
vds.getNetworks().clear();
for (Entry<String, Object> entry : networks.entrySet()) {
Map<String, Object> network = (Map<String, Object>) entry.getValue();
if (network != null) {
Network net = createNetworkData(entry.getKey(), network);
List<VdsNetworkInterface> interfaces = findNetworkInterfaces(vds, xmlRpcStruct, network);
for (VdsNetworkInterface iface : interfaces) {
updateNetworkDetailsInInterface(iface,
network,
vds,
net);
}
vds.getNetworks().add(net);
reportInvalidInterfacesForNetwork(interfaces, net, vds);
}
}
}
}
/**
     * Reports a warning to the audit log if a bridge is connected to more than one interface, which is considered a
     * bad configuration.
*
* @param interfaces
* The network's interfaces
* @param network
* The network to report for
* @param vds
* The host in which the network is defined
*/
private static void reportInvalidInterfacesForNetwork(List<VdsNetworkInterface> interfaces, Network network, VDS vds) {
if (interfaces.isEmpty()) {
AuditLogDirector.log(createHostNetworkAuditLog(network, vds), AuditLogType.NETWORK_WITHOUT_INTERFACES);
} else if (interfaces.size() > 1) {
AuditLogableBase logable = createHostNetworkAuditLog(network, vds);
logable.addCustomValue("Interfaces", StringUtils.join(Entities.objectNames(interfaces), ","));
AuditLogDirector.log(logable, AuditLogType.BRIDGED_NETWORK_OVER_MULTIPLE_INTERFACES);
}
}
protected static AuditLogableBase createHostNetworkAuditLog(Network network, VDS vds) {
AuditLogableBase logable = new AuditLogableBase(vds.getId());
logable.addCustomValue("NetworkName", network.getName());
return logable;
}
private static List<VdsNetworkInterface> findNetworkInterfaces(VDS vds,
Map<String, Object> xmlRpcStruct,
Map<String, Object> network) {
Map<String, VdsNetworkInterface> vdsInterfaces = Entities.entitiesByName(vds.getInterfaces());
List<VdsNetworkInterface> interfaces = new ArrayList<VdsNetworkInterface>();
if (FeatureSupported.bridgesReportByVdsm(vds.getVdsGroupCompatibilityVersion())) {
VdsNetworkInterface iface = null;
String interfaceName = (String) network.get(VdsProperties.INTERFACE);
if (interfaceName != null) {
iface = vdsInterfaces.get(interfaceName);
if (iface == null) {
Map<String, Object> bridges =
(Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORK_BRIDGES);
if (bridges != null && bridges.containsKey(interfaceName)) {
interfaces.addAll(findBridgedNetworkInterfaces((Map<String, Object>) bridges.get(interfaceName),
vdsInterfaces));
}
} else {
interfaces.add(iface);
}
}
} else {
interfaces.addAll(findBridgedNetworkInterfaces(network, vdsInterfaces));
}
return interfaces;
}
private static Network createNetworkData(String networkName, Map<String, Object> network) {
Network net = new Network();
net.setName(networkName);
net.setAddr((String) network.get("addr"));
net.setSubnet((String) network.get("netmask"));
net.setGateway((String) network.get(VdsProperties.GLOBAL_GATEWAY));
if (StringUtils.isNotBlank((String) network.get(VdsProperties.MTU))) {
net.setMtu(Integer.parseInt((String) network.get(VdsProperties.MTU)));
}
return net;
}
private static List<VdsNetworkInterface> findBridgedNetworkInterfaces(Map<String, Object> bridge,
Map<String, VdsNetworkInterface> vdsInterfaces) {
List<VdsNetworkInterface> interfaces = new ArrayList<VdsNetworkInterface>();
Object[] ports = (Object[]) bridge.get("ports");
if (ports != null) {
for (Object port : ports) {
if (vdsInterfaces.containsKey(port.toString())) {
interfaces.add(vdsInterfaces.get(port.toString()));
}
}
}
return interfaces;
}
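    /**
     * Updates the host interfaces list with the bond devices reported by VDSM, including their
     * slaves, addresses, MTU, bonding options and boot protocol.
     */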
private static void addHostBondDevices(VDS vds, Map<String, Object> xmlRpcStruct) {
Map<String, Object> bonds = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORK_BONDINGS);
if (bonds != null) {
for (Entry<String, Object> entry : bonds.entrySet()) {
VdsNetworkInterface iface = new VdsNetworkInterface();
VdsNetworkStatistics iStats = new VdsNetworkStatistics();
iface.setStatistics(iStats);
iStats.setId(Guid.newGuid());
iStats.setVdsId(vds.getId());
iface.setId(iStats.getId());
iface.setName(entry.getKey());
iface.setVdsId(vds.getId());
iface.setBonded(true);
Map<String, Object> bond = (Map<String, Object>) entry.getValue();
if (bond != null) {
iface.setMacAddress((String) bond.get("hwaddr"));
iface.setAddress((String) bond.get("addr"));
iface.setSubnet((String) bond.get("netmask"));
if (bond.get("slaves") != null) {
addBondDeviceToHost(vds, iface, (Object[]) bond.get("slaves"));
}
if (StringUtils.isNotBlank((String) bond.get(VdsProperties.MTU))) {
iface.setMtu(Integer.parseInt((String) bond.get(VdsProperties.MTU)));
}
Map<String, Object> config =
(Map<String, Object>) bond.get("cfg");
if (config != null && config.get("BONDING_OPTS") != null) {
iface.setBondOptions(config.get("BONDING_OPTS").toString());
}
addBootProtocol(config, vds, iface);
}
}
}
}
/**
* Updates the host interfaces list with vlan devices
*
* @param vds
* The host to update
* @param xmlRpcStruct
     *            a map containing pairs of vlan device names and their vlan data
*/
private static void addHostVlanDevices(VDS vds, Map<String, Object> xmlRpcStruct) {
// vlans
Map<String, Object> vlans = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORK_VLANS);
if (vlans != null) {
for (Entry<String, Object> entry : vlans.entrySet()) {
VdsNetworkInterface iface = new VdsNetworkInterface();
VdsNetworkStatistics iStats = new VdsNetworkStatistics();
iface.setStatistics(iStats);
iStats.setId(Guid.newGuid());
iface.setId(iStats.getId());
String vlanDeviceName = entry.getKey();
iface.setName(vlanDeviceName);
iface.setVdsId(vds.getId());
Map<String, Object> vlan = (Map<String, Object>) entry.getValue();
if (vlan.get(VdsProperties.VLAN_ID) != null && vlan.get(VdsProperties.BASE_INTERFACE) != null) {
iface.setVlanId((Integer) vlan.get(VdsProperties.VLAN_ID));
iface.setBaseInterface((String) vlan.get(VdsProperties.BASE_INTERFACE));
} else if (vlanDeviceName.contains(".")) {
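                    // legacy reporting format: derive the vlan id and base device from a name such as "eth0.100"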
String[] names = vlanDeviceName.split("[.]", -1);
String vlanId = names[1];
iface.setVlanId(Integer.parseInt(vlanId));
iface.setBaseInterface(names[0]);
}
iface.setAddress((String) vlan.get("addr"));
iface.setSubnet((String) vlan.get("netmask"));
if (StringUtils.isNotBlank((String) vlan.get(VdsProperties.MTU))) {
iface.setMtu(Integer.parseInt((String) vlan.get(VdsProperties.MTU)));
}
iStats.setVdsId(vds.getId());
addBootProtocol((Map<String, Object>) vlan.get("cfg"), vds, iface);
vds.getInterfaces().add(iface);
}
}
}
/**
* Updates the host network interfaces with the collected data from the host
*
* @param vds
* The host to update its interfaces
* @param xmlRpcStruct
     *            A nested map containing the network interfaces data
*/
private static void addHostNetworkInterfaces(VDS vds, Map<String, Object> xmlRpcStruct) {
Map<String, Object> nics = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORK_NICS);
if (nics != null) {
for (Entry<String, Object> entry : nics.entrySet()) {
VdsNetworkInterface iface = new VdsNetworkInterface();
VdsNetworkStatistics iStats = new VdsNetworkStatistics();
iface.setStatistics(iStats);
iStats.setId(Guid.newGuid());
iface.setId(iStats.getId());
iface.setName(entry.getKey());
iface.setVdsId(vds.getId());
updateNetworkInterfaceDataFromHost(iface, vds, (Map<String, Object>) entry.getValue());
iStats.setVdsId(vds.getId());
vds.getInterfaces().add(iface);
}
}
}
/**
* Updates a given interface by data as collected from the host.
*
     * @param iface
     *            The interface to update
     * @param host
     *            The host that owns the interface
     * @param nic
     *            A key-value map of the interface properties and their values
*/
private static void updateNetworkInterfaceDataFromHost(
VdsNetworkInterface iface, VDS host, Map<String, Object> nic) {
if (nic != null) {
if (nic.get("speed") != null) {
Object speed = nic.get("speed");
iface.setSpeed((Integer) speed);
}
iface.setAddress((String) nic.get("addr"));
iface.setSubnet((String) nic.get("netmask"));
iface.setMacAddress((String) nic.get("hwaddr"));
// if we get "permhwaddr", we are a part of a bond and we use that as the mac address
String mac = (String) nic.get("permhwaddr");
if (mac != null) {
//TODO remove when the minimal supported vdsm version is >=3.6
// in older VDSM version, slave's Mac is in upper case
iface.setMacAddress(mac.toLowerCase());
}
if (StringUtils.isNotBlank((String) nic.get(VdsProperties.MTU))) {
iface.setMtu(Integer.parseInt((String) nic.get(VdsProperties.MTU)));
}
addBootProtocol((Map<String, Object>) nic.get("cfg"), host, iface);
}
}
/**
* Update the network details on a given interface.
*
* @param iface
* The interface to update.
* @param network
     *            Network struct to get details from.
     * @param host
     *            The host that owns the interface.
     * @param net
     *            Network to get details from.
*/
private static void updateNetworkDetailsInInterface(VdsNetworkInterface iface,
Map<String, Object> network,
VDS host,
Network net) {
if (iface != null) {
iface.setNetworkName(net.getName());
            // mark this interface as the management interface (a bit-flag on the type field)
if (StringUtils.equals(iface.getNetworkName(), NetworkUtils.getEngineNetwork())) {
iface.setType(iface.getType() | VdsInterfaceType.MANAGEMENT.getValue());
}
iface.setAddress(net.getAddr());
iface.setSubnet(net.getSubnet());
boolean bridgedNetwork = isBridgedNetwork(network);
iface.setBridged(bridgedNetwork);
setGatewayIfNecessary(iface, host, net.getGateway());
if (bridgedNetwork) {
Map<String, Object> networkConfig = (Map<String, Object>) network.get("cfg");
addBootProtocol(networkConfig, host, iface);
}
if (FeatureSupported.hostNetworkQos(Collections.max(host.getSupportedClusterVersionsSet()))) {
NetworkQosMapper qosMapper =
new NetworkQosMapper(network, VdsProperties.HOST_QOS_INBOUND, VdsProperties.HOST_QOS_OUTBOUND);
iface.setQos(qosMapper.deserialize());
}
}
}
/**
     * Returns true if vdsm doesn't report the 'bridged' attribute, or its actual value if it is reported.<br>
     * The assumption is that a bridge-less network isn't supported if the 'bridged' attribute wasn't reported.<br>
     * Bridge-less networks must report 'false' for this property.
     *
     * @param network
     *            The network whose bridge attribute is evaluated
     * @return true if no attribute is reported, otherwise its actual value
*/
private static boolean isBridgedNetwork(Map<String, Object> network) {
return network.get("bridged") == null || Boolean.parseBoolean(network.get("bridged").toString());
}
    // We check for old bonding options:
    // if we had a value for the bonding options (i.e. the user set it via the UI)
    // and the host does not return its bonding options (hosts below 2.2.4), we override
    // the "new" bonding options with the old ones, but only if the new value is null and the old one is not.
private static void setBondingOptions(VDS vds, List<VdsNetworkInterface> oldInterfaces) {
for (VdsNetworkInterface iface : oldInterfaces) {
if (iface.getBondOptions() != null) {
for (VdsNetworkInterface newIface : vds.getInterfaces()) {
if (iface.getName().equals(newIface.getName()) && newIface.getBondOptions() == null) {
newIface.setBondOptions(iface.getBondOptions());
break;
}
}
}
}
}
private static void addBootProtocol(Map<String, Object> cfg, VDS host, VdsNetworkInterface iface) {
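        // "cfg" holds the interface's ifcfg-style configuration as reported by the host; a minimal sketch of the
        // assumed shape: {"BOOTPROTO": "dhcp"} or {"BOOTPROTO": "none", "IPADDR": "192.0.2.10", GATEWAY: "192.0.2.1"}
        // (the addresses are illustrative only; BOOTPROTO, IPADDR and VdsProperties.GATEWAY are the keys read below)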
NetworkBootProtocol bootproto = NetworkBootProtocol.NONE;
if (cfg != null) {
String bootProtocol = (String) cfg.get("BOOTPROTO");
if (bootProtocol != null) {
                if (bootProtocol.equalsIgnoreCase("dhcp")) {
                    bootproto = NetworkBootProtocol.DHCP;
                } else if (bootProtocol.equalsIgnoreCase("none") || bootProtocol.equalsIgnoreCase("static")) {
if (StringUtils.isNotEmpty((String) cfg.get("IPADDR"))) {
bootproto = NetworkBootProtocol.STATIC_IP;
}
}
} else if (StringUtils.isNotEmpty((String) cfg.get("IPADDR"))) {
bootproto = NetworkBootProtocol.STATIC_IP;
}
if (bootproto == NetworkBootProtocol.STATIC_IP) {
String gateway = (String) cfg.get(VdsProperties.GATEWAY);
if (StringUtils.isNotEmpty(gateway)) {
setGatewayIfNecessary(iface, host, gateway.toString());
}
}
}
iface.setBootProtocol(bootproto);
}
private static void addBondDeviceToHost(VDS vds, VdsNetworkInterface iface, Object[] interfaces) {
vds.getInterfaces().add(iface);
if (interfaces != null) {
for (Object name : interfaces) {
for (VdsNetworkInterface tempInterface : vds.getInterfaces()) {
if (tempInterface.getName().equals(name.toString())) {
tempInterface.setBondName(iface.getName());
break;
}
}
}
}
}
/**
* Store the gateway for either of these cases:
* 1. any host network, in a cluster that supports multiple gateways
* 2. management network, no matter the cluster compatibility version
* 3. the active interface (could happen when there is no management network yet)
* If gateway was provided for non-management network when multiple gateways aren't supported, its value should be ignored.
*
* @param iface
* the host network interface
* @param host
* the host whose interfaces are being edited
* @param gateway
* the gateway value to be set
*/
private static void setGatewayIfNecessary(VdsNetworkInterface iface, VDS host, String gateway) {
if (FeatureSupported.multipleGatewaysSupported(host.getVdsGroupCompatibilityVersion())
|| NetworkUtils.getEngineNetwork().equals(iface.getNetworkName())
|| iface.getName().equals(host.getActiveNic())) {
iface.setGateway(gateway);
}
}
/**
     * Creates a list of {@link VmGuestAgentInterface} from the {@link VdsProperties#VM_NETWORK_INTERFACES} entry of
     * the VM struct
*
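     * <p>
     * For illustration, a minimal sketch of the assumed reported structure (the key names stand in for the values of
     * the VdsProperties constants used below; all data is hypothetical):
     *
     * <pre>
     * [{name: 'eth0', mac: '00:1a:4a:16:01:51', ipv4: ['192.0.2.10'], ipv6: ['2001:db8::10']}, ...]
     * </pre>
     *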
* @param vmId
* the Vm's ID which contains the interfaces
*
* @param xmlRpcStruct
* the xml structure that describes the VM as reported by VDSM
* @return a list of {@link VmGuestAgentInterface} or null if no guest vNics were reported
*/
public static List<VmGuestAgentInterface> buildVmGuestAgentInterfacesData(Guid vmId, Map<String, Object> xmlRpcStruct) {
if (!xmlRpcStruct.containsKey(VdsProperties.VM_NETWORK_INTERFACES)) {
return null;
}
List<VmGuestAgentInterface> interfaces = new ArrayList<VmGuestAgentInterface>();
for (Object ifaceStruct : (Object[]) xmlRpcStruct.get(VdsProperties.VM_NETWORK_INTERFACES)) {
VmGuestAgentInterface nic = new VmGuestAgentInterface();
Map ifaceMap = (Map) ifaceStruct;
nic.setInterfaceName(AssignStringValue(ifaceMap, VdsProperties.VM_INTERFACE_NAME));
nic.setMacAddress(getMacAddress(ifaceMap));
            nic.setIpv4Addresses(extractStringList(ifaceMap, VdsProperties.VM_IPV4_ADDRESSES));
            nic.setIpv6Addresses(extractStringList(ifaceMap, VdsProperties.VM_IPV6_ADDRESSES));
nic.setVmId(vmId);
interfaces.add(nic);
}
return interfaces;
}
private static String getMacAddress(Map<String, Object> ifaceMap) {
String macAddress = AssignStringValue(ifaceMap, VdsProperties.VM_INTERFACE_MAC_ADDRESS);
return macAddress != null ? macAddress.replace('-', ':') : null;
}
/**
     * Builds the host's NUMA node topology from the received NUMA nodes information.
     *
     * @param vds
     *            the host to update
     * @param xmlRpcStruct
     *            the struct containing the reported NUMA node data
*/
private static void updateNumaNodesData(VDS vds, Map<String, Object> xmlRpcStruct) {
if (xmlRpcStruct.containsKey(VdsProperties.AUTO_NUMA)) {
vds.getDynamicData().setAutoNumaBalancing(AutoNumaBalanceStatus.forValue(
AssignIntValue(xmlRpcStruct, VdsProperties.AUTO_NUMA)));
}
if (xmlRpcStruct.containsKey(VdsProperties.NUMA_NODES)) {
Map<String, Map<String, Object>> numaNodeMap =
(Map<String, Map<String, Object>>) xmlRpcStruct.get(VdsProperties.NUMA_NODES);
Map<String, Object> numaNodeDistanceMap =
(Map<String, Object>) xmlRpcStruct.get(VdsProperties.NUMA_NODE_DISTANCE);
List<VdsNumaNode> newNumaNodeList = new ArrayList<>(numaNodeMap.size());
for (Map.Entry<String, Map<String, Object>> item : numaNodeMap.entrySet()) {
int index = Integer.valueOf(item.getKey());
Map<String, Object> itemMap = item.getValue();
List<Integer> cpuIds = extractIntegerList(itemMap, VdsProperties.NUMA_NODE_CPU_LIST);
long memTotal = AssignLongValue(itemMap, VdsProperties.NUMA_NODE_TOTAL_MEM);
VdsNumaNode numaNode = new VdsNumaNode();
numaNode.setIndex(index);
if (cpuIds != null) {
numaNode.setCpuIds(cpuIds);
}
numaNode.setMemTotal(memTotal);
newNumaNodeList.add(numaNode);
}
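            // each distance entry maps a node index to a list of distances ordered by peer node index,
            // e.g. an assumed entry "1": [20, 10] becomes the map {0=20, 1=10} for node 1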
for (Map.Entry<String, Object> item : numaNodeDistanceMap.entrySet()) {
int index = Integer.valueOf(item.getKey());
List<Integer> distances = extractIntegerList(numaNodeDistanceMap, item.getKey());
Map<Integer, Integer> distanceMap = new HashMap<>(distances.size());
for (int i = 0; i < distances.size(); i++) {
distanceMap.put(i, distances.get(i));
}
VdsNumaNode newNumaNode = NumaUtils.getVdsNumaNodeByIndex(newNumaNodeList, index);
if (newNumaNode != null) {
newNumaNode.setNumaNodeDistances(distanceMap);
}
}
vds.getDynamicData().setNumaNodeList(newNumaNodeList);
vds.setNumaSupport(newNumaNodeList.size() > 1);
}
}
    private static List<String> extractStringList(Map<String, Object> xmlRpcStruct, String propertyName) {
        if (!xmlRpcStruct.containsKey(propertyName)) {
return null;
}
Object[] items = (Object[]) xmlRpcStruct.get(propertyName);
if (items.length == 0) {
return null;
}
List<String> list = new ArrayList<String>();
for (Object item : items) {
list.add((String) item);
}
return list;
}
private static List<Integer> extractIntegerList(Map<String, Object> xmlRpcStruct, String propertyName) {
        if (!xmlRpcStruct.containsKey(propertyName)) {
return null;
}
Object[] items = (Object[]) xmlRpcStruct.get(propertyName);
if (items.length == 0) {
return null;
}
List<Integer> list = new ArrayList<Integer>();
for (Object item : items) {
list.add((Integer) item);
}
return list;
}
private static final Log log = LogFactory.getLog(VdsBrokerObjectsBuilder.class);
}
| backend/manager/modules/vdsbroker/src/main/java/org/ovirt/engine/core/vdsbroker/vdsbroker/VdsBrokerObjectsBuilder.java | package org.ovirt.engine.core.vdsbroker.vdsbroker;
import java.nio.file.Paths;
import java.text.DateFormat;
import java.text.DecimalFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Calendar;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang.StringUtils;
import org.ovirt.engine.core.common.AuditLogType;
import org.ovirt.engine.core.common.FeatureSupported;
import org.ovirt.engine.core.common.businessentities.AutoNumaBalanceStatus;
import org.ovirt.engine.core.common.businessentities.CpuStatistics;
import org.ovirt.engine.core.common.businessentities.DiskImageDynamic;
import org.ovirt.engine.core.common.businessentities.DisplayType;
import org.ovirt.engine.core.common.businessentities.Entities;
import org.ovirt.engine.core.common.businessentities.KdumpStatus;
import org.ovirt.engine.core.common.businessentities.LUNs;
import org.ovirt.engine.core.common.businessentities.NumaNodeStatistics;
import org.ovirt.engine.core.common.businessentities.SessionState;
import org.ovirt.engine.core.common.businessentities.StoragePool;
import org.ovirt.engine.core.common.businessentities.StorageType;
import org.ovirt.engine.core.common.businessentities.VDS;
import org.ovirt.engine.core.common.businessentities.VDSDomainsData;
import org.ovirt.engine.core.common.businessentities.VMStatus;
import org.ovirt.engine.core.common.businessentities.VdsNumaNode;
import org.ovirt.engine.core.common.businessentities.VdsTransparentHugePagesState;
import org.ovirt.engine.core.common.businessentities.VmBalloonInfo;
import org.ovirt.engine.core.common.businessentities.VmBlockJob;
import org.ovirt.engine.core.common.businessentities.VmBlockJobType;
import org.ovirt.engine.core.common.businessentities.VmDynamic;
import org.ovirt.engine.core.common.businessentities.VmExitReason;
import org.ovirt.engine.core.common.businessentities.VmExitStatus;
import org.ovirt.engine.core.common.businessentities.VmGuestAgentInterface;
import org.ovirt.engine.core.common.businessentities.VmJob;
import org.ovirt.engine.core.common.businessentities.VmJobState;
import org.ovirt.engine.core.common.businessentities.VmJobType;
import org.ovirt.engine.core.common.businessentities.VmPauseStatus;
import org.ovirt.engine.core.common.businessentities.VmRngDevice;
import org.ovirt.engine.core.common.businessentities.VmStatistics;
import org.ovirt.engine.core.common.businessentities.network.InterfaceStatus;
import org.ovirt.engine.core.common.businessentities.network.Network;
import org.ovirt.engine.core.common.businessentities.network.NetworkBootProtocol;
import org.ovirt.engine.core.common.businessentities.network.VdsInterfaceType;
import org.ovirt.engine.core.common.businessentities.network.VdsNetworkInterface;
import org.ovirt.engine.core.common.businessentities.network.VdsNetworkStatistics;
import org.ovirt.engine.core.common.businessentities.network.VmNetworkInterface;
import org.ovirt.engine.core.common.config.Config;
import org.ovirt.engine.core.common.config.ConfigValues;
import org.ovirt.engine.core.common.utils.EnumUtils;
import org.ovirt.engine.core.common.utils.SizeConverter;
import org.ovirt.engine.core.compat.Guid;
import org.ovirt.engine.core.compat.RpmVersion;
import org.ovirt.engine.core.dal.dbbroker.DbFacade;
import org.ovirt.engine.core.dal.dbbroker.auditloghandling.AuditLogDirector;
import org.ovirt.engine.core.dal.dbbroker.auditloghandling.AuditLogableBase;
import org.ovirt.engine.core.utils.NetworkUtils;
import org.ovirt.engine.core.utils.NumaUtils;
import org.ovirt.engine.core.utils.SerializationFactory;
import org.ovirt.engine.core.utils.log.Log;
import org.ovirt.engine.core.utils.log.LogFactory;
/**
 * This class encapsulates the knowledge of how to create objects from the VDS RPC protocol response.
 * It has methods that receive an XmlRpcStruct and construct the following classes: VmDynamic, VdsDynamic and VdsStatic.
*/
@SuppressWarnings({ "unchecked", "rawtypes" })
public class VdsBrokerObjectsBuilder {
private final static int VNC_START_PORT = 5900;
private final static double NANO_SECONDS = 1000000000;
public static VmDynamic buildVMDynamicDataFromList(Map<String, Object> xmlRpcStruct) {
VmDynamic vmdynamic = new VmDynamic();
if (xmlRpcStruct.containsKey(VdsProperties.vm_guid)) {
vmdynamic.setId(new Guid((String) xmlRpcStruct.get(VdsProperties.vm_guid)));
}
if (xmlRpcStruct.containsKey(VdsProperties.status)) {
vmdynamic.setStatus(convertToVmStatus((String) xmlRpcStruct.get(VdsProperties.status)));
}
return vmdynamic;
}
public static VmDynamic buildVMDynamicData(Map<String, Object> xmlRpcStruct) {
VmDynamic vmdynamic = new VmDynamic();
updateVMDynamicData(vmdynamic, xmlRpcStruct);
return vmdynamic;
}
public static StoragePool buildStoragePool(Map<String, Object> xmlRpcStruct) {
StoragePool sPool = new StoragePool();
if (xmlRpcStruct.containsKey("type")) {
sPool.setIsLocal(StorageType.valueOf(xmlRpcStruct.get("type").toString()).isLocal());
}
sPool.setName(AssignStringValue(xmlRpcStruct, "name"));
Integer masterVersion = AssignIntValue(xmlRpcStruct, "master_ver");
if (masterVersion != null) {
sPool.setmaster_domain_version(masterVersion);
}
return sPool;
}
public static VmStatistics buildVMStatisticsData(Map<String, Object> xmlRpcStruct) {
VmStatistics vmStatistics = new VmStatistics();
updateVMStatisticsData(vmStatistics, xmlRpcStruct);
return vmStatistics;
}
public static Map<String, LUNs> buildVmLunDisksData(Map<String, Object> xmlRpcStruct) {
Map<String, Object> disks = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.vm_disks);
Map<String, LUNs> lunsMap = new HashMap<>();
if (disks != null) {
for (Object diskAsObj : disks.values()) {
Map<String, Object> disk = (Map<String, Object>) diskAsObj;
String lunGuidString = AssignStringValue(disk, VdsProperties.lun_guid);
if (!StringUtils.isEmpty(lunGuidString)) {
LUNs lun = new LUNs();
lun.setLUN_id(lunGuidString);
if (disk.containsKey(VdsProperties.disk_true_size)) {
long sizeInBytes = AssignLongValue(disk, VdsProperties.disk_true_size);
int sizeInGB = SizeConverter.convert(
sizeInBytes, SizeConverter.SizeUnit.BYTES, SizeConverter.SizeUnit.GB).intValue();
lun.setDeviceSize(sizeInGB);
}
lunsMap.put(lunGuidString, lun);
}
}
}
return lunsMap;
}
public static void updateVMDynamicData(VmDynamic vm, Map<String, Object> xmlRpcStruct) {
if (xmlRpcStruct.containsKey(VdsProperties.vm_guid)) {
vm.setId(new Guid((String) xmlRpcStruct.get(VdsProperties.vm_guid)));
}
if (xmlRpcStruct.containsKey(VdsProperties.session)) {
String session = (String) xmlRpcStruct.get(VdsProperties.session);
try {
vm.setSession(SessionState.valueOf(session));
} catch (Exception e) {
log.errorFormat("vm session value illegal : {0}", session);
}
}
if (xmlRpcStruct.containsKey(VdsProperties.kvmEnable)) {
vm.setKvmEnable(Boolean.parseBoolean((String) xmlRpcStruct.get(VdsProperties.kvmEnable)));
}
if (xmlRpcStruct.containsKey(VdsProperties.acpiEnable)) {
vm.setAcpiEnable(Boolean.parseBoolean((String) xmlRpcStruct.get(VdsProperties.acpiEnable)));
}
if (xmlRpcStruct.containsKey(VdsProperties.win2kHackEnable)) {
vm.setWin2kHackEnable(Boolean.parseBoolean((String) xmlRpcStruct.get(VdsProperties.win2kHackEnable)));
}
if (xmlRpcStruct.containsKey(VdsProperties.status)) {
vm.setStatus(convertToVmStatus((String) xmlRpcStruct.get(VdsProperties.status)));
}
if (xmlRpcStruct.containsKey(VdsProperties.display_port)) {
try {
vm.setDisplay(Integer.parseInt(xmlRpcStruct.get(VdsProperties.display_port).toString()));
} catch (NumberFormatException e) {
log.errorFormat("vm display_port value illegal : {0}", xmlRpcStruct.get(VdsProperties.display_port));
}
} else if (xmlRpcStruct.containsKey(VdsProperties.display)) {
try {
vm.setDisplay(VNC_START_PORT + Integer.parseInt(xmlRpcStruct.get(VdsProperties.display).toString()));
} catch (NumberFormatException e) {
log.errorFormat("vm display value illegal : {0}", xmlRpcStruct.get(VdsProperties.display));
}
}
if (xmlRpcStruct.containsKey(VdsProperties.display_secure_port)) {
try {
vm.setDisplaySecurePort(Integer.parseInt(xmlRpcStruct.get(VdsProperties.display_secure_port)
.toString()));
} catch (NumberFormatException e) {
log.errorFormat("vm display_secure_port value illegal : {0}",
xmlRpcStruct.get(VdsProperties.display_secure_port));
}
}
if (xmlRpcStruct.containsKey((VdsProperties.displayType))) {
String displayType = xmlRpcStruct.get(VdsProperties.displayType).toString();
try {
vm.setDisplayType(DisplayType.valueOf(displayType));
} catch (Exception e2) {
log.errorFormat("vm display type value illegal : {0}", displayType);
}
}
if (xmlRpcStruct.containsKey((VdsProperties.displayIp))) {
vm.setDisplayIp((String) xmlRpcStruct.get(VdsProperties.displayIp));
}
if (xmlRpcStruct.containsKey((VdsProperties.utc_diff))) {
String utc_diff = xmlRpcStruct.get(VdsProperties.utc_diff).toString();
if (utc_diff.startsWith("+")) {
utc_diff = utc_diff.substring(1);
}
try {
vm.setUtcDiff(Integer.parseInt(utc_diff));
} catch (NumberFormatException e) {
log.errorFormat("vm offset (utc_diff) value illegal : {0}", utc_diff);
}
}
if (xmlRpcStruct.containsKey(VdsProperties.hash)) {
String hash = (String) xmlRpcStruct.get(VdsProperties.hash);
try {
vm.setHash(hash);
} catch (Exception e) {
log.errorFormat("vm hash value illegal : {0}", hash);
}
}
        // vm disks
if (xmlRpcStruct.containsKey(VdsProperties.vm_disks)) {
initDisks(xmlRpcStruct, vm);
}
// ------------- vm internal agent data
        vm.setGuestLastLoginTime(AssignDateTimeFromEpoch(xmlRpcStruct, VdsProperties.guest_last_login_time));
vm.setVmHost(AssignStringValue(xmlRpcStruct, VdsProperties.vm_host));
String guestUserName = AssignStringValue(xmlRpcStruct, VdsProperties.guest_cur_user_name);
vm.setGuestCurrentUserName(guestUserName);
initAppsList(xmlRpcStruct, vm);
vm.setGuestOs(AssignStringValue(xmlRpcStruct, VdsProperties.guest_os));
if (xmlRpcStruct.containsKey(VdsProperties.VM_FQDN)) {
vm.setVmFQDN(AssignStringValue(xmlRpcStruct, VdsProperties.VM_FQDN));
String fqdn = vm.getVmFQDN().trim();
if ("localhost".equalsIgnoreCase(fqdn) || "localhost.localdomain".equalsIgnoreCase(fqdn)) {
vm.setVmFQDN(null);
}
else {
vm.setVmFQDN(fqdn);
}
}
vm.setVmIp(AssignStringValue(xmlRpcStruct, VdsProperties.VM_IP));
if (vm.getVmIp() != null) {
if (vm.getVmIp().startsWith("127.0.")) {
vm.setVmIp(null);
} else {
vm.setVmIp(vm.getVmIp().trim());
}
}
if (xmlRpcStruct.containsKey(VdsProperties.exit_code)) {
String exitCodeStr = xmlRpcStruct.get(VdsProperties.exit_code).toString();
vm.setExitStatus(VmExitStatus.forValue(Integer.parseInt(exitCodeStr)));
}
if (xmlRpcStruct.containsKey(VdsProperties.exit_message)) {
String exitMsg = (String) xmlRpcStruct.get(VdsProperties.exit_message);
vm.setExitMessage(exitMsg);
}
if (xmlRpcStruct.containsKey(VdsProperties.exit_reason)) {
String exitReasonStr = xmlRpcStruct.get(VdsProperties.exit_reason).toString();
vm.setExitReason(VmExitReason.forValue(Integer.parseInt(exitReasonStr)));
} else {
vm.setExitReason(VmExitReason.Unknown);
}
// if monitorResponse returns negative it means its erroneous
if (xmlRpcStruct.containsKey(VdsProperties.monitorResponse)) {
int response = Integer.parseInt(xmlRpcStruct.get(VdsProperties.monitorResponse).toString());
if (response < 0) {
vm.setStatus(VMStatus.NotResponding);
}
}
if (xmlRpcStruct.containsKey(VdsProperties.clientIp)) {
vm.setClientIp(xmlRpcStruct.get(VdsProperties.clientIp).toString());
}
VmPauseStatus pauseStatus = VmPauseStatus.NONE;
if (xmlRpcStruct.containsKey(VdsProperties.pauseCode)) {
String pauseCodeStr = (String) xmlRpcStruct.get(VdsProperties.pauseCode);
try {
pauseStatus = VmPauseStatus.valueOf(pauseCodeStr);
} catch (IllegalArgumentException ex) {
log.error("Error in parsing vm pause status. Setting value to NONE");
pauseStatus = VmPauseStatus.NONE;
}
}
vm.setPauseStatus(pauseStatus);
if (xmlRpcStruct.containsKey(VdsProperties.watchdogEvent)) {
Map<String, Object> watchdogStruct = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.watchdogEvent);
double time = Double.parseDouble(watchdogStruct.get(VdsProperties.time).toString());
// vdsm may not send the action http://gerrit.ovirt.org/14134
String action =
watchdogStruct.containsKey(VdsProperties.action) ? watchdogStruct.get(VdsProperties.action)
.toString() : null;
vm.setLastWatchdogEvent((long) time);
vm.setLastWatchdogAction(action);
}
if (xmlRpcStruct.containsKey(VdsProperties.CDRom)) {
String isoName = Paths.get((String) xmlRpcStruct.get(VdsProperties.CDRom)).getFileName().toString();
vm.setCurrentCd(isoName);
}
if (xmlRpcStruct.containsKey(VdsProperties.GUEST_CPU_COUNT)) {
vm.setGuestCpuCount(AssignIntValue(xmlRpcStruct, VdsProperties.GUEST_CPU_COUNT));
}
}
public static void updateVMStatisticsData(VmStatistics vm, Map<String, Object> xmlRpcStruct) {
if (xmlRpcStruct.containsKey(VdsProperties.vm_guid)) {
vm.setId(new Guid((String) xmlRpcStruct.get(VdsProperties.vm_guid)));
}
vm.setelapsed_time(AssignDoubleValue(xmlRpcStruct, VdsProperties.elapsed_time));
// ------------- vm network statistics -----------------------
if (xmlRpcStruct.containsKey(VdsProperties.VM_NETWORK)) {
Map networkStruct = (Map) xmlRpcStruct.get(VdsProperties.VM_NETWORK);
vm.setInterfaceStatistics(new ArrayList<VmNetworkInterface>());
for (Object tempNic : networkStruct.values()) {
Map nic = (Map) tempNic;
VmNetworkInterface stats = new VmNetworkInterface();
vm.getInterfaceStatistics().add(stats);
if (nic.containsKey(VdsProperties.VM_INTERFACE_NAME)) {
stats.setName((String) ((nic.get(VdsProperties.VM_INTERFACE_NAME) instanceof String) ? nic
.get(VdsProperties.VM_INTERFACE_NAME) : null));
}
Double rx_rate = AssignDoubleValue(nic, VdsProperties.rx_rate);
Double rx_dropped = AssignDoubleValue(nic, VdsProperties.rx_dropped);
Double tx_rate = AssignDoubleValue(nic, VdsProperties.tx_rate);
Double tx_dropped = AssignDoubleValue(nic, VdsProperties.tx_dropped);
stats.getStatistics().setReceiveRate(rx_rate != null ? rx_rate : 0);
stats.getStatistics().setReceiveDropRate(rx_dropped != null ? rx_dropped : 0);
stats.getStatistics().setTransmitRate(tx_rate != null ? tx_rate : 0);
stats.getStatistics().setTransmitDropRate(tx_dropped != null ? tx_dropped : 0);
stats.setMacAddress((String) ((nic.get(VdsProperties.MAC_ADDR) instanceof String) ? nic
.get(VdsProperties.MAC_ADDR) : null));
stats.setSpeed(AssignIntValue(nic, VdsProperties.INTERFACE_SPEED));
}
}
if (xmlRpcStruct.containsKey(VdsProperties.VM_DISKS_USAGE)) {
initDisksUsage(xmlRpcStruct, vm);
}
// ------------- vm cpu statistics -----------------------
vm.setcpu_sys(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_sys));
vm.setcpu_user(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_user));
// ------------- vm memory statistics -----------------------
vm.setusage_mem_percent(AssignIntValue(xmlRpcStruct, VdsProperties.vm_usage_mem_percent));
vm.setVmBalloonInfo(getBalloonInfo(xmlRpcStruct));
// ------------- vm migration statistics -----------------------
Integer migrationProgress = AssignIntValue(xmlRpcStruct, VdsProperties.vm_migration_progress_percent);
vm.setMigrationProgressPercent(migrationProgress != null ? migrationProgress : 0);
// ------------- vm jobs -------------
vm.setVmJobs(getVmJobs(vm.getId(), xmlRpcStruct));
}
private static VmBalloonInfo getBalloonInfo(Map<String, Object> xmlRpcStruct) {
Map<String, Object> balloonInfo = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.vm_balloonInfo);
VmBalloonInfo vmBalloonInfo = new VmBalloonInfo();
if (balloonInfo != null && balloonInfo.size() > 0) {
vmBalloonInfo.setCurrentMemory(AssignLongValue(balloonInfo, VdsProperties.vm_balloon_cur));
vmBalloonInfo.setBalloonMaxMemory(AssignLongValue(balloonInfo, VdsProperties.vm_balloon_max));
vmBalloonInfo.setBalloonTargetMemory(AssignLongValue(balloonInfo, VdsProperties.vm_balloon_target));
vmBalloonInfo.setBalloonMinMemory(AssignLongValue(balloonInfo, VdsProperties.vm_balloon_min));
if (balloonInfo.size() >= 4) { // only if all 4 properties are found the balloon is considered enabled (available from 3.3)
vmBalloonInfo.setBalloonDeviceEnabled(true);
}
} else {
vmBalloonInfo.setBalloonDeviceEnabled(false);
}
return vmBalloonInfo;
}
private static List<VmJob> getVmJobs(Guid vmId, Map<String, Object> xmlRpcStruct) {
if (!xmlRpcStruct.containsKey(VdsProperties.vmJobs)) {
return null;
}
List<VmJob> vmJobs = new ArrayList<VmJob>();
for (Object jobMap : ((Map<String, Object>) xmlRpcStruct.get(VdsProperties.vmJobs)).values()) {
VmJob job = buildVmJobData(vmId, (Map<String, Object>) jobMap);
vmJobs.add(job);
}
return vmJobs;
}
private static VmJob buildVmJobData(Guid vmId, Map<String, Object> xmlRpcStruct) {
VmJob ret;
VmJobType jobType = VmJobType.getByName(AssignStringValue(xmlRpcStruct, VdsProperties.vmJobType));
if (jobType == null) {
jobType = VmJobType.UNKNOWN;
}
switch (jobType) {
case BLOCK:
VmBlockJob blockJob = new VmBlockJob();
blockJob.setBlockJobType(VmBlockJobType.getByName(AssignStringValue(xmlRpcStruct, VdsProperties.vmBlockJobType)));
blockJob.setCursorCur(AssignLongValue(xmlRpcStruct, VdsProperties.vmJobCursorCur));
blockJob.setCursorEnd(AssignLongValue(xmlRpcStruct, VdsProperties.vmJobCursorEnd));
blockJob.setBandwidth(AssignLongValue(xmlRpcStruct, VdsProperties.vmJobBandwidth));
blockJob.setImageGroupId(new Guid(AssignStringValue(xmlRpcStruct, VdsProperties.vmJobImageUUID)));
ret = blockJob;
break;
default:
ret = new VmJob();
break;
}
ret.setVmId(vmId);
ret.setId(new Guid(AssignStringValue(xmlRpcStruct, VdsProperties.vmJobId)));
ret.setJobState(VmJobState.NORMAL);
ret.setJobType(jobType);
return ret;
}
public static void updateVDSDynamicData(VDS vds, Map<String, Object> xmlRpcStruct) {
vds.setSupportedClusterLevels(AssignStringValueFromArray(xmlRpcStruct, VdsProperties.supported_cluster_levels));
updateNetworkData(vds, xmlRpcStruct);
updateNumaNodesData(vds, xmlRpcStruct);
vds.setCpuThreads(AssignIntValue(xmlRpcStruct, VdsProperties.cpuThreads));
vds.setCpuCores(AssignIntValue(xmlRpcStruct, VdsProperties.cpu_cores));
vds.setCpuSockets(AssignIntValue(xmlRpcStruct, VdsProperties.cpu_sockets));
vds.setCpuModel(AssignStringValue(xmlRpcStruct, VdsProperties.cpu_model));
vds.setCpuSpeedMh(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_speed_mh));
vds.setPhysicalMemMb(AssignIntValue(xmlRpcStruct, VdsProperties.physical_mem_mb));
vds.setKvmEnabled(AssignBoolValue(xmlRpcStruct, VdsProperties.kvm_enabled));
vds.setReservedMem(AssignIntValue(xmlRpcStruct, VdsProperties.reservedMem));
Integer guestOverhead = AssignIntValue(xmlRpcStruct, VdsProperties.guestOverhead);
vds.setGuestOverhead(guestOverhead != null ? guestOverhead : 0);
vds.setCpuFlags(AssignStringValue(xmlRpcStruct, VdsProperties.cpu_flags));
UpdatePackagesVersions(vds, xmlRpcStruct);
vds.setSupportedEngines(AssignStringValueFromArray(xmlRpcStruct, VdsProperties.supported_engines));
vds.setIScsiInitiatorName(AssignStringValue(xmlRpcStruct, VdsProperties.iSCSIInitiatorName));
vds.setSupportedEmulatedMachines(AssignStringValueFromArray(xmlRpcStruct, VdsProperties.emulatedMachines));
setRngSupportedSourcesToVds(vds, xmlRpcStruct);
String hooksStr = ""; // default value if hooks is not in the xml rpc struct
if (xmlRpcStruct.containsKey(VdsProperties.hooks)) {
hooksStr = xmlRpcStruct.get(VdsProperties.hooks).toString();
}
vds.setHooksStr(hooksStr);
// parse out the HBAs available in this host
Map<String, List<Map<String, String>>> hbas = new HashMap<>();
for (Map.Entry<String, Object[]> el: ((Map<String, Object[]>)xmlRpcStruct.get(VdsProperties.HBAInventory)).entrySet()) {
List<Map<String, String>> devicesList = new ArrayList<Map<String, String>>();
for (Object device: el.getValue()) {
devicesList.add((Map<String, String>)device);
}
hbas.put(el.getKey(), devicesList);
}
vds.setHBAs(hbas);
vds.setBootTime(AssignLongValue(xmlRpcStruct, VdsProperties.bootTime));
vds.setKdumpStatus(KdumpStatus.valueOfNumber(AssignIntValue(xmlRpcStruct, VdsProperties.KDUMP_STATUS)));
Map<String, Object> selinux = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.selinux);
if (selinux != null) {
vds.setSELinuxEnforceMode(AssignIntValue(selinux, VdsProperties.selinux_mode));
} else {
vds.setSELinuxEnforceMode(null);
}
if (xmlRpcStruct.containsKey(VdsProperties.liveSnapshotSupport)) {
vds.setLiveSnapshotSupport(AssignBoolValue(xmlRpcStruct, VdsProperties.liveSnapshotSupport));
}
if (xmlRpcStruct.containsKey(VdsProperties.liveMergeSupport)) {
vds.setLiveMergeSupport(AssignBoolValue(xmlRpcStruct, VdsProperties.liveMergeSupport));
} else {
vds.setLiveMergeSupport(false);
}
}
private static void setRngSupportedSourcesToVds(VDS vds, Map<String, Object> xmlRpcStruct) {
vds.getSupportedRngSources().clear();
String rngSourcesFromStruct = AssignStringValueFromArray(xmlRpcStruct, VdsProperties.rngSources);
if (rngSourcesFromStruct != null) {
vds.getSupportedRngSources().addAll(VmRngDevice.csvToSourcesSet(rngSourcesFromStruct.toUpperCase()));
}
}
public static void checkTimeDrift(VDS vds, Map<String, Object> xmlRpcStruct) {
Boolean isHostTimeDriftEnabled = Config.getValue(ConfigValues.EnableHostTimeDrift);
if (isHostTimeDriftEnabled) {
Integer maxTimeDriftAllowed = Config.getValue(ConfigValues.HostTimeDriftInSec);
Date hostDate = AssignDatetimeValue(xmlRpcStruct, VdsProperties.hostDatetime);
if (hostDate != null) {
Long timeDrift =
TimeUnit.MILLISECONDS.toSeconds(Math.abs(hostDate.getTime() - System.currentTimeMillis()));
if (timeDrift > maxTimeDriftAllowed) {
AuditLogableBase logable = new AuditLogableBase(vds.getId());
logable.addCustomValue("Actual", timeDrift.toString());
logable.addCustomValue("Max", maxTimeDriftAllowed.toString());
AuditLogDirector.log(logable, AuditLogType.VDS_TIME_DRIFT_ALERT);
}
} else {
log.error("Time Drift validation: failed to get Host or Engine time.");
}
}
}
private static void initDisksUsage(Map<String, Object> vmStruct, VmStatistics vm) {
Object[] vmDisksUsage = (Object[]) vmStruct.get(VdsProperties.VM_DISKS_USAGE);
if (vmDisksUsage != null) {
ArrayList<Object> disksUsageList = new ArrayList<Object>(Arrays.asList(vmDisksUsage));
vm.setDisksUsage(SerializationFactory.getSerializer().serializeUnformattedJson(disksUsageList));
}
}
private static void UpdatePackagesVersions(VDS vds, Map<String, Object> xmlRpcStruct) {
vds.setVersionName(AssignStringValue(xmlRpcStruct, VdsProperties.version_name));
vds.setSoftwareVersion(AssignStringValue(xmlRpcStruct, VdsProperties.software_version));
vds.setBuildName(AssignStringValue(xmlRpcStruct, VdsProperties.build_name));
if (xmlRpcStruct.containsKey(VdsProperties.host_os)) {
vds.setHostOs(GetPackageVersionFormated(
(Map<String, Object>) xmlRpcStruct.get(VdsProperties.host_os), true));
}
if (xmlRpcStruct.containsKey(VdsProperties.packages)) {
            // packages is an array of xmlRpcStructs (each one holds a package's name, version, release, etc.)
for (Object hostPackageMap : (Object[]) xmlRpcStruct.get(VdsProperties.packages)) {
Map<String, Object> hostPackage = (Map<String, Object>) hostPackageMap;
String packageName = AssignStringValue(hostPackage, VdsProperties.package_name);
if (VdsProperties.kvmPackageName.equals(packageName)) {
vds.setKvmVersion(GetPackageVersionFormated(hostPackage, false));
} else if (VdsProperties.spicePackageName.equals(packageName)) {
vds.setSpiceVersion(GetPackageVersionFormated(hostPackage, false));
} else if (VdsProperties.kernelPackageName.equals(packageName)) {
vds.setKernelVersion(GetPackageVersionFormated(hostPackage, false));
}
}
} else if (xmlRpcStruct.containsKey(VdsProperties.packages2)) {
Map<String, Object> packages = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.packages2);
if (packages.containsKey(VdsProperties.vdsmPackageName)) {
Map<String, Object> vdsm = (Map<String, Object>) packages.get(VdsProperties.vdsmPackageName);
vds.setVersion(getPackageRpmVersion("vdsm", vdsm));
}
if (packages.containsKey(VdsProperties.qemuKvmPackageName)) {
Map<String, Object> kvm = (Map<String, Object>) packages.get(VdsProperties.qemuKvmPackageName);
vds.setKvmVersion(getPackageVersionFormated2(kvm));
}
if (packages.containsKey(VdsProperties.libvirtPackageName)) {
Map<String, Object> libvirt = (Map<String, Object>) packages.get(VdsProperties.libvirtPackageName);
vds.setLibvirtVersion(getPackageRpmVersion("libvirt", libvirt));
}
if (packages.containsKey(VdsProperties.spiceServerPackageName)) {
Map<String, Object> spice = (Map<String, Object>) packages.get(VdsProperties.spiceServerPackageName);
vds.setSpiceVersion(getPackageVersionFormated2(spice));
}
if (packages.containsKey(VdsProperties.kernelPackageName)) {
Map<String, Object> kernel = (Map<String, Object>) packages.get(VdsProperties.kernelPackageName);
vds.setKernelVersion(getPackageVersionFormated2(kernel));
}
if (packages.containsKey(VdsProperties.GLUSTER_PACKAGE_NAME)) {
Map<String, Object> gluster = (Map<String, Object>) packages.get(VdsProperties.GLUSTER_PACKAGE_NAME);
vds.setGlusterVersion(getPackageRpmVersion("glusterfs", gluster));
}
}
}
    // Version 2 of GetPackageVersionFormated:
    // from 2.3 we get a dictionary and not a flat list.
    // From now on, the package names (of spice, kernel, qemu and libvirt) are the same for both VDSM and ENGINE.
    // (VDSM used to report the rpm package names, so in RHEL6, when they changed, it broke our interface.)
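    // e.g. a hypothetical {version: "0.12.1.2", release: "2.415.el6"} entry is formatted as "0.12.1.2 - 2.415.el6"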
private static String getPackageVersionFormated2(Map<String, Object> hostPackage) {
String packageVersion = (hostPackage.get(VdsProperties.package_version) != null) ? (String) hostPackage
.get(VdsProperties.package_version) : null;
String packageRelease = (hostPackage.get(VdsProperties.package_release) != null) ? (String) hostPackage
.get(VdsProperties.package_release) : null;
StringBuilder sb = new StringBuilder();
if (!StringUtils.isEmpty(packageVersion)) {
sb.append(packageVersion);
}
if (!StringUtils.isEmpty(packageRelease)) {
if (sb.length() > 0) {
sb.append(String.format(" - %1$s", packageRelease));
} else {
sb.append(packageRelease);
}
}
return sb.toString();
}
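    // Builds an rpm-style version string, e.g. a hypothetical ("vdsm", {version: "4.14.0", release: "1.el6"})
    // yields the RpmVersion "vdsm-4.14.0-1.el6"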
private static RpmVersion getPackageRpmVersion(String packageName, Map<String, Object> hostPackage) {
String packageVersion = (hostPackage.get(VdsProperties.package_version) != null) ? (String) hostPackage
.get(VdsProperties.package_version) : null;
String packageRelease = (hostPackage.get(VdsProperties.package_release) != null) ? (String) hostPackage
.get(VdsProperties.package_release) : null;
StringBuilder sb = new StringBuilder();
if (!StringUtils.isEmpty(packageName)) {
sb.append(packageName);
}
        boolean hasPackageVersion = !StringUtils.isEmpty(packageVersion);
        boolean hasPackageRelease = !StringUtils.isEmpty(packageRelease);
        if (hasPackageVersion || hasPackageRelease) {
            sb.append("-");
        }
        if (hasPackageVersion) {
            sb.append(packageVersion);
        }
        if (hasPackageRelease) {
            if (sb.length() > 0) {
                sb.append(String.format("-%1$s", packageRelease));
            } else {
                sb.append(packageRelease);
            }
        }
return new RpmVersion(sb.toString());
}
public static void updateHardwareSystemInformation(Map<String, Object> hwInfo, VDS vds){
vds.setHardwareManufacturer(AssignStringValue(hwInfo, VdsProperties.hwManufacturer));
vds.setHardwareProductName(AssignStringValue(hwInfo, VdsProperties.hwProductName));
vds.setHardwareVersion(AssignStringValue(hwInfo, VdsProperties.hwVersion));
vds.setHardwareSerialNumber(AssignStringValue(hwInfo, VdsProperties.hwSerialNumber));
vds.setHardwareUUID(AssignStringValue(hwInfo, VdsProperties.hwUUID));
vds.setHardwareFamily(AssignStringValue(hwInfo, VdsProperties.hwFamily));
}
private static String GetPackageVersionFormated(Map<String, Object> hostPackage, boolean getName) {
String packageName = AssignStringValue(hostPackage, VdsProperties.package_name);
String packageVersion = AssignStringValue(hostPackage, VdsProperties.package_version);
String packageRelease = AssignStringValue(hostPackage, VdsProperties.package_release);
StringBuilder sb = new StringBuilder();
if (!StringUtils.isEmpty(packageName) && getName) {
sb.append(packageName);
}
if (!StringUtils.isEmpty(packageVersion)) {
if (sb.length() > 0) {
sb.append(String.format(" - %1$s", packageVersion));
} else {
sb.append(packageVersion);
}
}
if (!StringUtils.isEmpty(packageRelease)) {
if (sb.length() > 0) {
sb.append(String.format(" - %1$s", packageRelease));
} else {
sb.append(packageRelease);
}
}
return sb.toString();
}
public static void updateVDSStatisticsData(VDS vds, Map<String, Object> xmlRpcStruct) {
// ------------- vds memory usage ---------------------------
vds.setUsageMemPercent(AssignIntValue(xmlRpcStruct, VdsProperties.mem_usage));
// ------------- vds network statistics ---------------------
Map<String, Object> interfaces = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORK);
if (interfaces != null) {
int networkUsage = 0;
Map<String, VdsNetworkInterface> nicsByName = Entities.entitiesByName(vds.getInterfaces());
for (Entry<String, Object> entry : interfaces.entrySet()) {
if (nicsByName.containsKey(entry.getKey())) {
VdsNetworkInterface iface = nicsByName.get(entry.getKey());
iface.setVdsId(vds.getId());
Map<String, Object> dict = (Map<String, Object>) entry.getValue();
Double rx_rate = AssignDoubleValue(dict, VdsProperties.rx_rate);
Double rx_dropped = AssignDoubleValue(dict, VdsProperties.rx_dropped);
Double tx_rate = AssignDoubleValue(dict, VdsProperties.tx_rate);
Double tx_dropped = AssignDoubleValue(dict, VdsProperties.tx_dropped);
iface.getStatistics().setReceiveRate(rx_rate != null ? rx_rate : 0);
iface.getStatistics().setReceiveDropRate(rx_dropped != null ? rx_dropped : 0);
iface.getStatistics().setTransmitRate(tx_rate != null ? tx_rate : 0);
iface.getStatistics().setTransmitDropRate(tx_dropped != null ? tx_dropped : 0);
iface.setSpeed(AssignIntValue(dict, VdsProperties.INTERFACE_SPEED));
iface.getStatistics().setStatus(AssignInterfaceStatusValue(dict, VdsProperties.iface_status));
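                    // the host-level network usage is the usage of its busiest interface; vlan devices and
                    // bond slaves are skipped, presumably so the underlying traffic isn't counted twice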
if (!NetworkUtils.isVlan(iface) && !iface.isBondSlave()) {
networkUsage = (int) Math.max(networkUsage, computeInterfaceUsage(iface));
}
}
}
vds.setUsageNetworkPercent(networkUsage);
}
// ----------- vds cpu statistics info ---------------------
vds.setCpuSys(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_sys));
vds.setCpuUser(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_user));
if (vds.getCpuSys() != null && vds.getCpuUser() != null) {
vds.setUsageCpuPercent((int) (vds.getCpuSys() + vds.getCpuUser()));
}
        // CPU load reported by VDSM is in uptime-style format, i.e. normalized
        // to unity, so that, say, an 8% load is reported as 0.08
Double d = AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_load);
d = (d != null) ? d : 0;
vds.setCpuLoad(d.doubleValue() * 100.0);
vds.setCpuIdle(AssignDoubleValue(xmlRpcStruct, VdsProperties.cpu_idle));
vds.setMemAvailable(AssignLongValue(xmlRpcStruct, VdsProperties.mem_available));
vds.setMemFree(AssignLongValue(xmlRpcStruct, VdsProperties.memFree));
vds.setMemShared(AssignLongValue(xmlRpcStruct, VdsProperties.mem_shared));
vds.setSwapFree(AssignLongValue(xmlRpcStruct, VdsProperties.swap_free));
vds.setSwapTotal(AssignLongValue(xmlRpcStruct, VdsProperties.swap_total));
vds.setKsmCpuPercent(AssignIntValue(xmlRpcStruct, VdsProperties.ksm_cpu_percent));
vds.setKsmPages(AssignLongValue(xmlRpcStruct, VdsProperties.ksm_pages));
vds.setKsmState(AssignBoolValue(xmlRpcStruct, VdsProperties.ksm_state));
// dynamic data got from GetVdsStats
if (xmlRpcStruct.containsKey(VdsProperties.transparent_huge_pages_state)) {
vds.setTransparentHugePagesState(EnumUtils.valueOf(VdsTransparentHugePagesState.class, xmlRpcStruct
.get(VdsProperties.transparent_huge_pages_state).toString(), true));
}
if (xmlRpcStruct.containsKey(VdsProperties.anonymous_transparent_huge_pages)) {
vds.setAnonymousHugePages(AssignIntValue(xmlRpcStruct, VdsProperties.anonymous_transparent_huge_pages));
}
vds.setNetConfigDirty(AssignBoolValue(xmlRpcStruct, VdsProperties.netConfigDirty));
vds.setImagesLastCheck(AssignDoubleValue(xmlRpcStruct, VdsProperties.images_last_check));
vds.setImagesLastDelay(AssignDoubleValue(xmlRpcStruct, VdsProperties.images_last_delay));
Integer vm_count = AssignIntValue(xmlRpcStruct, VdsProperties.vm_count);
vds.setVmCount(vm_count == null ? 0 : vm_count);
vds.setVmActive(AssignIntValue(xmlRpcStruct, VdsProperties.vm_active));
vds.setVmMigrating(AssignIntValue(xmlRpcStruct, VdsProperties.vm_migrating));
updateVDSDomainData(vds, xmlRpcStruct);
updateLocalDisksUsage(vds, xmlRpcStruct);
// hosted engine
Integer haScore = null;
Boolean haIsConfigured = null;
Boolean haIsActive = null;
Boolean haGlobalMaint = null;
Boolean haLocalMaint = null;
if (xmlRpcStruct.containsKey(VdsProperties.ha_stats)) {
Map<String, Object> haStats = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.ha_stats);
if (haStats != null) {
haScore = AssignIntValue(haStats, VdsProperties.ha_stats_score);
haIsConfigured = AssignBoolValue(haStats, VdsProperties.ha_stats_is_configured);
haIsActive = AssignBoolValue(haStats, VdsProperties.ha_stats_is_active);
haGlobalMaint = AssignBoolValue(haStats, VdsProperties.ha_stats_global_maintenance);
haLocalMaint = AssignBoolValue(haStats, VdsProperties.ha_stats_local_maintenance);
}
} else {
haScore = AssignIntValue(xmlRpcStruct, VdsProperties.ha_score);
// prior to 3.4, haScore was returned if ha was installed; assume active if > 0
if (haScore != null) {
haIsConfigured = true;
haIsActive = (haScore > 0);
}
}
vds.setHighlyAvailableScore(haScore != null ? haScore : 0);
vds.setHighlyAvailableIsConfigured(haIsConfigured != null ? haIsConfigured : false);
vds.setHighlyAvailableIsActive(haIsActive != null ? haIsActive : false);
vds.setHighlyAvailableGlobalMaintenance(haGlobalMaint != null ? haGlobalMaint : false);
vds.setHighlyAvailableLocalMaintenance(haLocalMaint != null ? haLocalMaint : false);
vds.setBootTime(AssignLongValue(xmlRpcStruct, VdsProperties.bootTime));
updateNumaStatisticsData(vds, xmlRpcStruct);
}
private static double computeInterfaceUsage(VdsNetworkInterface iface) {
return Math.max(truncatePercentage(iface.getStatistics().getReceiveRate()),
truncatePercentage(iface.getStatistics().getTransmitRate()));
}
private static double truncatePercentage(double value) {
return Math.min(100, value);
}
public static void updateNumaStatisticsData(VDS vds, Map<String, Object> xmlRpcStruct) {
List<VdsNumaNode> vdsNumaNodes = new ArrayList<>();
List<CpuStatistics> cpuStatsData = new ArrayList<>();
if (xmlRpcStruct.containsKey(VdsProperties.CPU_STATS)) {
Map<String, Map<String, Object>> cpuStats = (Map<String, Map<String, Object>>)
xmlRpcStruct.get(VdsProperties.CPU_STATS);
Map<Integer, List<CpuStatistics>> numaNodeCpuStats = new HashMap<>();
for (Map.Entry<String, Map<String, Object>> item : cpuStats.entrySet()) {
CpuStatistics data = buildVdsCpuStatistics(item);
cpuStatsData.add(data);
int numaNodeIndex = AssignIntValue(item.getValue(), VdsProperties.NUMA_NODE_INDEX);
if (!numaNodeCpuStats.containsKey(numaNodeIndex)) {
numaNodeCpuStats.put(numaNodeIndex, new ArrayList<CpuStatistics>());
}
numaNodeCpuStats.get(numaNodeIndex).add(data);
}
DecimalFormat percentageFormatter = new DecimalFormat("#.##");
for (Map.Entry<Integer, List<CpuStatistics>> item : numaNodeCpuStats.entrySet()) {
VdsNumaNode node = buildVdsNumaNodeStatistics(percentageFormatter, item);
vdsNumaNodes.add(node);
}
}
if (xmlRpcStruct.containsKey(VdsProperties.NUMA_NODE_FREE_MEM_STAT)) {
Map<String, Map<String, Object>> memStats = (Map<String, Map<String, Object>>)
xmlRpcStruct.get(VdsProperties.NUMA_NODE_FREE_MEM_STAT);
for (Map.Entry<String, Map<String, Object>> item : memStats.entrySet()) {
VdsNumaNode node = NumaUtils.getVdsNumaNodeByIndex(vdsNumaNodes, Integer.valueOf(item.getKey()));
if (node != null) {
node.getNumaNodeStatistics().setMemFree(AssignLongValue(item.getValue(),
VdsProperties.NUMA_NODE_FREE_MEM));
node.getNumaNodeStatistics().setMemUsagePercent(AssignIntValue(item.getValue(),
VdsProperties.NUMA_NODE_MEM_PERCENT));
}
}
}
vds.getNumaNodeList().clear();
vds.getNumaNodeList().addAll(vdsNumaNodes);
vds.getStatisticsData().getCpuCoreStatistics().clear();
vds.getStatisticsData().getCpuCoreStatistics().addAll(cpuStatsData);
}
private static VdsNumaNode buildVdsNumaNodeStatistics(DecimalFormat percentageFormatter,
Map.Entry<Integer, List<CpuStatistics>> item) {
VdsNumaNode node = new VdsNumaNode();
NumaNodeStatistics nodeStat = new NumaNodeStatistics();
double nodeCpuUser = 0.0;
double nodeCpuSys = 0.0;
double nodeCpuIdle = 0.0;
for (CpuStatistics cpuStat : item.getValue()) {
nodeCpuUser += cpuStat.getCpuUser();
nodeCpuSys += cpuStat.getCpuSys();
nodeCpuIdle += cpuStat.getCpuIdle();
}
nodeStat.setCpuUser(Double.valueOf(percentageFormatter.format(nodeCpuUser / item.getValue().size())));
nodeStat.setCpuSys(Double.valueOf(percentageFormatter.format(nodeCpuSys / item.getValue().size())));
nodeStat.setCpuIdle(Double.valueOf(percentageFormatter.format(nodeCpuIdle / item.getValue().size())));
nodeStat.setCpuUsagePercent((int) (nodeStat.getCpuSys() + nodeStat.getCpuUser()));
node.setIndex(item.getKey());
node.setNumaNodeStatistics(nodeStat);
return node;
}
private static CpuStatistics buildVdsCpuStatistics(Map.Entry<String, Map<String, Object>> item) {
CpuStatistics data = new CpuStatistics();
data.setCpuId(Integer.valueOf(item.getKey()));
data.setCpuUser(AssignDoubleValue(item.getValue(), VdsProperties.NUMA_CPU_USER));
data.setCpuSys(AssignDoubleValue(item.getValue(), VdsProperties.NUMA_CPU_SYS));
data.setCpuIdle(AssignDoubleValue(item.getValue(), VdsProperties.NUMA_CPU_IDLE));
data.setCpuUsagePercent((int) (data.getCpuSys() + data.getCpuUser()));
return data;
}
/**
     * Update {@link VDS#setLocalDisksUsage(Map)} with a map of the paths' usage extracted from the returned value. The
     * usage is reported in MB.
*
* @param vds
* The VDS object to update.
* @param xmlRpcStruct
* The XML/RPC to extract the usage from.
*/
protected static void updateLocalDisksUsage(VDS vds, Map<String, Object> xmlRpcStruct) {
if (xmlRpcStruct.containsKey(VdsProperties.DISK_STATS)) {
Map<String, Object> diskStatsStruct = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.DISK_STATS);
Map<String, Long> diskStats = new HashMap<String, Long>();
vds.setLocalDisksUsage(diskStats);
for (Entry<String, Object> entry : diskStatsStruct.entrySet()) {
Map<String, Object> pathStatsStruct = (Map<String, Object>) entry.getValue();
diskStats.put(entry.getKey(), AssignLongValue(pathStatsStruct, VdsProperties.DISK_STATS_FREE));
}
}
}
private static void updateVDSDomainData(VDS vds, Map<String, Object> xmlRpcStruct) {
if (xmlRpcStruct.containsKey(VdsProperties.domains)) {
Map<String, Object> domains = (Map<String, Object>)
xmlRpcStruct.get(VdsProperties.domains);
ArrayList<VDSDomainsData> domainsData = new ArrayList<VDSDomainsData>();
for (Map.Entry<String, ?> value : domains.entrySet()) {
try {
VDSDomainsData data = new VDSDomainsData();
data.setDomainId(new Guid(value.getKey().toString()));
Map<String, Object> internalValue = (Map<String, Object>) value.getValue();
double lastCheck = 0;
data.setCode((Integer) (internalValue).get(VdsProperties.code));
if (internalValue.containsKey(VdsProperties.lastCheck)) {
lastCheck = Double.parseDouble((String) internalValue.get(VdsProperties.lastCheck));
}
data.setLastCheck(lastCheck);
double delay = 0;
if (internalValue.containsKey(VdsProperties.delay)) {
delay = Double.parseDouble((String) internalValue.get(VdsProperties.delay));
}
data.setDelay(delay);
domainsData.add(data);
} catch (Exception e) {
log.error("failed building domains", e);
}
}
vds.setDomains(domainsData);
}
}
private static InterfaceStatus AssignInterfaceStatusValue(Map<String, Object> input, String name) {
InterfaceStatus ifaceStatus = InterfaceStatus.NONE;
if (input.containsKey(name)) {
String stringValue = (String) ((input.get(name) instanceof String) ? input.get(name) : null);
if (!StringUtils.isEmpty(stringValue)) {
if (stringValue.toLowerCase().trim().equals("up")) {
ifaceStatus = InterfaceStatus.UP;
} else {
ifaceStatus = InterfaceStatus.DOWN;
}
}
}
return ifaceStatus;
}
private static Double AssignDoubleValue(Map<String, Object> input, String name) {
Double returnValue = null;
if (input.containsKey(name)) {
String stringValue = (String) ((input.get(name) instanceof String) ? input.get(name) : null);
returnValue = (stringValue == null) ? null : Double.parseDouble(stringValue);
}
return returnValue;
}
/**
     * Does the same as AssignDoubleValue, but returns 0 instead of null when the value is missing.
     * @param input - the input xml struct
     * @param name - the name of the field we want to cast to double
     * @return - the double value, never null
*/
private static Double assignDoubleValueWithNullProtection(Map<String, Object> input, String name) {
Double doubleValue = AssignDoubleValue(input, name);
return (doubleValue == null ? Double.valueOf(0.0) : doubleValue);
}
private static Integer AssignIntValue(Map input, String name) {
if (input.containsKey(name)) {
if (input.get(name) instanceof Integer) {
return (Integer) input.get(name);
}
String stringValue = (String) input.get(name);
            if (!StringUtils.isEmpty(stringValue)) {
                // in case the input is decimal and we need an int, keep only the integer part
                stringValue = stringValue.split("[.]", -1)[0];
            }
try {
int intValue = Integer.parseInt(stringValue);
return intValue;
} catch (NumberFormatException nfe) {
String errMsg = String.format("Failed to parse %1$s value %2$s to integer", name, stringValue);
log.error(errMsg, nfe);
}
}
return null;
}
private static Long AssignLongValue(Map<String, Object> input, String name) {
if (input.containsKey(name)) {
if (input.get(name) instanceof Long || input.get(name) instanceof Integer) {
return Long.parseLong(input.get(name).toString());
}
String stringValue = (String) ((input.get(name) instanceof String) ? input.get(name) : null);
            if (!StringUtils.isEmpty(stringValue)) {
                // in case the input is decimal and we need a long, keep only the integer part
                stringValue = stringValue.split("[.]", -1)[0];
            }
try {
return Long.parseLong(stringValue);
} catch (NumberFormatException e) {
log.errorFormat("Failed to parse {0} value {1} to long", name, stringValue);
}
}
return null;
}
private static String AssignStringValue(Map<String, Object> input, String name) {
if (input.containsKey(name)) {
return (String) ((input.get(name) instanceof String) ? input.get(name) : null);
}
return null;
}
private static String AssignStringValueFromArray(Map<String, Object> input, String name) {
if (input.containsKey(name)) {
String[] arr = (String[]) ((input.get(name) instanceof String[]) ? input.get(name) : null);
if (arr == null) {
Object[] arr2 = (Object[]) ((input.get(name) instanceof Object[]) ? input.get(name) : null);
if (arr2 != null) {
arr = new String[arr2.length];
                    for (int i = 0; i < arr2.length; i++) {
                        arr[i] = arr2[i].toString();
                    }
}
}
if (arr != null) {
return StringUtils.join(arr, ',');
}
}
return null;
}
    private static Date AssignDateTimeFromEpoch(Map<String, Object> input, String name) {
Date retval = null;
try {
if (input.containsKey(name)) {
Double secsSinceEpoch = (Double) input.get(name);
Calendar calendar = Calendar.getInstance();
                // the value is assumed to be seconds since the epoch (as the variable name suggests),
                // while Calendar expects milliseconds
                calendar.setTimeInMillis((long) (secsSinceEpoch * 1000));
retval = calendar.getTime();
}
} catch (RuntimeException ex) {
String msg = String.format("VdsBroker::AssignDateTImeFromEpoch - failed to convert field %1$s to dateTime",
name);
log.warn(msg, ex);
retval = null;
}
return retval;
}
private static Date AssignDatetimeValue(Map<String, Object> input, String name) {
if (input.containsKey(name)) {
if (input.get(name) instanceof Date) {
return (Date) input.get(name);
}
DateFormat formatter = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss z");
try {
String dateStr = input.get(name).toString().replaceFirst("T", " ").trim();
return formatter.parse(dateStr);
} catch (ParseException e) {
                log.error(String.format("Failed to parse %1$s value %2$s to date", name, input.get(name)), e);
}
}
return null;
}
private static Boolean AssignBoolValue(Map<String, Object> input, String name) {
if (input.containsKey(name)) {
if (input.get(name) instanceof Boolean) {
return (Boolean) input.get(name);
}
return Boolean.parseBoolean(input.get(name).toString());
}
return Boolean.FALSE;
}
private static void initDisks(Map<String, Object> vmStruct, VmDynamic vm) {
Map<String, Object> disks = (Map<String, Object>) vmStruct.get(VdsProperties.vm_disks);
ArrayList<DiskImageDynamic> disksData = new ArrayList<DiskImageDynamic>();
for (Object diskAsObj : disks.values()) {
Map<String, Object> disk = (Map<String, Object>) diskAsObj;
DiskImageDynamic diskData = new DiskImageDynamic();
String imageGroupIdString = AssignStringValue(disk, VdsProperties.image_group_id);
if (!StringUtils.isEmpty(imageGroupIdString)) {
Guid imageGroupIdGuid = new Guid(imageGroupIdString);
diskData.setId(imageGroupIdGuid);
diskData.setread_rate(AssignIntValue(disk, VdsProperties.vm_disk_read_rate));
diskData.setwrite_rate(AssignIntValue(disk, VdsProperties.vm_disk_write_rate));
if (disk.containsKey(VdsProperties.disk_actual_size)) {
Long size = AssignLongValue(disk, VdsProperties.disk_actual_size);
diskData.setactual_size(size != null ? size * 512 : 0);
} else if (disk.containsKey(VdsProperties.disk_true_size)) {
Long size = AssignLongValue(disk, VdsProperties.disk_true_size);
diskData.setactual_size(size != null ? size : 0);
}
if (disk.containsKey(VdsProperties.vm_disk_read_latency)) {
diskData.setReadLatency(assignDoubleValueWithNullProtection(disk,
VdsProperties.vm_disk_read_latency) / NANO_SECONDS);
}
if (disk.containsKey(VdsProperties.vm_disk_write_latency)) {
diskData.setWriteLatency(assignDoubleValueWithNullProtection(disk,
VdsProperties.vm_disk_write_latency) / NANO_SECONDS);
}
if (disk.containsKey(VdsProperties.vm_disk_flush_latency)) {
diskData.setFlushLatency(assignDoubleValueWithNullProtection(disk,
VdsProperties.vm_disk_flush_latency) / NANO_SECONDS);
}
disksData.add(diskData);
}
}
vm.setDisks(disksData);
}
private static void initAppsList(Map<String, Object> vmStruct, VmDynamic vm) {
if (vmStruct.containsKey(VdsProperties.app_list)) {
Object tempAppsList = vmStruct.get(VdsProperties.app_list);
if (tempAppsList instanceof Object[]) {
Object[] apps = (Object[]) tempAppsList;
StringBuilder builder = new StringBuilder();
boolean firstTime = true;
for (Object app : apps) {
                    String appString = (String) ((app instanceof String) ? app : null);
                    if (appString == null) {
                        log.warn("Failed to convert app: [" + app + "] to string");
                        continue;
                    }
if (!firstTime) {
builder.append(",");
} else {
firstTime = false;
}
builder.append(appString);
}
vm.setAppList(builder.toString());
} else {
vm.setAppList("");
}
}
}
private static VMStatus convertToVmStatus(String statusName) {
VMStatus status = VMStatus.Unassigned;
        // TODO: The following condition should be deleted as soon as we drop compatibility with 3.3, since the
        // "Running" state will be replaced by the "Up" state and "Unknown" will no longer exist. The "Up" state
        // will then be processed by EnumUtils like the other states below.
if ("Running".equals(statusName) || "Unknown".equals(statusName)) {
status = VMStatus.Up;
}
else if ("Migration Source".equals(statusName)) {
status = VMStatus.MigratingFrom;
}
else if ("Migration Destination".equals(statusName)) {
status = VMStatus.MigratingTo;
} else {
try {
statusName = statusName.replace(" ", "");
status = EnumUtils.valueOf(VMStatus.class, statusName, true);
} catch (Exception e) {
log.errorFormat("Vm status: {0} illegal", statusName);
}
}
return status;
}
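    // Illustrative status mappings implied by convertToVmStatus (examples assumed,
    // not exhaustive):
    //   "Running"               -> VMStatus.Up            (3.3 compatibility)
    //   "Migration Source"      -> VMStatus.MigratingFrom
    //   "Migration Destination" -> VMStatus.MigratingTo
    //   "Powering down"         -> resolved via EnumUtils after stripping spaces,
    //                              assuming a matching VMStatus constant exists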
/**
* Updates the host network data with the network data reported by the host
*
* @param vds
* The host to update
* @param xmlRpcStruct
* A nested map contains network interfaces data
*/
public static void updateNetworkData(VDS vds, Map<String, Object> xmlRpcStruct) {
vds.setActiveNic(AssignStringValue(xmlRpcStruct, VdsProperties.NETWORK_LAST_CLIENT_INTERFACE));
List<VdsNetworkInterface> oldInterfaces =
DbFacade.getInstance().getInterfaceDao().getAllInterfacesForVds(vds.getId());
vds.getInterfaces().clear();
addHostNetworkInterfaces(vds, xmlRpcStruct);
addHostVlanDevices(vds, xmlRpcStruct);
addHostBondDevices(vds, xmlRpcStruct);
addHostNetworksAndUpdateInterfaces(vds, xmlRpcStruct);
// set bonding options
setBondingOptions(vds, oldInterfaces);
// This information was added in 3.1, so don't use it if it's not there.
if (xmlRpcStruct.containsKey(VdsProperties.netConfigDirty)) {
vds.setNetConfigDirty(AssignBoolValue(xmlRpcStruct, VdsProperties.netConfigDirty));
}
}
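    // A minimal sketch (assumed shape, for illustration only) of the xmlRpcStruct
    // consumed by updateNetworkData; key names not referenced through VdsProperties
    // are guesses:
    //   {
    //     "nics":     { "eth0":      { "addr": "...", "netmask": "...", "hwaddr": "...", "cfg": {...} } },
    //     "vlans":    { "eth0.100":  { "vlanid": 100, "iface": "eth0", ... } },
    //     "bondings": { "bond0":     { "slaves": ["eth1", "eth2"], "cfg": {...} } },
    //     "networks": { "ovirtmgmt": { "iface": "eth0", "addr": "...", "gateway": "..." } }
    //   }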
private static void addHostNetworksAndUpdateInterfaces(VDS vds,
Map<String, Object> xmlRpcStruct) {
// Networks collection (name point to list of nics or bonds)
Map<String, Object> networks = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORKS);
if (networks != null) {
vds.getNetworks().clear();
for (Entry<String, Object> entry : networks.entrySet()) {
Map<String, Object> network = (Map<String, Object>) entry.getValue();
if (network != null) {
Network net = createNetworkData(entry.getKey(), network);
List<VdsNetworkInterface> interfaces = findNetworkInterfaces(vds, xmlRpcStruct, network);
for (VdsNetworkInterface iface : interfaces) {
updateNetworkDetailsInInterface(iface,
network,
vds,
net);
}
vds.getNetworks().add(net);
reportInvalidInterfacesForNetwork(interfaces, net, vds);
}
}
}
}
/**
* Reports a warning to the audit log if a bridge is connected to more than one interface which is considered bad
* configuration.
*
* @param interfaces
* The network's interfaces
* @param network
* The network to report for
* @param vds
* The host in which the network is defined
*/
private static void reportInvalidInterfacesForNetwork(List<VdsNetworkInterface> interfaces, Network network, VDS vds) {
if (interfaces.isEmpty()) {
AuditLogDirector.log(createHostNetworkAuditLog(network, vds), AuditLogType.NETWORK_WITHOUT_INTERFACES);
} else if (interfaces.size() > 1) {
AuditLogableBase logable = createHostNetworkAuditLog(network, vds);
logable.addCustomValue("Interfaces", StringUtils.join(Entities.objectNames(interfaces), ","));
AuditLogDirector.log(logable, AuditLogType.BRIDGED_NETWORK_OVER_MULTIPLE_INTERFACES);
}
}
protected static AuditLogableBase createHostNetworkAuditLog(Network network, VDS vds) {
AuditLogableBase logable = new AuditLogableBase(vds.getId());
logable.addCustomValue("NetworkName", network.getName());
return logable;
}
private static List<VdsNetworkInterface> findNetworkInterfaces(VDS vds,
Map<String, Object> xmlRpcStruct,
Map<String, Object> network) {
Map<String, VdsNetworkInterface> vdsInterfaces = Entities.entitiesByName(vds.getInterfaces());
List<VdsNetworkInterface> interfaces = new ArrayList<VdsNetworkInterface>();
if (FeatureSupported.bridgesReportByVdsm(vds.getVdsGroupCompatibilityVersion())) {
VdsNetworkInterface iface = null;
String interfaceName = (String) network.get(VdsProperties.INTERFACE);
if (interfaceName != null) {
iface = vdsInterfaces.get(interfaceName);
if (iface == null) {
Map<String, Object> bridges =
(Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORK_BRIDGES);
if (bridges != null && bridges.containsKey(interfaceName)) {
interfaces.addAll(findBridgedNetworkInterfaces((Map<String, Object>) bridges.get(interfaceName),
vdsInterfaces));
}
} else {
interfaces.add(iface);
}
}
} else {
interfaces.addAll(findBridgedNetworkInterfaces(network, vdsInterfaces));
}
return interfaces;
}
private static Network createNetworkData(String networkName, Map<String, Object> network) {
Network net = new Network();
net.setName(networkName);
net.setAddr((String) network.get("addr"));
net.setSubnet((String) network.get("netmask"));
net.setGateway((String) network.get(VdsProperties.GLOBAL_GATEWAY));
if (StringUtils.isNotBlank((String) network.get(VdsProperties.MTU))) {
net.setMtu(Integer.parseInt((String) network.get(VdsProperties.MTU)));
}
return net;
}
private static List<VdsNetworkInterface> findBridgedNetworkInterfaces(Map<String, Object> bridge,
Map<String, VdsNetworkInterface> vdsInterfaces) {
List<VdsNetworkInterface> interfaces = new ArrayList<VdsNetworkInterface>();
Object[] ports = (Object[]) bridge.get("ports");
if (ports != null) {
for (Object port : ports) {
if (vdsInterfaces.containsKey(port.toString())) {
interfaces.add(vdsInterfaces.get(port.toString()));
}
}
}
return interfaces;
}
private static void addHostBondDevices(VDS vds, Map<String, Object> xmlRpcStruct) {
Map<String, Object> bonds = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORK_BONDINGS);
if (bonds != null) {
for (Entry<String, Object> entry : bonds.entrySet()) {
VdsNetworkInterface iface = new VdsNetworkInterface();
VdsNetworkStatistics iStats = new VdsNetworkStatistics();
iface.setStatistics(iStats);
iStats.setId(Guid.newGuid());
iStats.setVdsId(vds.getId());
iface.setId(iStats.getId());
iface.setName(entry.getKey());
iface.setVdsId(vds.getId());
iface.setBonded(true);
Map<String, Object> bond = (Map<String, Object>) entry.getValue();
if (bond != null) {
iface.setMacAddress((String) bond.get("hwaddr"));
iface.setAddress((String) bond.get("addr"));
iface.setSubnet((String) bond.get("netmask"));
if (bond.get("slaves") != null) {
addBondDeviceToHost(vds, iface, (Object[]) bond.get("slaves"));
}
if (StringUtils.isNotBlank((String) bond.get(VdsProperties.MTU))) {
iface.setMtu(Integer.parseInt((String) bond.get(VdsProperties.MTU)));
}
Map<String, Object> config =
(Map<String, Object>) bond.get("cfg");
if (config != null && config.get("BONDING_OPTS") != null) {
iface.setBondOptions(config.get("BONDING_OPTS").toString());
}
addBootProtocol(config, vds, iface);
}
}
}
}
/**
* Updates the host interfaces list with vlan devices
*
* @param vds
* The host to update
* @param xmlRpcStruct
* a map contains pairs of vlan device name and vlan data
*/
private static void addHostVlanDevices(VDS vds, Map<String, Object> xmlRpcStruct) {
// vlans
Map<String, Object> vlans = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORK_VLANS);
if (vlans != null) {
for (Entry<String, Object> entry : vlans.entrySet()) {
VdsNetworkInterface iface = new VdsNetworkInterface();
VdsNetworkStatistics iStats = new VdsNetworkStatistics();
iface.setStatistics(iStats);
iStats.setId(Guid.newGuid());
iface.setId(iStats.getId());
String vlanDeviceName = entry.getKey();
iface.setName(vlanDeviceName);
iface.setVdsId(vds.getId());
Map<String, Object> vlan = (Map<String, Object>) entry.getValue();
if (vlan.get(VdsProperties.VLAN_ID) != null && vlan.get(VdsProperties.BASE_INTERFACE) != null) {
iface.setVlanId((Integer) vlan.get(VdsProperties.VLAN_ID));
iface.setBaseInterface((String) vlan.get(VdsProperties.BASE_INTERFACE));
} else if (vlanDeviceName.contains(".")) {
String[] names = vlanDeviceName.split("[.]", -1);
String vlanId = names[1];
iface.setVlanId(Integer.parseInt(vlanId));
iface.setBaseInterface(names[0]);
}
iface.setAddress((String) vlan.get("addr"));
iface.setSubnet((String) vlan.get("netmask"));
if (StringUtils.isNotBlank((String) vlan.get(VdsProperties.MTU))) {
iface.setMtu(Integer.parseInt((String) vlan.get(VdsProperties.MTU)));
}
iStats.setVdsId(vds.getId());
addBootProtocol((Map<String, Object>) vlan.get("cfg"), vds, iface);
vds.getInterfaces().add(iface);
}
}
}
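    // Example (illustrative): a vlan device named "eth2.100" that carries no explicit
    // vlan data falls back to the name parsing above, yielding
    // baseInterface = "eth2" and vlanId = 100.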
/**
* Updates the host network interfaces with the collected data from the host
*
* @param vds
* The host to update its interfaces
* @param xmlRpcStruct
* A nested map contains network interfaces data
*/
private static void addHostNetworkInterfaces(VDS vds, Map<String, Object> xmlRpcStruct) {
Map<String, Object> nics = (Map<String, Object>) xmlRpcStruct.get(VdsProperties.NETWORK_NICS);
if (nics != null) {
for (Entry<String, Object> entry : nics.entrySet()) {
VdsNetworkInterface iface = new VdsNetworkInterface();
VdsNetworkStatistics iStats = new VdsNetworkStatistics();
iface.setStatistics(iStats);
iStats.setId(Guid.newGuid());
iface.setId(iStats.getId());
iface.setName(entry.getKey());
iface.setVdsId(vds.getId());
updateNetworkInterfaceDataFromHost(iface, vds, (Map<String, Object>) entry.getValue());
iStats.setVdsId(vds.getId());
vds.getInterfaces().add(iface);
}
}
}
/**
* Updates a given interface by data as collected from the host.
*
* @param iface
* The interface to update
* @param nic
* A key-value map of the interface properties and their value
*/
private static void updateNetworkInterfaceDataFromHost(
VdsNetworkInterface iface, VDS host, Map<String, Object> nic) {
if (nic != null) {
if (nic.get("speed") != null) {
Object speed = nic.get("speed");
iface.setSpeed((Integer) speed);
}
iface.setAddress((String) nic.get("addr"));
iface.setSubnet((String) nic.get("netmask"));
iface.setMacAddress((String) nic.get("hwaddr"));
// if we get "permhwaddr", we are a part of a bond and we use that as the mac address
if (nic.get("permhwaddr") != null) {
                // TODO: remove when the minimal supported vdsm version is >= 3.6
                // in older VDSM versions, the slave's MAC is in upper case
String macUpperCase = (String) nic.get("permhwaddr");
iface.setMacAddress(macUpperCase.toLowerCase());
}
if (StringUtils.isNotBlank((String) nic.get(VdsProperties.MTU))) {
iface.setMtu(Integer.parseInt((String) nic.get(VdsProperties.MTU)));
}
addBootProtocol((Map<String, Object>) nic.get("cfg"), host, iface);
}
}
/**
* Update the network details on a given interface.
*
* @param iface
* The interface to update.
* @param network
* Network struct to get details from.
* @param net
* Network to get details from.
*/
private static void updateNetworkDetailsInInterface(VdsNetworkInterface iface,
Map<String, Object> network,
VDS host,
Network net) {
if (iface != null) {
iface.setNetworkName(net.getName());
// set the management ip
if (StringUtils.equals(iface.getNetworkName(), NetworkUtils.getEngineNetwork())) {
iface.setType(iface.getType() | VdsInterfaceType.MANAGEMENT.getValue());
}
iface.setAddress(net.getAddr());
iface.setSubnet(net.getSubnet());
boolean bridgedNetwork = isBridgedNetwork(network);
iface.setBridged(bridgedNetwork);
setGatewayIfNecessary(iface, host, net.getGateway());
if (bridgedNetwork) {
Map<String, Object> networkConfig = (Map<String, Object>) network.get("cfg");
addBootProtocol(networkConfig, host, iface);
}
if (FeatureSupported.hostNetworkQos(Collections.max(host.getSupportedClusterVersionsSet()))) {
NetworkQosMapper qosMapper =
new NetworkQosMapper(network, VdsProperties.HOST_QOS_INBOUND, VdsProperties.HOST_QOS_OUTBOUND);
iface.setQos(qosMapper.deserialize());
}
}
}
/**
* Returns true if vdsm doesn't report the 'bridged' attribute or if reported - its actual value.<br>
* The assumption is bridge-less network isn't supported if the 'bridged' attribute wasn't reported.<br>
* Bridge-less networks must report 'false' for this property.
*
* @param network
* The network to evaluate its bridge attribute
     * @return true if no attribute is reported, or its actual value otherwise
*/
private static boolean isBridgedNetwork(Map<String, Object> network) {
return network.get("bridged") == null || Boolean.parseBoolean(network.get("bridged").toString());
}
    // Check for old bonding options: if the user previously set a value for the bonding
    // options via the UI, and the host does not report its bonding options (hosts below
    // 2.2.4), override the "new" bonding options with the old one, but only when the new
    // value is null and the old one is not.
private static void setBondingOptions(VDS vds, List<VdsNetworkInterface> oldInterfaces) {
for (VdsNetworkInterface iface : oldInterfaces) {
if (iface.getBondOptions() != null) {
for (VdsNetworkInterface newIface : vds.getInterfaces()) {
if (iface.getName().equals(newIface.getName()) && newIface.getBondOptions() == null) {
newIface.setBondOptions(iface.getBondOptions());
break;
}
}
}
}
}
private static void addBootProtocol(Map<String, Object> cfg, VDS host, VdsNetworkInterface iface) {
NetworkBootProtocol bootproto = NetworkBootProtocol.NONE;
if (cfg != null) {
String bootProtocol = (String) cfg.get("BOOTPROTO");
if (bootProtocol != null) {
if (bootProtocol.toLowerCase().equals("dhcp")) {
bootproto = NetworkBootProtocol.DHCP;
} else if (bootProtocol.toLowerCase().equals("none") || bootProtocol.toLowerCase().equals("static")) {
if (StringUtils.isNotEmpty((String) cfg.get("IPADDR"))) {
bootproto = NetworkBootProtocol.STATIC_IP;
}
}
} else if (StringUtils.isNotEmpty((String) cfg.get("IPADDR"))) {
bootproto = NetworkBootProtocol.STATIC_IP;
}
if (bootproto == NetworkBootProtocol.STATIC_IP) {
String gateway = (String) cfg.get(VdsProperties.GATEWAY);
if (StringUtils.isNotEmpty(gateway)) {
                    setGatewayIfNecessary(iface, host, gateway);
}
}
}
iface.setBootProtocol(bootproto);
}
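    // Boot protocol resolution summary, derived from addBootProtocol above:
    //   cfg == null                                        -> NONE
    //   BOOTPROTO == "dhcp"                                -> DHCP
    //   BOOTPROTO in {"none", "static"} and IPADDR set     -> STATIC_IP (+ gateway if present)
    //   BOOTPROTO missing but IPADDR set                   -> STATIC_IP
    //   anything else                                      -> NONE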
private static void addBondDeviceToHost(VDS vds, VdsNetworkInterface iface, Object[] interfaces) {
vds.getInterfaces().add(iface);
if (interfaces != null) {
for (Object name : interfaces) {
for (VdsNetworkInterface tempInterface : vds.getInterfaces()) {
if (tempInterface.getName().equals(name.toString())) {
tempInterface.setBondName(iface.getName());
break;
}
}
}
}
}
/**
* Store the gateway for either of these cases:
* 1. any host network, in a cluster that supports multiple gateways
* 2. management network, no matter the cluster compatibility version
* 3. the active interface (could happen when there is no management network yet)
* If gateway was provided for non-management network when multiple gateways aren't supported, its value should be ignored.
*
* @param iface
* the host network interface
* @param host
* the host whose interfaces are being edited
* @param gateway
* the gateway value to be set
*/
private static void setGatewayIfNecessary(VdsNetworkInterface iface, VDS host, String gateway) {
if (FeatureSupported.multipleGatewaysSupported(host.getVdsGroupCompatibilityVersion())
|| NetworkUtils.getEngineNetwork().equals(iface.getNetworkName())
|| iface.getName().equals(host.getActiveNic())) {
iface.setGateway(gateway);
}
}
/**
     * Creates a list of {@link VmGuestAgentInterface} from the {@link VdsProperties#VM_NETWORK_INTERFACES} entry
*
* @param vmId
* the Vm's ID which contains the interfaces
*
* @param xmlRpcStruct
* the xml structure that describes the VM as reported by VDSM
* @return a list of {@link VmGuestAgentInterface} or null if no guest vNics were reported
*/
public static List<VmGuestAgentInterface> buildVmGuestAgentInterfacesData(Guid vmId, Map<String, Object> xmlRpcStruct) {
if (!xmlRpcStruct.containsKey(VdsProperties.VM_NETWORK_INTERFACES)) {
return null;
}
List<VmGuestAgentInterface> interfaces = new ArrayList<VmGuestAgentInterface>();
for (Object ifaceStruct : (Object[]) xmlRpcStruct.get(VdsProperties.VM_NETWORK_INTERFACES)) {
VmGuestAgentInterface nic = new VmGuestAgentInterface();
Map ifaceMap = (Map) ifaceStruct;
nic.setInterfaceName(AssignStringValue(ifaceMap, VdsProperties.VM_INTERFACE_NAME));
nic.setMacAddress(getMacAddress(ifaceMap));
            nic.setIpv4Addresses(extractStringList(ifaceMap, VdsProperties.VM_IPV4_ADDRESSES));
            nic.setIpv6Addresses(extractStringList(ifaceMap, VdsProperties.VM_IPV6_ADDRESSES));
nic.setVmId(vmId);
interfaces.add(nic);
}
return interfaces;
}
private static String getMacAddress(Map<String, Object> ifaceMap) {
String macAddress = AssignStringValue(ifaceMap, VdsProperties.VM_INTERFACE_MAC_ADDRESS);
return macAddress != null ? macAddress.replace('-', ':') : null;
}
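    // Example (illustrative): a guest-agent-reported MAC of "00-1A-4B-16-01-59"
    // is normalized by getMacAddress to "00:1A:4B:16:01:59".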
/**
* Build through the received NUMA nodes information
* @param vds
* @param xmlRpcStruct
*/
private static void updateNumaNodesData(VDS vds, Map<String, Object> xmlRpcStruct) {
if (xmlRpcStruct.containsKey(VdsProperties.AUTO_NUMA)) {
vds.getDynamicData().setAutoNumaBalancing(AutoNumaBalanceStatus.forValue(
AssignIntValue(xmlRpcStruct, VdsProperties.AUTO_NUMA)));
}
if (xmlRpcStruct.containsKey(VdsProperties.NUMA_NODES)) {
Map<String, Map<String, Object>> numaNodeMap =
(Map<String, Map<String, Object>>) xmlRpcStruct.get(VdsProperties.NUMA_NODES);
Map<String, Object> numaNodeDistanceMap =
(Map<String, Object>) xmlRpcStruct.get(VdsProperties.NUMA_NODE_DISTANCE);
List<VdsNumaNode> newNumaNodeList = new ArrayList<>(numaNodeMap.size());
for (Map.Entry<String, Map<String, Object>> item : numaNodeMap.entrySet()) {
                int index = Integer.parseInt(item.getKey());
Map<String, Object> itemMap = item.getValue();
List<Integer> cpuIds = extractIntegerList(itemMap, VdsProperties.NUMA_NODE_CPU_LIST);
long memTotal = AssignLongValue(itemMap, VdsProperties.NUMA_NODE_TOTAL_MEM);
VdsNumaNode numaNode = new VdsNumaNode();
numaNode.setIndex(index);
if (cpuIds != null) {
numaNode.setCpuIds(cpuIds);
}
numaNode.setMemTotal(memTotal);
newNumaNodeList.add(numaNode);
}
for (Map.Entry<String, Object> item : numaNodeDistanceMap.entrySet()) {
                int index = Integer.parseInt(item.getKey());
List<Integer> distances = extractIntegerList(numaNodeDistanceMap, item.getKey());
Map<Integer, Integer> distanceMap = new HashMap<>(distances.size());
for (int i = 0; i < distances.size(); i++) {
distanceMap.put(i, distances.get(i));
}
VdsNumaNode newNumaNode = NumaUtils.getVdsNumaNodeByIndex(newNumaNodeList, index);
if (newNumaNode != null) {
newNumaNode.setNumaNodeDistances(distanceMap);
}
}
vds.getDynamicData().setNumaNodeList(newNumaNodeList);
vds.setNumaSupport(newNumaNodeList.size() > 1);
}
}
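    // Illustrative shape (assumed) of the NUMA distance input handled above:
    //   numaNodeDistanceMap = { "0": [10, 20], "1": [20, 10] }
    // For node "0" this builds distanceMap = {0=10, 1=20}, i.e. the i-th list entry
    // is the distance from the keyed node to node index i.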
    private static List<String> extractStringList(Map<String, Object> xmlRpcStruct, String propertyName) {
if (!xmlRpcStruct.containsKey(propertyName)){
return null;
}
Object[] items = (Object[]) xmlRpcStruct.get(propertyName);
if (items.length == 0) {
return null;
}
List<String> list = new ArrayList<String>();
for (Object item : items) {
list.add((String) item);
}
return list;
}
private static List<Integer> extractIntegerList(Map<String, Object> xmlRpcStruct, String propertyName) {
if (!xmlRpcStruct.containsKey(propertyName)){
return null;
}
Object[] items = (Object[]) xmlRpcStruct.get(propertyName);
if (items.length == 0) {
return null;
}
List<Integer> list = new ArrayList<Integer>();
for (Object item : items) {
list.add((Integer) item);
}
return list;
}
private static final Log log = LogFactory.getLog(VdsBrokerObjectsBuilder.class);
}
| engine:slave's mac in Broker, to lower case
In older vdsm versions the slave's mac is reported in upper case, resulting
in the slave's mac being saved and represented in upper case.
Change-Id: Ibfebc2aea4bc3605253bfb072fc4ad19c0fd2270
Bug-Url: https://bugzilla.redhat.com/1132422
Signed-off-by: Eliraz Levi <[email protected]>
| backend/manager/modules/vdsbroker/src/main/java/org/ovirt/engine/core/vdsbroker/vdsbroker/VdsBrokerObjectsBuilder.java | engine:slave's mac in Broker, to lower case |
|
Java | apache-2.0 | 5c6c646e6d0f86802ca440365c337683d07f68ef | 0 | katzer/cordova-plugin-printer,katzer/cordova-plugin-printer | /*
Copyright 2013-2016 appPlant GmbH
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package de.appplant.cordova.plugin.printer;
import android.app.Activity;
import android.content.Context;
import android.os.Build;
import android.print.PrintAttributes;
import android.print.PrintDocumentAdapter;
import android.print.PrintJob;
import android.print.PrintManager;
import android.view.View;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.PluginResult;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
/**
 * Plugin to print HTML documents. It creates an invisible web view that loads
 * the markup data; once the page has been fully rendered, it takes the print
 * adapter of that web view and initializes a print job.
*/
public class Printer extends CordovaPlugin {
/**
* The web view that loads all the content.
*/
private WebView view;
/**
     * Reference is necessary to invoke the callback in the onResume event.
     * Without it, it is not possible to determine the status of the job.
*/
private PrintJob job;
/**
* Reference is necessary to invoke the callback in the onresume event.
*/
private CallbackContext command;
/**
* Default name of the printed document (PDF-Printer).
*/
private static final String DEFAULT_DOC_NAME = "unknown";
/**
* Executes the request.
*
* This method is called from the WebView thread.
* To do a non-trivial amount of work, use:
* cordova.getThreadPool().execute(runnable);
*
* To run on the UI thread, use:
* cordova.getActivity().runOnUiThread(runnable);
*
* @param action The action to execute.
* @param args The exec() arguments in JSON form.
* @param callback The callback context used when calling back into JavaScript.
* @return Whether the action was valid.
*/
@Override
public boolean execute (String action, JSONArray args,
CallbackContext callback) throws JSONException {
command = callback;
if (action.equalsIgnoreCase("isAvailable")) {
isAvailable();
return true;
}
if (action.equalsIgnoreCase("print")) {
print(args);
return true;
}
return false;
}
/**
* Informs if the device is able to print documents.
     * An Internet connection is required to load the cloud print dialog.
*/
private void isAvailable () {
cordova.getThreadPool().execute(new Runnable() {
@Override
public void run() {
List<String> ids = getEnabledPrintServiceIds();
                // the list always contains the built-in PDF printer sentinel, so more
                // than one entry means at least one real print service is enabled
                boolean available = ids.size() > 1;
PluginResult res1 = new PluginResult(
PluginResult.Status.OK, available);
PluginResult res2 = new PluginResult(
PluginResult.Status.OK, new JSONArray(ids));
PluginResult res = new PluginResult(
PluginResult.Status.OK, Arrays.asList(res1, res2));
command.sendPluginResult(res);
}
});
}
/**
* Loads the HTML content into the web view and invokes the print manager.
*
* @param args
* The exec arguments as JSON
*/
private void print (final JSONArray args) {
final String content = args.optString(0, "<html></html>");
final JSONObject props = args.optJSONObject(1);
cordova.getActivity().runOnUiThread( new Runnable() {
@Override
public void run() {
initWebView(props);
loadContent(content);
}
});
}
/**
* Loads the content into the web view.
*
* @param content
* Either an HTML string or URI
*/
private void loadContent(String content) {
if (content.startsWith("http") || content.startsWith("file:")) {
view.loadUrl(content);
} else {
String baseURL = webView.getUrl();
baseURL = baseURL.substring(0, baseURL.lastIndexOf('/') + 1);
// Set base URI to the assets/www folder
view.loadDataWithBaseURL(
baseURL, content, "text/html", "UTF-8", null);
}
}
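    // Accepted content forms (illustrative examples, not from the original source):
    //   loadContent("https://example.com/receipt.html");         // remote page
    //   loadContent("file:///android_asset/www/receipt.html");   // local file
    //   loadContent("<html><body><h1>Hi</h1></body></html>");    // raw markup,
    //       rendered relative to the Cordova assets/www base URL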
/**
* Configures the WebView components which will call the Google Cloud Print
* Service.
*
* @param props
* The JSON object with the containing page properties
*/
private void initWebView (JSONObject props) {
Activity ctx = cordova.getActivity();
view = new WebView(ctx);
WebSettings settings = view.getSettings();
settings.setDatabaseEnabled(true);
settings.setGeolocationEnabled(true);
settings.setSaveFormData(true);
settings.setUseWideViewPort(true);
view.setScrollBarStyle(View.SCROLLBARS_INSIDE_OVERLAY);
if (Build.VERSION.SDK_INT >= 21) {
Method setMixedContentModeMethod = getMethod(settings.getClass(),
"setMixedContentMode", int.class);
invokeMethod(settings, setMixedContentModeMethod, 2);
}
setWebViewClient(props);
}
/**
* Creates the web view client which sets the print document.
*
* @param props
* The JSON object with the containing page properties
*/
private void setWebViewClient (JSONObject props) {
final String docName = props.optString("name", DEFAULT_DOC_NAME);
final boolean landscape = props.optBoolean("landscape", false);
final boolean graystyle = props.optBoolean("graystyle", false);
final String duplex = props.optString("duplex", "none");
view.setWebViewClient(new WebViewClient() {
@Override
public boolean shouldOverrideUrlLoading (WebView view, String url) {
return false;
}
@Override
public void onPageFinished (WebView webView, String url) {
PrintManager printManager = getPrintMgr();
PrintAttributes.Builder builder = new PrintAttributes.Builder();
PrintDocumentAdapter adapter = getAdapter(webView, docName);
builder.setMinMargins(PrintAttributes.Margins.NO_MARGINS);
builder.setColorMode(graystyle
? PrintAttributes.COLOR_MODE_MONOCHROME
: PrintAttributes.COLOR_MODE_COLOR);
builder.setMediaSize(landscape
? PrintAttributes.MediaSize.UNKNOWN_LANDSCAPE
: PrintAttributes.MediaSize.UNKNOWN_PORTRAIT);
if (!duplex.equals("none") && Build.VERSION.SDK_INT >= 23) {
boolean longEdge = duplex.equals("long");
Method setDuplexModeMethod = getMethod(builder.getClass(),
"setDuplexMode", int.class);
invokeMethod(builder, setDuplexModeMethod,
longEdge ? 2 : 4);
}
job = printManager.print(docName, adapter, builder.build());
view = null;
}
});
}
/**
* Invokes the callback once the print job is complete or has been canceled.
*/
@Override
    public void onResume (boolean multitasking) {
        // guard: onResume also fires on ordinary app switches where no print job
        // exists; without this check job.isStarted() would throw an NPE
        if (job == null) {
            super.onResume(multitasking);
            return;
        }
        PluginResult res = new PluginResult(
                PluginResult.Status.OK, job.isStarted() || job.isCompleted());
job = null;
command.sendPluginResult(res);
super.onResume(multitasking);
}
/**
* Get a PrintManager instance.
*
* @return A PrintManager instance.
*/
private PrintManager getPrintMgr () {
return (PrintManager) cordova.getActivity()
.getSystemService(Context.PRINT_SERVICE);
}
/**
* Create the print document adapter for the web view component. On
* devices older then SDK 21 it will use the deprecated method
* `createPrintDocumentAdapter` without arguments and on newer devices
* the recommended way.
*
* @param webView
* The web view which content to print out.
* @param docName
* The name of the printed document.
* @return
* The created adapter.
*/
private PrintDocumentAdapter getAdapter (WebView webView, String docName) {
if (Build.VERSION.SDK_INT >= 21) {
Method createPrintDocumentAdapterMethod = getMethod(
WebView.class, "createPrintDocumentAdapter", String.class);
return (PrintDocumentAdapter) invokeMethod(
webView, createPrintDocumentAdapterMethod, docName);
} else {
Method createPrintDocumentAdapterMethod = getMethod(
WebView.class, "createPrintDocumentAdapter");
return (PrintDocumentAdapter) invokeMethod(
webView, createPrintDocumentAdapterMethod);
}
}
/**
* Get a list of ids of all installed and enabled print services. For
* that it uses reflections to call public but hidden methods from the
* PrintManager.
*
* @return A list of found print service ids.
*/
private List<String> getEnabledPrintServiceIds () {
try {
PrintManager printMgr = getPrintMgr();
Class<?> printerCls = Class.forName(
"android.printservice.PrintServiceInfo");
Method getPrinterMethod = getMethod(printMgr.getClass(),
"getEnabledPrintServices");
Method getIdMethod = getMethod(printerCls,
"getId");
List printers = (List) invokeMethod(printMgr, getPrinterMethod);
ArrayList<String> printerIds = new ArrayList<String>();
printerIds.add("android.print.pdf");
            if (printers == null) {
                return printerIds;
            }
for (Object printer : printers) {
String printerId = (String) invokeMethod(printer, getIdMethod);
printerIds.add(printerId);
}
return printerIds;
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
return Collections.emptyList();
}
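    // Example return value (illustrative; actual ids depend on installed services):
    //   ["android.print.pdf", "com.hp.android.printservice/.PrintService"]
    // The "android.print.pdf" sentinel is always added, which is why isAvailable()
    // treats a size greater than one as "a real print service is enabled".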
/**
* Finds the method with given name and set of arguments.
*
* @param cls
* The class in where to look for the method declaration.
* @param name
* The name of the method.
* @param params
* The arguments of the method.
* @return
* The found method or null.
*/
private Method getMethod (Class<?> cls, String name, Class<?>... params) {
try {
return cls.getDeclaredMethod(name, params);
} catch (NoSuchMethodException e) {
return null;
}
}
/**
* Invokes the method on the given object with the specified arguments.
*
* @param obj
* An object which class defines the method.
* @param method
* The method to invoke.
* @param args
* Set of arguments.
* @return
* The returned object or null.
*/
private Object invokeMethod (Object obj, Method method, Object... args) {
try {
return method.invoke(obj, args);
} catch (IllegalAccessException e) {
e.printStackTrace();
} catch (InvocationTargetException e) {
e.printStackTrace();
}
return null;
}
}
| src/android/Printer.java | /*
Copyright 2013-2016 appPlant GmbH
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package de.appplant.cordova.plugin.printer;
import android.app.Activity;
import android.content.Context;
import android.os.Build;
import android.print.PrintAttributes;
import android.print.PrintDocumentAdapter;
import android.print.PrintJob;
import android.print.PrintManager;
import android.view.View;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
import org.apache.cordova.CallbackContext;
import org.apache.cordova.CordovaPlugin;
import org.apache.cordova.PluginResult;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
public class Printer extends CordovaPlugin {
private WebView view;
private CallbackContext command;
private static final String DEFAULT_DOC_NAME = "unknown";
/**
* Executes the request.
*
* This method is called from the WebView thread.
* To do a non-trivial amount of work, use:
* cordova.getThreadPool().execute(runnable);
*
* To run on the UI thread, use:
* cordova.getActivity().runOnUiThread(runnable);
*
* @param action The action to execute.
* @param args The exec() arguments in JSON form.
* @param callback The callback context used when calling back into JavaScript.
* @return Whether the action was valid.
*/
@Override
public boolean execute (String action, JSONArray args,
CallbackContext callback) throws JSONException {
command = callback;
if (action.equalsIgnoreCase("isAvailable")) {
isAvailable();
return true;
}
if (action.equalsIgnoreCase("print")) {
print(args);
return true;
}
return false;
}
/**
* Informs if the device is able to print documents.
* A Internet connection is required to load the cloud print dialog.
*/
private void isAvailable () {
cordova.getThreadPool().execute(new Runnable() {
@Override
public void run() {
List<String> ids = getEnabledPrintServiceIds();
Boolean available = ids.size() > 1;
PluginResult res1 = new PluginResult(
PluginResult.Status.OK, available);
PluginResult res2 = new PluginResult(
PluginResult.Status.OK, new JSONArray(ids));
PluginResult res = new PluginResult(
PluginResult.Status.OK, Arrays.asList(res1, res2));
command.sendPluginResult(res);
}
});
}
/**
* Loads the HTML content into the web view and invokes the print manager.
*
* @param args
* The exec arguments as JSON
*/
private void print (final JSONArray args) {
final String content = args.optString(0, "<html></html>");
final JSONObject props = args.optJSONObject(1);
cordova.getActivity().runOnUiThread( new Runnable() {
@Override
public void run() {
initWebView(props);
loadContent(content);
}
});
}
/**
* Loads the content into the web view.
*
* @param content
* Either an HTML string or URI
*/
private void loadContent(String content) {
if (content.startsWith("http") || content.startsWith("file:")) {
view.loadUrl(content);
} else {
String baseURL = webView.getUrl();
baseURL = baseURL.substring(0, baseURL.lastIndexOf('/') + 1);
// Set base URI to the assets/www folder
view.loadDataWithBaseURL(
baseURL, content, "text/html", "UTF-8", null);
}
}
/**
* Configures the WebView components which will call the Google Cloud Print
* Service.
*
* @param props
* The JSON object with the containing page properties
*/
private void initWebView (JSONObject props) {
Activity ctx = cordova.getActivity();
view = new WebView(ctx);
WebSettings settings = view.getSettings();
settings.setDatabaseEnabled(true);
settings.setGeolocationEnabled(true);
settings.setSaveFormData(true);
settings.setUseWideViewPort(true);
view.setScrollBarStyle(View.SCROLLBARS_INSIDE_OVERLAY);
if (Build.VERSION.SDK_INT >= 21) {
Method setMixedContentModeMethod = getMethod(settings.getClass(),
"setMixedContentMode", int.class);
invokeMethod(settings, setMixedContentModeMethod, 2);
}
setWebViewClient(props);
}
/**
* Creates the web view client which sets the print document.
*
* @param props
* The JSON object with the containing page properties
*/
private void setWebViewClient (JSONObject props) {
final String docName = props.optString("name", DEFAULT_DOC_NAME);
final boolean landscape = props.optBoolean("landscape", false);
final boolean graystyle = props.optBoolean("graystyle", false);
final String duplex = props.optString("duplex", "none");
view.setWebViewClient(new WebViewClient() {
@Override
public boolean shouldOverrideUrlLoading (WebView view, String url) {
return false;
}
@Override
public void onPageFinished (WebView webView, String url) {
PrintManager printManager = getPrintMgr();
PrintAttributes.Builder builder = new PrintAttributes.Builder();
PrintDocumentAdapter adapter = getAdapter(webView, docName);
builder.setMinMargins(PrintAttributes.Margins.NO_MARGINS);
builder.setColorMode(graystyle
? PrintAttributes.COLOR_MODE_MONOCHROME
: PrintAttributes.COLOR_MODE_COLOR);
builder.setMediaSize(landscape
? PrintAttributes.MediaSize.UNKNOWN_LANDSCAPE
: PrintAttributes.MediaSize.UNKNOWN_PORTRAIT);
if (!duplex.equals("none") && Build.VERSION.SDK_INT >= 23) {
boolean longEdge = duplex.equals("long");
Method setDuplexModeMethod = getMethod(builder.getClass(),
"setDuplexMode", int.class);
invokeMethod(builder, setDuplexModeMethod,
longEdge ? 2 : 4);
}
PrintJob job = printManager.print(
docName, adapter, builder.build());
invokeCallbackOnceCompletedOrCanceled(job);
view = null;
}
});
}
/**
* Invokes the callback once the print job is complete or was canceled.
*
* @param job
* The reference to the print job
*/
private void invokeCallbackOnceCompletedOrCanceled (final PrintJob job) {
cordova.getThreadPool().execute(new Runnable() {
@Override
public void run() {
for (;;) {
if (job.isCancelled() || job.isCompleted() || job.isFailed()) {
PluginResult res = new PluginResult(
PluginResult.Status.OK, job.isCompleted());
command.sendPluginResult(res);
break;
}
}
}
});
}
/**
* Get a PrintManager instance.
*
* @return A PrintManager instance.
*/
private PrintManager getPrintMgr () {
return (PrintManager) cordova.getActivity()
.getSystemService(Context.PRINT_SERVICE);
}
/**
* Create the print document adapter for the web view component. On
* devices older then SDK 21 it will use the deprecated method
* `createPrintDocumentAdapter` without arguments and on newer devices
* the recommended way.
*
* @param webView
* The web view which content to print out.
* @param docName
* The name of the printed document.
* @return
* The created adapter.
*/
private PrintDocumentAdapter getAdapter (WebView webView, String docName) {
if (Build.VERSION.SDK_INT >= 21) {
Method createPrintDocumentAdapterMethod = getMethod(
WebView.class, "createPrintDocumentAdapter", String.class);
return (PrintDocumentAdapter) invokeMethod(
webView, createPrintDocumentAdapterMethod, docName);
} else {
Method createPrintDocumentAdapterMethod = getMethod(
WebView.class, "createPrintDocumentAdapter");
return (PrintDocumentAdapter) invokeMethod(
webView, createPrintDocumentAdapterMethod);
}
}
/**
* Get a list of ids of all installed and enabled print services. For
* that it uses reflections to call public but hidden methods from the
* PrintManager.
*
* @return A list of found print service ids.
*/
private List<String> getEnabledPrintServiceIds () {
try {
PrintManager printMgr = getPrintMgr();
Class<?> printerCls = Class.forName(
"android.printservice.PrintServiceInfo");
Method getPrinterMethod = getMethod(printMgr.getClass(),
"getEnabledPrintServices");
Method getIdMethod = getMethod(printerCls,
"getId");
List printers = (List) invokeMethod(printMgr, getPrinterMethod);
ArrayList<String> printerIds = new ArrayList<String>();
printerIds.add("android.print.pdf");
if (printers == null)
return printerIds;
for (Object printer : printers) {
String printerId = (String) invokeMethod(printer, getIdMethod);
printerIds.add(printerId);
}
return printerIds;
} catch (ClassNotFoundException e) {
e.printStackTrace();
}
return Collections.emptyList();
}
/**
* Finds the method with given name and set of arguments.
*
* @param cls
* The class in where to look for the method declaration.
* @param name
* The name of the method.
* @param params
* The arguments of the method.
* @return
* The found method or null.
*/
private Method getMethod (Class<?> cls, String name, Class<?>... params) {
try {
return cls.getDeclaredMethod(name, params);
} catch (NoSuchMethodException e) {
return null;
}
}
/**
* Invokes the method on the given object with the specified arguments.
*
* @param obj
* An object which class defines the method.
* @param method
* The method to invoke.
* @param args
* Set of arguments.
* @return
* The returned object or null.
*/
private Object invokeMethod (Object obj, Method method, Object... args) {
try {
return method.invoke(obj, args);
} catch (IllegalAccessException e) {
e.printStackTrace();
} catch (InvocationTargetException e) {
e.printStackTrace();
}
return null;
}
}
| Fix potential crash and infinite loop (Closes #61)
| src/android/Printer.java | Fix potential crash and infinite loop (Closes #61) |
|
Java | bsd-3-clause | 7761c506e774ff823b94701b777227668875d64e | 0 | dhis2/dhis2-android-sdk,dhis2/dhis2-android-sdk,dhis2/dhis2-android-sdk | /*
* Copyright (c) 2004-2019, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.android.testapp.category;
import com.google.common.collect.Lists;
import org.hisp.dhis.android.core.category.CategoryOptionCombo;
import org.hisp.dhis.android.core.data.database.SyncedDatabaseMockIntegrationShould;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.List;
import androidx.test.runner.AndroidJUnit4;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
@RunWith(AndroidJUnit4.class)
public class CategoryOptionComboCollectionRepositoryMockIntegrationShould extends SyncedDatabaseMockIntegrationShould {
@Test
public void find_all() {
List<CategoryOptionCombo> categoryOptionCombos = d2.categoryModule().categoryOptionCombos
.withCategoryOptions().get();
assertThat(categoryOptionCombos.size(), is(4));
}
@Test
public void filter_by_category_combo_A() {
List<CategoryOptionCombo> categoryOptionCombos = d2.categoryModule().categoryOptionCombos
.byCategoryComboUid().eq("m2jTvAj5kkm")
.get();
assertThat(categoryOptionCombos.size(), is(2));
}
@Test
public void filter_by_category_combo_B() {
List<CategoryOptionCombo> categoryOptionCombos = d2.categoryModule().categoryOptionCombos
.byCategoryComboUid().eq("p0KPaWEg3cf")
.get();
assertThat(categoryOptionCombos.size(), is(2));
}
@Test
public void filter_by_category_option() {
List<CategoryOptionCombo> categoryOptionCombos = d2.categoryModule().categoryOptionCombos
.byCategoryOptions(Lists.newArrayList("as6ygGvUGNg"))
.get();
assertThat(categoryOptionCombos.size(), is(1));
}
@Test
public void filter_by_category_option_list() {
List<CategoryOptionCombo> categoryOptionCombos = d2.categoryModule().categoryOptionCombos
.byCategoryOptions(Lists.newArrayList("Fp4gVHbRvEV", "uZUnebiT5DI"))
.get();
assertThat(categoryOptionCombos.size(), is(1));
}
@Test
public void not_find_combos_when_filter_by_less_options_than_they_have() {
List<CategoryOptionCombo> categoryOptionCombos = d2.categoryModule().categoryOptionCombos
.byCategoryOptions(Lists.newArrayList("Fp4gVHbRvEV"))
.get();
assertThat(categoryOptionCombos.size(), is(0));
}
@Test
public void not_find_combos_when_filter_by_more_options_than_they_have() {
List<CategoryOptionCombo> categoryOptionCombos = d2.categoryModule().categoryOptionCombos
.byCategoryOptions(Lists.newArrayList("as6ygGvUGNg", "Fp4gVHbRvEV", "uZUnebiT5DI"))
.get();
assertThat(categoryOptionCombos.size(), is(0));
}
@Test
public void not_find_combos_when_no_matching_options() {
List<CategoryOptionCombo> categoryOptionCombos = d2.categoryModule().categoryOptionCombos
.byCategoryOptions(Lists.newArrayList("as6ygGvUGNg", "Fp4gVHbRvEV"))
.get();
assertThat(categoryOptionCombos.size(), is(0));
}
@Test
public void include_category_options_as_children() {
CategoryOptionCombo categoryOptionCombo = d2.categoryModule().categoryOptionCombos
.withCategoryOptions().one().get();
assertThat(categoryOptionCombo.categoryOptions().get(0).name(), is("At PHU"));
}
@Test
public void include_category_options_as_children_in_collection_repository_when_all_selected() {
CategoryOptionCombo categoryOptionCombo = d2.categoryModule().categoryOptionCombos
.withAllChildren().get().get(0);
assertThat(categoryOptionCombo.categoryOptions().get(0).name(), is("At PHU"));
}
@Test
public void include_category_options_as_children_in_object_repository_when_all_selected() {
CategoryOptionCombo categoryOptionCombo = d2.categoryModule().categoryOptionCombos
.one().withAllChildren().get();
assertThat(categoryOptionCombo.categoryOptions().get(0).name(), is("At PHU"));
}
} | core/src/androidTest/java/org/hisp/dhis/android/testapp/category/CategoryOptionComboCollectionRepositoryMockIntegrationShould.java | /*
* Copyright (c) 2004-2019, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.android.testapp.category;
import com.google.common.collect.Lists;
import org.hisp.dhis.android.core.category.CategoryOptionCombo;
import org.hisp.dhis.android.core.data.database.SyncedDatabaseMockIntegrationShould;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.List;
import androidx.test.runner.AndroidJUnit4;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.core.Is.is;
@RunWith(AndroidJUnit4.class)
public class CategoryOptionComboCollectionRepositoryMockIntegrationShould extends SyncedDatabaseMockIntegrationShould {
@Test
public void find_all() {
List<CategoryOptionCombo> categoryOptionCombos = d2.categoryModule().categoryOptionCombos
.withCategoryOptions().get();
assertThat(categoryOptionCombos.size(), is(4));
}
@Test
public void filter_by_category_combo_A() {
List<CategoryOptionCombo> categoryOptionCombos = d2.categoryModule().categoryOptionCombos
.byCategoryComboUid().eq("m2jTvAj5kkm")
.get();
assertThat(categoryOptionCombos.size(), is(2));
}
@Test
public void filter_by_category_combo_B() {
List<CategoryOptionCombo> categoryOptionCombos = d2.categoryModule().categoryOptionCombos
.byCategoryComboUid().eq("p0KPaWEg3cf")
.get();
assertThat(categoryOptionCombos.size(), is(2));
}
@Test
public void filter_by_category_option() {
List<CategoryOptionCombo> categoryOptionCombos = d2.categoryModule().categoryOptionCombos
.byCategoryOptions(Lists.newArrayList("as6ygGvUGNg"))
.get();
assertThat(categoryOptionCombos.size(), is(1));
}
@Test
public void filter_by_category_option_list() {
List<CategoryOptionCombo> categoryOptionCombos = d2.categoryModule().categoryOptionCombos
.byCategoryOptions(Lists.newArrayList("Fp4gVHbRvEV", "uZUnebiT5DI"))
.get();
assertThat(categoryOptionCombos.size(), is(1));
}
@Test
public void not_find_combos_when_filter_by_less_options_than_they_have() {
List<CategoryOptionCombo> categoryOptionCombos = d2.categoryModule().categoryOptionCombos
.byCategoryOptions(Lists.newArrayList("Fp4gVHbRvEV"))
.get();
assertThat(categoryOptionCombos.size(), is(0));
}
@Test
public void not_find_combos_when_filter_by_more_options_than_they_have() {
List<CategoryOptionCombo> categoryOptionCombos = d2.categoryModule().categoryOptionCombos
.byCategoryOptions(Lists.newArrayList("as6ygGvUGNg", "Fp4gVHbRvEV", "uZUnebiT5DI"))
.get();
assertThat(categoryOptionCombos.size(), is(0));
}
@Test
public void include_category_options_as_children() {
CategoryOptionCombo categoryOptionCombo = d2.categoryModule().categoryOptionCombos
.withCategoryOptions().one().get();
assertThat(categoryOptionCombo.categoryOptions().get(0).name(), is("At PHU"));
}
@Test
public void include_category_options_as_children_in_collection_repository_when_all_selected() {
CategoryOptionCombo categoryOptionCombo = d2.categoryModule().categoryOptionCombos
.withAllChildren().get().get(0);
assertThat(categoryOptionCombo.categoryOptions().get(0).name(), is("At PHU"));
}
@Test
public void include_category_options_as_children_in_object_repository_when_all_selected() {
CategoryOptionCombo categoryOptionCombo = d2.categoryModule().categoryOptionCombos
.one().withAllChildren().get();
assertThat(categoryOptionCombo.categoryOptions().get(0).name(), is("At PHU"));
}
} | [ANDROSDK-760] Test that repository not find combos when there are not matching options
| core/src/androidTest/java/org/hisp/dhis/android/testapp/category/CategoryOptionComboCollectionRepositoryMockIntegrationShould.java | [ANDROSDK-760] Test that repository not find combos when there are not matching options |
|
Java | isc | 77f2ab647dbf2d263f5d2b13429a2d879523b835 | 0 | TealCube/strife | /**
* The MIT License
* Copyright (c) 2015 Teal Cube Games
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package info.faceland.strife.listeners;
import com.tealcube.minecraft.bukkit.facecore.utilities.MessageUtils;
import com.tealcube.minecraft.bukkit.kern.apache.commons.lang3.math.NumberUtils;
import com.tealcube.minecraft.bukkit.kern.shade.google.common.base.CharMatcher;
import info.faceland.beast.BeastData;
import info.faceland.strife.StrifePlugin;
import info.faceland.strife.attributes.StrifeAttribute;
import info.faceland.strife.data.Champion;
import org.bukkit.ChatColor;
import org.bukkit.Sound;
import org.bukkit.entity.Entity;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Player;
import org.bukkit.entity.Projectile;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.entity.EntityDamageByEntityEvent;
import org.bukkit.event.entity.EntityDamageEvent;
import org.bukkit.event.entity.ProjectileLaunchEvent;
import org.bukkit.potion.PotionEffect;
import org.bukkit.potion.PotionEffectType;
import java.util.Map;
import java.util.Random;
public class CombatListener implements Listener {
    private static final String[] DOGE_MEMES = {
            "<aqua>wow", "<green>wow", "<light purple>wow",
            "<aqua>much pain", "<green>much pain", "<light purple>much pain",
            "<aqua>many disrespects", "<green>many disrespects", "<light purple>many disrespects",
            "<red>no u", "<red>2damage4me"};
private final StrifePlugin plugin;
private final Random random;
public CombatListener(StrifePlugin plugin) {
this.plugin = plugin;
random = new Random(System.currentTimeMillis());
}
@EventHandler(priority = EventPriority.MONITOR)
public void onEntityDamageEvent(EntityDamageEvent event) {
if (!(event.getEntity() instanceof Player)) {
return;
}
Player p = (Player) event.getEntity();
Champion champ = plugin.getChampionManager().getChampion(p.getUniqueId());
Map<StrifeAttribute, Double> vals = champ.getAttributeValues();
double chance = vals.get(StrifeAttribute.DOGE);
if (random.nextDouble() > chance) {
return;
}
MessageUtils.sendMessage(p, DOGE_MEMES[random.nextInt(DOGE_MEMES.length)]);
}
@EventHandler(priority = EventPriority.HIGHEST)
public void onProjectileLaunch(ProjectileLaunchEvent event) {
        if (event.getEntity().getShooter() instanceof Entity) {
            Entity shooter = (Entity) event.getEntity().getShooter();
            event.getEntity().setVelocity(event.getEntity().getVelocity().add(shooter.getVelocity()));
        }
}
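    // Example (illustrative): a player moving at velocity (0.2, 0, 0) who fires an
    // arrow with initial velocity (1.5, 0.3, 0) launches it at (1.7, 0.3, 0), so
    // projectiles inherit the shooter's momentum instead of lagging behind it.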
@EventHandler(priority = EventPriority.HIGHEST)
public void onEntityDamageByEntity(EntityDamageByEntityEvent event) {
LivingEntity a;
if (event.isCancelled() || !(event.getEntity() instanceof LivingEntity)) {
return;
}
if (event.getEntity().hasMetadata("NPC")) {
return;
}
// LET THE DATA GATHERING COMMENCE
boolean melee = true;
boolean aPlayer = false;
boolean bPlayer = false;
double poisonMult = 1.0;
double meleeMult = 1.0;
double rangedMult = 1.0;
if (event.getDamager() instanceof LivingEntity) {
a = (LivingEntity) event.getDamager();
} else if (event.getDamager() instanceof Projectile && ((Projectile) event.getDamager())
.getShooter() instanceof LivingEntity) {
a = (LivingEntity) ((Projectile) event.getDamager()).getShooter();
melee = false;
} else {
return;
}
LivingEntity b = (LivingEntity) event.getEntity();
if (a == null || b == null) {
return;
}
if (a instanceof Player) {
aPlayer = true;
}
if (b instanceof Player) {
bPlayer = true;
}
if (bPlayer) {
double chance = plugin.getChampionManager().getChampion(b.getUniqueId()).getAttributeValues()
.get(StrifeAttribute.EVASION);
double accuracy = 0;
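      // Diminishing-returns dodge curve: e.g. 0.5 evasion gives roughly 1 - 100 / (100 + 50^1.25) ~= 0.57
      // before the attacker's accuracy scales it back down.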
chance = 1 - (100 / (100 + (Math.pow((chance * 100), 1.25))));
if (aPlayer) {
accuracy = plugin.getChampionManager().getChampion(a.getUniqueId()).getAttributeValues()
.get(StrifeAttribute.ACCURACY);
chance = chance * (1 - accuracy);
}
if (random.nextDouble() < chance) {
event.setCancelled(true);
b.getWorld().playSound(a.getEyeLocation(), Sound.GHAST_FIREBALL, 1f, 2f);
return;
}
}
double damage;
double pvpMult = 1.0;
double hungerMult = 1.0;
double critbonus = 0, overbonus = 0, trueDamage = 0;
double meleeDamageA = StrifeAttribute.MELEE_DAMAGE.getBaseValue(), attackSpeedA;
double overchargeA = StrifeAttribute.OVERCHARGE.getBaseValue();
double armorPenA = StrifeAttribute.ARMOR_PENETRATION.getBaseValue();
double lifeStealA = StrifeAttribute.LIFE_STEAL.getBaseValue(), lifeStolenA;
    double rangedDamageA = StrifeAttribute.RANGED_DAMAGE.getBaseValue(), snarechanceA = StrifeAttribute.SNARE_CHANCE.getBaseValue();
double criticalRateA = StrifeAttribute.CRITICAL_RATE.getBaseValue(), criticalDamageA = StrifeAttribute.CRITICAL_DAMAGE.getBaseValue();
double attackSpeedMultA = 1D;
double fireDamageA = StrifeAttribute.FIRE_DAMAGE.getBaseValue(), igniteChanceA = StrifeAttribute.IGNITE_CHANCE.getBaseValue();
double lightningDamageA = StrifeAttribute.LIGHTNING_DAMAGE.getBaseValue(), shockChanceA = StrifeAttribute.SHOCK_CHANCE.getBaseValue();
double iceDamageA = StrifeAttribute.ICE_DAMAGE.getBaseValue(), freezeChanceA = StrifeAttribute.FREEZE_CHANCE.getBaseValue();
double armorB = StrifeAttribute.ARMOR.getBaseValue(), reflectDamageB = StrifeAttribute.DAMAGE_REFLECT.getBaseValue();
double healthB = b.getMaxHealth();
double resistB = 0;
double parryB, blockB = StrifeAttribute.BLOCK.getBaseValue();
boolean blocking = false;
boolean parried = false;
if (b.hasPotionEffect(PotionEffectType.WITHER)) {
meleeMult += 0.1D;
rangedMult += 0.1D;
}
if (b.hasPotionEffect(PotionEffectType.DAMAGE_RESISTANCE)) {
meleeMult -= 0.1D;
rangedMult -= 0.1D;
}
if (aPlayer) {
hungerMult = Math.min(((double) (((Player) a).getFoodLevel()))/7.0D, 1.0D);
if (b instanceof Player) {
pvpMult = 0.5;
}
if (a.hasPotionEffect(PotionEffectType.POISON)) {
poisonMult = 0.33D;
}
if (a.hasPotionEffect(PotionEffectType.INCREASE_DAMAGE)) {
meleeMult += 0.1D;
}
if (a.hasPotionEffect(PotionEffectType.WEAKNESS)) {
meleeMult -= 0.1D;
}
if (a.hasPotionEffect(PotionEffectType.NIGHT_VISION)) {
rangedMult = 1.1D;
}
for (EntityDamageEvent.DamageModifier modifier : EntityDamageEvent.DamageModifier.values()) {
if (event.isApplicable(modifier)) {
event.setDamage(modifier, 0D);
}
}
Player p = (Player) a;
Champion champ = plugin.getChampionManager().getChampion(p.getUniqueId());
Map<StrifeAttribute, Double> vals = champ.getAttributeValues();
meleeDamageA = vals.get(StrifeAttribute.MELEE_DAMAGE);
attackSpeedA =
(StrifeAttribute.ATTACK_SPEED.getBaseValue() * (1 / (1 + vals.get(StrifeAttribute.ATTACK_SPEED))));
criticalDamageA = vals.get(StrifeAttribute.CRITICAL_DAMAGE);
armorPenA = vals.get(StrifeAttribute.ARMOR_PENETRATION);
overchargeA = vals.get(StrifeAttribute.OVERCHARGE);
lifeStealA = vals.get(StrifeAttribute.LIFE_STEAL);
rangedDamageA = vals.get(StrifeAttribute.RANGED_DAMAGE);
criticalRateA = vals.get(StrifeAttribute.CRITICAL_RATE);
snarechanceA = vals.get(StrifeAttribute.SNARE_CHANCE);
fireDamageA = vals.get(StrifeAttribute.FIRE_DAMAGE);
lightningDamageA = vals.get(StrifeAttribute.LIGHTNING_DAMAGE);
iceDamageA = vals.get(StrifeAttribute.ICE_DAMAGE);
igniteChanceA = vals.get(StrifeAttribute.IGNITE_CHANCE);
shockChanceA = vals.get(StrifeAttribute.SHOCK_CHANCE);
freezeChanceA = vals.get(StrifeAttribute.FREEZE_CHANCE);
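      // Attack-speed cooldown: swinging again before the cooldown expires scales damage down linearly;
      // only a fully recharged swing (multiplier 1.0) qualifies for overcharge below.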
long timeLeft = plugin.getAttackSpeedTask().getTimeLeft(a.getUniqueId());
long timeToSet = Math.round(Math.max(4.0 * attackSpeedA, 0.0));
if (timeLeft > 0) {
attackSpeedMultA = Math.max(1.0 - 1.0 * timeLeft / timeToSet, 0.0);
}
plugin.getAttackSpeedTask().setTimeLeft(a.getUniqueId(), timeToSet);
} else {
if (a.getType() != null) {
BeastData data = plugin.getBeastPlugin().getData(a.getType());
String name = a.getCustomName() != null ? ChatColor.stripColor(a.getCustomName()) : "0";
if (data != null && a.getCustomName() != null) {
int level = NumberUtils.toInt(CharMatcher.DIGIT.retainFrom(name));
meleeDamageA = (data.getDamageExpression().setVariable("LEVEL", level).evaluate());
rangedDamageA = meleeDamageA;
}
}
}
if (bPlayer) {
Player p = (Player) b;
Champion champ = plugin.getChampionManager().getChampion(p.getUniqueId());
Map<StrifeAttribute, Double> vals = champ.getAttributeValues();
armorB = vals.get(StrifeAttribute.ARMOR);
resistB = vals.get(StrifeAttribute.RESISTANCE);
reflectDamageB = vals.get(StrifeAttribute.DAMAGE_REFLECT);
parryB = vals.get(StrifeAttribute.PARRY);
blockB = vals.get(StrifeAttribute.BLOCK);
if (((Player) b).isBlocking()) {
blocking = true;
if (random.nextDouble() < parryB) {
parried = true;
}
}
}
// LET THE DAMAGE CALCULATION COMMENCE
if (melee) {
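      // Explosions always deal at least 20% of base melee damage; the remaining 80% falls off
      // with the squared distance to the target.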
if (event.getCause() == EntityDamageEvent.DamageCause.ENTITY_EXPLOSION) {
damage = (meleeDamageA * 0.2) + ((meleeDamageA * 0.8) / (a.getLocation().distanceSquared(b.getLocation()) / 2));
} else {
damage = meleeDamageA * attackSpeedMultA * meleeMult;
}
if (parried) {
a.damage(damage * 1.25 * pvpMult);
event.setCancelled(true);
b.getWorld().playSound(b.getEyeLocation(), Sound.ANVIL_LAND, 1f, 2f);
return;
}
if (random.nextDouble() < criticalRateA) {
critbonus = damage * (criticalDamageA - 1.0);
b.getWorld().playSound(b.getEyeLocation(), Sound.FALL_BIG, 2f, 1f);
}
if (attackSpeedMultA >= 1D) {
overbonus = overchargeA * damage;
}
damage = damage + critbonus + overbonus;
double blockReducer = 1;
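      // Armor mitigation mirrors the evasion curve: e.g. 0.5 armor lets roughly 100 / (100 + 50^1.3) ~= 0.38
      // of the damage through, before armor penetration widens that share.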
double damageReducer = 100 / (100 + (Math.pow((armorB * 100), 1.3)));
damageReducer *= (1 - armorPenA);
if (blocking) {
blockReducer = (1 - blockB);
}
if (reflectDamageB > 0) {
a.damage(damage * reflectDamageB * pvpMult);
a.getWorld().playSound(a.getEyeLocation(), Sound.GLASS, 0.6f, 2f);
}
if (fireDamageA > 0) {
if (random.nextDouble() < ((igniteChanceA * attackSpeedMultA * 1.2) * (1 - resistB))) {
b.setFireTicks((int) Math.round(fireDamageA * 20));
b.getWorld().playSound(b.getEyeLocation(), Sound.FIRE_IGNITE, 1f, 1f);
}
}
if (lightningDamageA > 0) {
if (random.nextDouble() < ((shockChanceA * attackSpeedMultA * 1.2) * (1 - resistB))) {
trueDamage = lightningDamageA;
b.getWorld().playSound(b.getEyeLocation(), Sound.AMBIENCE_THUNDER, 1f, 1.5f);
}
}
if (iceDamageA > 0) {
if (random.nextDouble() < ((freezeChanceA * attackSpeedMultA * 1.2) * (1 - resistB))) {
damage = damage + iceDamageA + Math.min(((healthB / 200) * iceDamageA), iceDamageA*4);
b.addPotionEffect(new PotionEffect(PotionEffectType.SLOW, 40, 2));
b.getWorld().playSound(b.getEyeLocation(), Sound.GLASS, 1f, 1f);
}
}
event.setDamage(EntityDamageEvent.DamageModifier.BASE, ((damage * damageReducer * blockReducer) + trueDamage) * pvpMult);
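      // Life steal heals a fraction of the final damage, cut to a third while the attacker is
      // poisoned and reduced further when the attacker's food level drops below 7.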
if (a instanceof Player) {
lifeStolenA = event.getFinalDamage() * lifeStealA * poisonMult * hungerMult;
a.setHealth(Math.min(a.getHealth() + lifeStolenA, a.getMaxHealth()));
}
} else {
if (parried) {
event.setCancelled(true);
b.getWorld().playSound(b.getEyeLocation(), Sound.ANVIL_LAND, 1f, 2f);
return;
}
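      // Player arrows scale with launch speed: a fully drawn bow fires at roughly 3 blocks per tick,
      // so lengthSquared() / 9 approaches 1.0 at full draw.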
damage = rangedDamageA * rangedMult * (a instanceof Player ? (event.getDamager().getVelocity().lengthSquared() / Math.pow(3, 2)) : 1);
double blockReducer = 1;
double damageReducer = 100 / (100 + (Math.pow((armorB * 100), 1.3)));
damageReducer *= (1 - armorPenA);
if (random.nextDouble() < criticalRateA) {
damage = damage * criticalDamageA;
b.getWorld().playSound(b.getEyeLocation(), Sound.FALL_BIG, 2f, 1f);
}
if (random.nextDouble() < snarechanceA) {
b.addPotionEffect(new PotionEffect(PotionEffectType.SLOW, 40, 5));
}
if (blocking) {
blockReducer = (1 - blockB);
}
if (fireDamageA > 0) {
if (random.nextDouble() < (igniteChanceA * (1 - resistB))) {
b.setFireTicks((int) Math.round(fireDamageA * 20));
b.getWorld().playSound(b.getEyeLocation(), Sound.FIRE_IGNITE, 1f, 1f);
}
}
if (lightningDamageA > 0) {
if (random.nextDouble() < (shockChanceA * (1 - resistB))) {
trueDamage = lightningDamageA;
b.getWorld().playSound(b.getEyeLocation(), Sound.AMBIENCE_THUNDER, 1f, 1.5f);
a.getWorld().playSound(a.getEyeLocation(), Sound.AMBIENCE_THUNDER, 1f, 1.5f);
}
}
if (iceDamageA > 0) {
if (random.nextDouble() < (freezeChanceA * (1 - resistB))) {
damage = damage + iceDamageA + Math.min(((healthB / 200) * iceDamageA), iceDamageA*4);
b.addPotionEffect(new PotionEffect(PotionEffectType.SLOW, 40, 2));
b.getWorld().playSound(b.getEyeLocation(), Sound.GLASS, 1f, 1f);
}
}
event.setDamage(EntityDamageEvent.DamageModifier.BASE,
((damage * damageReducer * blockReducer) + trueDamage) * pvpMult);
if (a instanceof Player) {
lifeStolenA = event.getFinalDamage() * lifeStealA * poisonMult * hungerMult;
a.setHealth(Math.min(a.getHealth() + lifeStolenA, a.getMaxHealth()));
}
}
}
}
| src/main/java/info/faceland/strife/listeners/CombatListener.java | /**
* The MIT License
* Copyright (c) 2015 Teal Cube Games
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package info.faceland.strife.listeners;
import com.tealcube.minecraft.bukkit.facecore.utilities.MessageUtils;
import com.tealcube.minecraft.bukkit.kern.apache.commons.lang3.math.NumberUtils;
import com.tealcube.minecraft.bukkit.kern.shade.google.common.base.CharMatcher;
import info.faceland.beast.BeastData;
import info.faceland.strife.StrifePlugin;
import info.faceland.strife.attributes.StrifeAttribute;
import info.faceland.strife.data.Champion;
import org.bukkit.ChatColor;
import org.bukkit.Sound;
import org.bukkit.entity.Entity;
import org.bukkit.entity.LivingEntity;
import org.bukkit.entity.Player;
import org.bukkit.entity.Projectile;
import org.bukkit.event.EventHandler;
import org.bukkit.event.EventPriority;
import org.bukkit.event.Listener;
import org.bukkit.event.entity.EntityDamageByEntityEvent;
import org.bukkit.event.entity.EntityDamageEvent;
import org.bukkit.event.entity.ProjectileLaunchEvent;
import org.bukkit.potion.PotionEffect;
import org.bukkit.potion.PotionEffectType;
import java.util.Map;
import java.util.Random;
public class CombatListener implements Listener {
private static final String[]
DOGE_MEMES =
{"<aqua>wow", "<green>wow", "<light purple>wow", "<aqua>much pain", "<green>much pain",
"<light purple>much pain",
"<aqua>many disrespects", "<green>many disrespects", "<light purple>many disrespects", "<red>no u",
"<red>2damage4me"};
private final StrifePlugin plugin;
private final Random random;
public CombatListener(StrifePlugin plugin) {
this.plugin = plugin;
random = new Random(System.currentTimeMillis());
}
@EventHandler(priority = EventPriority.MONITOR)
public void onEntityDamageEvent(EntityDamageEvent event) {
if (!(event.getEntity() instanceof Player)) {
return;
}
Player p = (Player) event.getEntity();
Champion champ = plugin.getChampionManager().getChampion(p.getUniqueId());
Map<StrifeAttribute, Double> vals = champ.getAttributeValues();
double chance = vals.get(StrifeAttribute.DOGE);
if (random.nextDouble() > chance) {
return;
}
MessageUtils.sendMessage(p, DOGE_MEMES[random.nextInt(DOGE_MEMES.length)]);
}
@EventHandler(priority = EventPriority.HIGHEST)
public void onProjectileLaunch(ProjectileLaunchEvent event) {
if (event.getEntity().getShooter() instanceof Entity) {
event.getEntity()
.setVelocity(
event.getEntity().getVelocity().add(((Entity) event.getEntity().getShooter()).getVelocity()));
}
}
@EventHandler(priority = EventPriority.HIGHEST)
public void onEntityDamageByEntity(EntityDamageByEntityEvent event) {
LivingEntity a;
if (event.isCancelled() || !(event.getEntity() instanceof LivingEntity)) {
return;
}
if (event.getEntity().hasMetadata("NPC")) {
return;
}
// LET THE DATA GATHERING COMMENCE
boolean melee = true;
boolean aPlayer = false;
boolean bPlayer = false;
double poisonMult = 1.0;
double meleeMult = 1.0;
double rangedMult = 1.0;
if (event.getDamager() instanceof LivingEntity) {
a = (LivingEntity) event.getDamager();
} else if (event.getDamager() instanceof Projectile && ((Projectile) event.getDamager())
.getShooter() instanceof LivingEntity) {
a = (LivingEntity) ((Projectile) event.getDamager()).getShooter();
melee = false;
} else {
return;
}
LivingEntity b = (LivingEntity) event.getEntity();
if (a == null || b == null) {
return;
}
if (a instanceof Player) {
aPlayer = true;
}
if (b instanceof Player) {
bPlayer = true;
}
if (bPlayer) {
double chance = plugin.getChampionManager().getChampion(b.getUniqueId()).getAttributeValues()
.get(StrifeAttribute.EVASION);
double accuracy = 0;
chance = 1 - (100 / (100 + (Math.pow((chance * 100), 1.25))));
if (aPlayer) {
accuracy = plugin.getChampionManager().getChampion(a.getUniqueId()).getAttributeValues()
.get(StrifeAttribute.ACCURACY);
chance = chance * (1 - accuracy);
}
if (random.nextDouble() < chance) {
event.setCancelled(true);
b.getWorld().playSound(a.getEyeLocation(), Sound.GHAST_FIREBALL, 1f, 2f);
return;
}
}
double damage;
double pvpMult = 1.0;
double hungerMult = 1.0;
double critbonus = 0, overbonus = 0, trueDamage = 0;
double meleeDamageA = StrifeAttribute.MELEE_DAMAGE.getBaseValue(), attackSpeedA;
double overchargeA = StrifeAttribute.OVERCHARGE.getBaseValue();
double armorPenA = StrifeAttribute.ARMOR_PENETRATION.getBaseValue();
double lifeStealA = StrifeAttribute.LIFE_STEAL.getBaseValue(), lifeStolenA;
    double rangedDamageA = StrifeAttribute.RANGED_DAMAGE.getBaseValue(), snarechanceA = StrifeAttribute.SNARE_CHANCE.getBaseValue();
double criticalRateA = StrifeAttribute.CRITICAL_RATE.getBaseValue(), criticalDamageA = StrifeAttribute.CRITICAL_DAMAGE.getBaseValue();
double attackSpeedMultA = 1D;
double fireDamageA = StrifeAttribute.FIRE_DAMAGE.getBaseValue(), igniteChanceA = StrifeAttribute.IGNITE_CHANCE.getBaseValue();
double lightningDamageA = StrifeAttribute.LIGHTNING_DAMAGE.getBaseValue(), shockChanceA = StrifeAttribute.SHOCK_CHANCE.getBaseValue();
double iceDamageA = StrifeAttribute.ICE_DAMAGE.getBaseValue(), freezeChanceA = StrifeAttribute.FREEZE_CHANCE.getBaseValue();
double armorB = StrifeAttribute.ARMOR.getBaseValue(), reflectDamageB = StrifeAttribute.DAMAGE_REFLECT.getBaseValue();
double healthB = b.getMaxHealth();
double resistB = 0;
double parryB, blockB = StrifeAttribute.BLOCK.getBaseValue();
boolean blocking = false;
boolean parried = false;
if (b.hasPotionEffect(PotionEffectType.WITHER)) {
meleeMult += 0.1D;
rangedMult += 0.1D;
}
if (b.hasPotionEffect(PotionEffectType.DAMAGE_RESISTANCE)) {
meleeMult -= 0.1D;
rangedMult -= 0.1D;
}
if (aPlayer) {
hungerMult = Math.min(((double) (((Player) a).getFoodLevel()))/7.0D, 1.0D);
if (b instanceof Player) {
pvpMult = 0.5;
}
if (a.hasPotionEffect(PotionEffectType.POISON)) {
poisonMult = 0.33D;
}
if (a.hasPotionEffect(PotionEffectType.INCREASE_DAMAGE)) {
meleeMult += 0.1D;
}
if (a.hasPotionEffect(PotionEffectType.WEAKNESS)) {
meleeMult -= 0.1D;
}
if (a.hasPotionEffect(PotionEffectType.NIGHT_VISION)) {
rangedMult = 1.1D;
}
for (EntityDamageEvent.DamageModifier modifier : EntityDamageEvent.DamageModifier.values()) {
if (event.isApplicable(modifier)) {
event.setDamage(modifier, 0D);
}
}
Player p = (Player) a;
Champion champ = plugin.getChampionManager().getChampion(p.getUniqueId());
Map<StrifeAttribute, Double> vals = champ.getAttributeValues();
meleeDamageA = vals.get(StrifeAttribute.MELEE_DAMAGE);
attackSpeedA =
(StrifeAttribute.ATTACK_SPEED.getBaseValue() * (1 / (1 + vals.get(StrifeAttribute.ATTACK_SPEED))));
criticalDamageA = vals.get(StrifeAttribute.CRITICAL_DAMAGE);
armorPenA = vals.get(StrifeAttribute.ARMOR_PENETRATION);
overchargeA = vals.get(StrifeAttribute.OVERCHARGE);
lifeStealA = vals.get(StrifeAttribute.LIFE_STEAL);
rangedDamageA = vals.get(StrifeAttribute.RANGED_DAMAGE);
criticalRateA = vals.get(StrifeAttribute.CRITICAL_RATE);
snarechanceA = vals.get(StrifeAttribute.SNARE_CHANCE);
fireDamageA = vals.get(StrifeAttribute.FIRE_DAMAGE);
lightningDamageA = vals.get(StrifeAttribute.LIGHTNING_DAMAGE);
iceDamageA = vals.get(StrifeAttribute.ICE_DAMAGE);
igniteChanceA = vals.get(StrifeAttribute.IGNITE_CHANCE);
shockChanceA = vals.get(StrifeAttribute.SHOCK_CHANCE);
freezeChanceA = vals.get(StrifeAttribute.FREEZE_CHANCE);
long timeLeft = plugin.getAttackSpeedTask().getTimeLeft(a.getUniqueId());
long timeToSet = Math.round(Math.max(4.0 * attackSpeedA, 0.0));
if (timeLeft > 0) {
attackSpeedMultA = Math.max(1.0 - 1.0 * timeLeft / timeToSet, 0.0);
}
plugin.getAttackSpeedTask().setTimeLeft(a.getUniqueId(), timeToSet);
} else {
if (a.getType() != null) {
BeastData data = plugin.getBeastPlugin().getData(a.getType());
String name = a.getCustomName() != null ? ChatColor.stripColor(a.getCustomName()) : "0";
if (data != null && a.getCustomName() != null) {
int level = NumberUtils.toInt(CharMatcher.DIGIT.retainFrom(name));
meleeDamageA = (data.getDamageExpression().setVariable("LEVEL", level).evaluate());
rangedDamageA = meleeDamageA;
}
}
}
if (bPlayer) {
Player p = (Player) b;
Champion champ = plugin.getChampionManager().getChampion(p.getUniqueId());
Map<StrifeAttribute, Double> vals = champ.getAttributeValues();
armorB = vals.get(StrifeAttribute.ARMOR);
resistB = vals.get(StrifeAttribute.RESISTANCE);
reflectDamageB = vals.get(StrifeAttribute.DAMAGE_REFLECT);
parryB = vals.get(StrifeAttribute.PARRY);
blockB = vals.get(StrifeAttribute.BLOCK);
if (((Player) b).isBlocking()) {
blocking = true;
if (random.nextDouble() < parryB) {
parried = true;
}
}
}
// LET THE DAMAGE CALCULATION COMMENCE
if (melee) {
damage = meleeDamageA * attackSpeedMultA * meleeMult;
if (parried) {
a.damage(damage * 1.25 * pvpMult);
event.setCancelled(true);
b.getWorld().playSound(b.getEyeLocation(), Sound.ANVIL_LAND, 1f, 2f);
return;
}
if (random.nextDouble() < criticalRateA) {
critbonus = damage * (criticalDamageA - 1.0);
b.getWorld().playSound(b.getEyeLocation(), Sound.FALL_BIG, 2f, 1f);
}
if (attackSpeedMultA >= 1D) {
overbonus = overchargeA * damage;
}
damage = damage + critbonus + overbonus;
double blockReducer = 1;
double damageReducer = 100 / (100 + (Math.pow((armorB * 100), 1.3)));
damageReducer *= (1 - armorPenA);
if (blocking) {
blockReducer = (1 - blockB);
}
if (event.getCause() == EntityDamageEvent.DamageCause.ENTITY_EXPLOSION) {
damage += damage / (a.getLocation().distanceSquared(b.getLocation()) / 2);
}
if (reflectDamageB > 0) {
a.damage(damage * reflectDamageB * pvpMult);
a.getWorld().playSound(a.getEyeLocation(), Sound.GLASS, 0.6f, 2f);
}
if (fireDamageA > 0) {
if (random.nextDouble() < ((igniteChanceA * attackSpeedMultA * 1.2) * (1 - resistB))) {
b.setFireTicks((int) Math.round(fireDamageA * 20));
b.getWorld().playSound(b.getEyeLocation(), Sound.FIRE_IGNITE, 1f, 1f);
}
}
if (lightningDamageA > 0) {
if (random.nextDouble() < ((shockChanceA * attackSpeedMultA * 1.2) * (1 - resistB))) {
trueDamage = lightningDamageA;
b.getWorld().playSound(b.getEyeLocation(), Sound.AMBIENCE_THUNDER, 1f, 1.5f);
}
}
if (iceDamageA > 0) {
if (random.nextDouble() < ((freezeChanceA * attackSpeedMultA * 1.2) * (1 - resistB))) {
damage = damage + iceDamageA + Math.min(((healthB / 200) * iceDamageA), iceDamageA*4);
b.addPotionEffect(new PotionEffect(PotionEffectType.SLOW, 40, 2));
b.getWorld().playSound(b.getEyeLocation(), Sound.GLASS, 1f, 1f);
}
}
event.setDamage(EntityDamageEvent.DamageModifier.BASE, ((damage * damageReducer * blockReducer) + trueDamage) * pvpMult);
if (a instanceof Player) {
lifeStolenA = event.getFinalDamage() * lifeStealA * poisonMult * hungerMult;
a.setHealth(Math.min(a.getHealth() + lifeStolenA, a.getMaxHealth()));
}
} else {
if (parried) {
event.setCancelled(true);
b.getWorld().playSound(b.getEyeLocation(), Sound.ANVIL_LAND, 1f, 2f);
return;
}
damage = rangedDamageA * rangedMult * (a instanceof Player ? (event.getDamager().getVelocity().lengthSquared() / Math.pow(3, 2)) : 1);
double blockReducer = 1;
double damageReducer = 100 / (100 + (Math.pow((armorB * 100), 1.3)));
damageReducer *= (1 - armorPenA);
if (random.nextDouble() < criticalRateA) {
damage = damage * criticalDamageA;
b.getWorld().playSound(b.getEyeLocation(), Sound.FALL_BIG, 2f, 1f);
}
if (random.nextDouble() < snarechanceA) {
b.addPotionEffect(new PotionEffect(PotionEffectType.SLOW, 40, 5));
}
if (blocking) {
blockReducer = (1 - blockB);
}
if (fireDamageA > 0) {
if (random.nextDouble() < (igniteChanceA * (1 - resistB))) {
b.setFireTicks((int) Math.round(fireDamageA * 20));
b.getWorld().playSound(b.getEyeLocation(), Sound.FIRE_IGNITE, 1f, 1f);
}
}
if (lightningDamageA > 0) {
if (random.nextDouble() < (shockChanceA * (1 - resistB))) {
trueDamage = lightningDamageA;
b.getWorld().playSound(b.getEyeLocation(), Sound.AMBIENCE_THUNDER, 1f, 1.5f);
a.getWorld().playSound(a.getEyeLocation(), Sound.AMBIENCE_THUNDER, 1f, 1.5f);
}
}
if (iceDamageA > 0) {
if (random.nextDouble() < (freezeChanceA * (1 - resistB))) {
damage = damage + iceDamageA + Math.min(((healthB / 200) * iceDamageA), iceDamageA*4);
b.addPotionEffect(new PotionEffect(PotionEffectType.SLOW, 40, 2));
b.getWorld().playSound(b.getEyeLocation(), Sound.GLASS, 1f, 1f);
}
}
event.setDamage(EntityDamageEvent.DamageModifier.BASE,
((damage * damageReducer * blockReducer) + trueDamage) * pvpMult);
if (a instanceof Player) {
lifeStolenA = event.getFinalDamage() * lifeStealA * poisonMult * hungerMult;
a.setHealth(Math.min(a.getHealth() + lifeStolenA, a.getMaxHealth()));
}
}
}
}
 | More accurate creeper damage that always does at least 20% damage
 | src/main/java/info/faceland/strife/listeners/CombatListener.java | More accurate creeper damage that always does at least 20% damage
|
Java | mit | eccaf50a18672f893c7b062a54800ad7efbf2c2c | 0 | codeborne/selenide,codeborne/selenide,simple-elf/selenide,simple-elf/selenide,codeborne/selenide,simple-elf/selenide,simple-elf/selenide | package com.codeborne.selenide.impl;
import com.codeborne.selenide.WebDriverProvider;
import org.openqa.selenium.*;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.chrome.ChromeOptions;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.htmlunit.HtmlUnitDriver;
import org.openqa.selenium.ie.InternetExplorerDriver;
import org.openqa.selenium.internal.Killable;
import org.openqa.selenium.phantomjs.PhantomJSDriverService;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.remote.RemoteWebDriver;
import org.openqa.selenium.remote.SessionNotFoundException;
import org.openqa.selenium.remote.UnreachableBrowserException;
import org.openqa.selenium.support.events.EventFiringWebDriver;
import org.openqa.selenium.support.events.WebDriverEventListener;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import static com.codeborne.selenide.Configuration.*;
import static com.codeborne.selenide.WebDriverRunner.*;
import static java.lang.Thread.currentThread;
import static org.openqa.selenium.remote.CapabilityType.*;
public class WebDriverThreadLocalContainer {
protected List<WebDriverEventListener> listeners = new ArrayList<WebDriverEventListener>();
protected Collection<Thread> ALL_WEB_DRIVERS_THREADS = new ConcurrentLinkedQueue<Thread>();
protected Map<Long, WebDriver> THREAD_WEB_DRIVER = new ConcurrentHashMap<Long, WebDriver>(4);
protected Proxy webProxySettings;
protected final AtomicBoolean cleanupThreadStarted = new AtomicBoolean(false);
protected void closeUnusedWebdrivers() {
for (Thread thread : ALL_WEB_DRIVERS_THREADS) {
if (!thread.isAlive()) {
closeWebDriver(thread);
}
}
}
public void addListener(WebDriverEventListener listener) {
listeners.add(listener);
}
public WebDriver setWebDriver(WebDriver webDriver) {
THREAD_WEB_DRIVER.put(currentThread().getId(), webDriver);
return webDriver;
}
public void setProxy(Proxy webProxy) {
webProxySettings=webProxy;
}
protected boolean isBrowserStillOpen(WebDriver webDriver) {
try {
webDriver.getTitle();
return true;
} catch (UnreachableBrowserException e) {
return false;
} catch (NoSuchWindowException e) {
return false;
} catch (SessionNotFoundException e) {
return false;
}
}
/**
* @return true iff webdriver is started in current thread
*/
public boolean hasWebDriverStarted() {
return THREAD_WEB_DRIVER.containsKey(currentThread().getId());
}
public WebDriver getWebDriver() {
WebDriver webDriver = THREAD_WEB_DRIVER.get(currentThread().getId());
return webDriver != null ? webDriver : setWebDriver(createDriver());
}
public WebDriver getAndCheckWebDriver() {
WebDriver webDriver = THREAD_WEB_DRIVER.get(currentThread().getId());
if (webDriver != null) {
if (isBrowserStillOpen(webDriver)) {
return webDriver;
}
else {
        System.out.println("Webdriver has been closed in the meantime. Let's re-create it.");
closeWebDriver();
}
}
return setWebDriver(createDriver());
}
public void closeWebDriver() {
closeWebDriver(currentThread());
}
protected void closeWebDriver(Thread thread) {
ALL_WEB_DRIVERS_THREADS.remove(thread);
WebDriver webdriver = THREAD_WEB_DRIVER.remove(thread.getId());
if (webdriver != null && !holdBrowserOpen) {
System.out.println(" === CLOSE WEBDRIVER: " + thread.getId() + " -> " + webdriver);
try {
webdriver.quit();
}
catch (UnreachableBrowserException ignored) {
// It happens for Firefox. It's ok: browser is already closed.
}
catch (WebDriverException cannotCloseBrowser) {
System.err.println("Cannot close browser normally: " + Cleanup.of.webdriverExceptionMessage(cannotCloseBrowser));
}
finally {
killBrowser(webdriver);
}
}
}
protected void killBrowser(WebDriver webdriver) {
if (webdriver instanceof Killable) {
try {
((Killable) webdriver).kill();
} catch (Exception e) {
System.err.println("Failed to kill browser " + webdriver + ':');
e.printStackTrace();
}
}
}
public void clearBrowserCache() {
WebDriver webdriver = THREAD_WEB_DRIVER.get(currentThread().getId());
if (webdriver != null) {
webdriver.manage().deleteAllCookies();
}
}
public String getPageSource() {
return getWebDriver().getPageSource();
}
public String getCurrentUrl() {
return getWebDriver().getCurrentUrl();
}
protected WebDriver createDriver() {
WebDriver webdriver = remote != null ? createRemoteDriver(remote, browser) :
CHROME.equalsIgnoreCase(browser) ? createChromeDriver() :
isFirefox() ? createFirefoxDriver() :
isHtmlUnit() ? createHtmlUnitDriver() :
isIE() ? createInternetExplorerDriver() :
isPhantomjs() ? createPhantomJsDriver() :
isOpera() ? createOperaDriver() :
isSafari() ? createSafariDriver() :
createInstanceOf(browser);
webdriver = maximize(webdriver);
System.out.println(" === CREATE WEBDRIVER: " + currentThread().getId() + " -> " + webdriver);
return markForAutoClose(listeners.isEmpty() ? webdriver : addListeners(webdriver));
}
protected WebDriver addListeners(WebDriver webdriver) {
EventFiringWebDriver wrapper = new EventFiringWebDriver(webdriver);
for (WebDriverEventListener listener : listeners) {
wrapper.register(listener);
}
return wrapper;
}
protected WebDriver markForAutoClose(WebDriver webDriver) {
ALL_WEB_DRIVERS_THREADS.add(currentThread());
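    // Double-checked locking on the AtomicBoolean so the cleanup daemon is started at most once.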
if (!cleanupThreadStarted.get()) {
synchronized (cleanupThreadStarted) {
if (!cleanupThreadStarted.get()) {
new UnusedWebdriversCleanupThread().start();
cleanupThreadStarted.set(true);
}
}
}
Runtime.getRuntime().addShutdownHook(new WebdriversFinalCleanupThread(currentThread()));
return webDriver;
}
protected WebDriver createChromeDriver() {
DesiredCapabilities capabilities = createCommonCapabilities();
ChromeOptions options = new ChromeOptions();
options.addArguments("test-type");
capabilities.setCapability(ChromeOptions.CAPABILITY, options);
return new ChromeDriver(capabilities);
}
protected WebDriver createFirefoxDriver() {
DesiredCapabilities capabilities = createCommonCapabilities();
return new FirefoxDriver(capabilities);
}
protected WebDriver createHtmlUnitDriver() {
DesiredCapabilities capabilities = DesiredCapabilities.htmlUnitWithJs();
capabilities.merge(createCommonCapabilities());
capabilities.setCapability(HtmlUnitDriver.INVALIDSELECTIONERROR, true);
capabilities.setCapability(HtmlUnitDriver.INVALIDXPATHERROR, false);
if (browser.indexOf(':') > -1) {
// Use constants BrowserType.IE, BrowserType.FIREFOX, BrowserType.CHROME etc.
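      // e.g. browser = "htmlunit:firefox" emulates Firefox through the capability version field.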
String emulatedBrowser = browser.replaceFirst("htmlunit:(.*)", "$1");
capabilities.setVersion(emulatedBrowser);
}
return new HtmlUnitDriver(capabilities);
}
protected WebDriver createInternetExplorerDriver() {
DesiredCapabilities capabilities = createCommonCapabilities();
return new InternetExplorerDriver(capabilities);
}
protected WebDriver createPhantomJsDriver() {
return createInstanceOf("org.openqa.selenium.phantomjs.PhantomJSDriver");
}
protected WebDriver createOperaDriver() {
return createInstanceOf("com.opera.core.systems.OperaDriver");
}
protected WebDriver createSafariDriver() {
return createInstanceOf("org.openqa.selenium.safari.SafariDriver");
}
protected WebDriver maximize(WebDriver driver) {
if (startMaximized) {
try {
if (isChrome()) {
maximizeChromeBrowser(driver.manage().window());
}
else {
driver.manage().window().maximize();
}
}
catch (Exception cannotMaximize) {
System.out.println("Cannot maximize " + browser + ": " + cannotMaximize);
}
}
return driver;
}
protected void maximizeChromeBrowser(WebDriver.Window window) {
    // Chrome driver does not yet support maximizing. Let's apply black magic!
java.awt.Toolkit toolkit = java.awt.Toolkit.getDefaultToolkit();
Dimension screenResolution = new Dimension(
(int) toolkit.getScreenSize().getWidth(),
(int) toolkit.getScreenSize().getHeight());
window.setSize(screenResolution);
window.setPosition(new org.openqa.selenium.Point(0, 0));
}
protected WebDriver createInstanceOf(String className) {
try {
DesiredCapabilities capabilities = createCommonCapabilities();
capabilities.setJavascriptEnabled(true);
capabilities.setCapability(TAKES_SCREENSHOT, true);
capabilities.setCapability(ACCEPT_SSL_CERTS, true);
capabilities.setCapability(SUPPORTS_ALERTS, true);
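      // The PhantomJS driver forwards these flags to the phantomjs binary: web security off,
      // self-signed certificates accepted.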
if (isPhantomjs()) {
capabilities.setCapability("phantomjs.cli.args", // PhantomJSDriverService.PHANTOMJS_CLI_ARGS == "phantomjs.cli.args"
new String[] {"--web-security=no", "--ignore-ssl-errors=yes"});
}
Class<?> clazz = Class.forName(className);
if (WebDriverProvider.class.isAssignableFrom(clazz)) {
return ((WebDriverProvider) clazz.newInstance()).createDriver(capabilities);
} else {
Constructor<?> constructor = Class.forName(className).getConstructor(Capabilities.class);
return (WebDriver) constructor.newInstance(capabilities);
}
}
catch (InvocationTargetException e) {
throw runtime(e.getTargetException());
}
catch (Exception invalidClassName) {
throw new IllegalArgumentException(invalidClassName);
}
}
protected RuntimeException runtime(Throwable exception) {
return exception instanceof RuntimeException ? (RuntimeException) exception : new RuntimeException(exception);
}
protected WebDriver createRemoteDriver(String remote, String browser) {
try {
DesiredCapabilities capabilities = createCommonCapabilities();
capabilities.setBrowserName(browser);
return new RemoteWebDriver(new URL(remote), capabilities);
} catch (MalformedURLException e) {
throw new IllegalArgumentException("Invalid 'remote' parameter: " + remote, e);
}
}
protected DesiredCapabilities createCommonCapabilities() {
DesiredCapabilities browserCapabilities = new DesiredCapabilities();
if (webProxySettings != null) {
browserCapabilities.setCapability(PROXY, webProxySettings);
}
return browserCapabilities;
}
protected class WebdriversFinalCleanupThread extends Thread {
private final Thread thread;
public WebdriversFinalCleanupThread(Thread thread) {
this.thread = thread;
}
@Override
public void run() {
closeWebDriver(thread);
}
}
protected class UnusedWebdriversCleanupThread extends Thread {
public UnusedWebdriversCleanupThread() {
setDaemon(true);
setName("Webdrivers killer thread");
}
@Override
public void run() {
while (true) {
closeUnusedWebdrivers();
try {
Thread.sleep(100);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
break;
}
}
}
}
}
| src/main/java/com/codeborne/selenide/impl/WebDriverThreadLocalContainer.java | package com.codeborne.selenide.impl;
import com.codeborne.selenide.WebDriverProvider;
import org.openqa.selenium.*;
import org.openqa.selenium.chrome.ChromeDriver;
import org.openqa.selenium.chrome.ChromeOptions;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.htmlunit.HtmlUnitDriver;
import org.openqa.selenium.ie.InternetExplorerDriver;
import org.openqa.selenium.internal.Killable;
import org.openqa.selenium.remote.DesiredCapabilities;
import org.openqa.selenium.remote.RemoteWebDriver;
import org.openqa.selenium.remote.SessionNotFoundException;
import org.openqa.selenium.remote.UnreachableBrowserException;
import org.openqa.selenium.support.events.EventFiringWebDriver;
import org.openqa.selenium.support.events.WebDriverEventListener;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicBoolean;
import static com.codeborne.selenide.Configuration.*;
import static com.codeborne.selenide.WebDriverRunner.*;
import static java.lang.Thread.currentThread;
import static org.openqa.selenium.remote.CapabilityType.*;
public class WebDriverThreadLocalContainer {
protected List<WebDriverEventListener> listeners = new ArrayList<WebDriverEventListener>();
protected Collection<Thread> ALL_WEB_DRIVERS_THREADS = new ConcurrentLinkedQueue<Thread>();
protected Map<Long, WebDriver> THREAD_WEB_DRIVER = new ConcurrentHashMap<Long, WebDriver>(4);
protected Proxy webProxySettings;
protected final AtomicBoolean cleanupThreadStarted = new AtomicBoolean(false);
protected void closeUnusedWebdrivers() {
for (Thread thread : ALL_WEB_DRIVERS_THREADS) {
if (!thread.isAlive()) {
closeWebDriver(thread);
}
}
}
public void addListener(WebDriverEventListener listener) {
listeners.add(listener);
}
public WebDriver setWebDriver(WebDriver webDriver) {
THREAD_WEB_DRIVER.put(currentThread().getId(), webDriver);
return webDriver;
}
public void setProxy(Proxy webProxy) {
webProxySettings=webProxy;
}
protected boolean isBrowserStillOpen(WebDriver webDriver) {
try {
webDriver.getTitle();
return true;
} catch (UnreachableBrowserException e) {
return false;
} catch (NoSuchWindowException e) {
return false;
} catch (SessionNotFoundException e) {
return false;
}
}
/**
* @return true iff webdriver is started in current thread
*/
public boolean hasWebDriverStarted() {
return THREAD_WEB_DRIVER.containsKey(currentThread().getId());
}
public WebDriver getWebDriver() {
WebDriver webDriver = THREAD_WEB_DRIVER.get(currentThread().getId());
return webDriver != null ? webDriver : setWebDriver(createDriver());
}
public WebDriver getAndCheckWebDriver() {
WebDriver webDriver = THREAD_WEB_DRIVER.get(currentThread().getId());
if (webDriver != null) {
if (isBrowserStillOpen(webDriver)) {
return webDriver;
}
else {
        System.out.println("Webdriver has been closed in the meantime. Let's re-create it.");
closeWebDriver();
}
}
return setWebDriver(createDriver());
}
public void closeWebDriver() {
closeWebDriver(currentThread());
}
protected void closeWebDriver(Thread thread) {
ALL_WEB_DRIVERS_THREADS.remove(thread);
WebDriver webdriver = THREAD_WEB_DRIVER.remove(thread.getId());
if (webdriver != null && !holdBrowserOpen) {
System.out.println(" === CLOSE WEBDRIVER: " + thread.getId() + " -> " + webdriver);
try {
webdriver.quit();
}
catch (UnreachableBrowserException ignored) {
// It happens for Firefox. It's ok: browser is already closed.
}
catch (WebDriverException cannotCloseBrowser) {
System.err.println("Cannot close browser normally: " + Cleanup.of.webdriverExceptionMessage(cannotCloseBrowser));
}
finally {
killBrowser(webdriver);
}
}
}
protected void killBrowser(WebDriver webdriver) {
if (webdriver instanceof Killable) {
try {
((Killable) webdriver).kill();
} catch (Exception e) {
System.err.println("Failed to kill browser " + webdriver + ':');
e.printStackTrace();
}
}
}
public void clearBrowserCache() {
WebDriver webdriver = THREAD_WEB_DRIVER.get(currentThread().getId());
if (webdriver != null) {
webdriver.manage().deleteAllCookies();
}
}
public String getPageSource() {
return getWebDriver().getPageSource();
}
public String getCurrentUrl() {
return getWebDriver().getCurrentUrl();
}
protected WebDriver createDriver() {
WebDriver webdriver = remote != null ? createRemoteDriver(remote, browser) :
CHROME.equalsIgnoreCase(browser) ? createChromeDriver() :
isFirefox() ? createFirefoxDriver() :
isHtmlUnit() ? createHtmlUnitDriver() :
isIE() ? createInternetExplorerDriver() :
isPhantomjs() ? createPhantomJsDriver() :
isOpera() ? createOperaDriver() :
isSafari() ? createSafariDriver() :
createInstanceOf(browser);
webdriver = maximize(webdriver);
System.out.println(" === CREATE WEBDRIVER: " + currentThread().getId() + " -> " + webdriver);
return markForAutoClose(listeners.isEmpty() ? webdriver : addListeners(webdriver));
}
protected WebDriver addListeners(WebDriver webdriver) {
EventFiringWebDriver wrapper = new EventFiringWebDriver(webdriver);
for (WebDriverEventListener listener : listeners) {
wrapper.register(listener);
}
return wrapper;
}
protected WebDriver markForAutoClose(WebDriver webDriver) {
ALL_WEB_DRIVERS_THREADS.add(currentThread());
if (!cleanupThreadStarted.get()) {
synchronized (cleanupThreadStarted) {
if (!cleanupThreadStarted.get()) {
new UnusedWebdriversCleanupThread().start();
cleanupThreadStarted.set(true);
}
}
}
Runtime.getRuntime().addShutdownHook(new WebdriversFinalCleanupThread(currentThread()));
return webDriver;
}
protected WebDriver createChromeDriver() {
DesiredCapabilities capabilities = createCommonCapabilities();
ChromeOptions options = new ChromeOptions();
options.addArguments("test-type");
capabilities.setCapability(ChromeOptions.CAPABILITY, options);
return new ChromeDriver(capabilities);
}
protected WebDriver createFirefoxDriver() {
DesiredCapabilities capabilities = createCommonCapabilities();
return new FirefoxDriver(capabilities);
}
protected WebDriver createHtmlUnitDriver() {
DesiredCapabilities capabilities = DesiredCapabilities.htmlUnitWithJs();
capabilities.merge(createCommonCapabilities());
capabilities.setCapability(HtmlUnitDriver.INVALIDSELECTIONERROR, true);
capabilities.setCapability(HtmlUnitDriver.INVALIDXPATHERROR, false);
if (browser.indexOf(':') > -1) {
// Use constants BrowserType.IE, BrowserType.FIREFOX, BrowserType.CHROME etc.
String emulatedBrowser = browser.replaceFirst("htmlunit:(.*)", "$1");
capabilities.setVersion(emulatedBrowser);
}
return new HtmlUnitDriver(capabilities);
}
protected WebDriver createInternetExplorerDriver() {
DesiredCapabilities capabilities = createCommonCapabilities();
return new InternetExplorerDriver(capabilities);
}
protected WebDriver createPhantomJsDriver() {
return createInstanceOf("org.openqa.selenium.phantomjs.PhantomJSDriver");
}
protected WebDriver createOperaDriver() {
return createInstanceOf("com.opera.core.systems.OperaDriver");
}
protected WebDriver createSafariDriver() {
return createInstanceOf("org.openqa.selenium.safari.SafariDriver");
}
protected WebDriver maximize(WebDriver driver) {
if (startMaximized) {
try {
if (isChrome()) {
maximizeChromeBrowser(driver.manage().window());
}
else {
driver.manage().window().maximize();
}
}
catch (Exception cannotMaximize) {
System.out.println("Cannot maximize " + browser + ": " + cannotMaximize);
}
}
return driver;
}
protected void maximizeChromeBrowser(WebDriver.Window window) {
    // Chrome driver does not yet support maximizing. Let's apply black magic!
java.awt.Toolkit toolkit = java.awt.Toolkit.getDefaultToolkit();
Dimension screenResolution = new Dimension(
(int) toolkit.getScreenSize().getWidth(),
(int) toolkit.getScreenSize().getHeight());
window.setSize(screenResolution);
window.setPosition(new org.openqa.selenium.Point(0, 0));
}
protected WebDriver createInstanceOf(String className) {
try {
DesiredCapabilities capabilities = createCommonCapabilities();
capabilities.setJavascriptEnabled(true);
capabilities.setCapability(TAKES_SCREENSHOT, true);
capabilities.setCapability(ACCEPT_SSL_CERTS, true);
capabilities.setCapability(SUPPORTS_ALERTS, true);
Class<?> clazz = Class.forName(className);
if (WebDriverProvider.class.isAssignableFrom(clazz)) {
return ((WebDriverProvider) clazz.newInstance()).createDriver(capabilities);
} else {
Constructor<?> constructor = Class.forName(className).getConstructor(Capabilities.class);
return (WebDriver) constructor.newInstance(capabilities);
}
}
catch (InvocationTargetException e) {
throw runtime(e.getTargetException());
}
catch (Exception invalidClassName) {
throw new IllegalArgumentException(invalidClassName);
}
}
protected RuntimeException runtime(Throwable exception) {
return exception instanceof RuntimeException ? (RuntimeException) exception : new RuntimeException(exception);
}
protected WebDriver createRemoteDriver(String remote, String browser) {
try {
DesiredCapabilities capabilities = createCommonCapabilities();
capabilities.setBrowserName(browser);
return new RemoteWebDriver(new URL(remote), capabilities);
} catch (MalformedURLException e) {
throw new IllegalArgumentException("Invalid 'remote' parameter: " + remote, e);
}
}
protected DesiredCapabilities createCommonCapabilities() {
DesiredCapabilities browserCapabilities = new DesiredCapabilities();
if (webProxySettings != null) {
browserCapabilities.setCapability(PROXY, webProxySettings);
}
return browserCapabilities;
}
protected class WebdriversFinalCleanupThread extends Thread {
private final Thread thread;
public WebdriversFinalCleanupThread(Thread thread) {
this.thread = thread;
}
@Override
public void run() {
closeWebDriver(thread);
}
}
protected class UnusedWebdriversCleanupThread extends Thread {
public UnusedWebdriversCleanupThread() {
setDaemon(true);
setName("Webdrivers killer thread");
}
@Override
public void run() {
while (true) {
closeUnusedWebdrivers();
try {
Thread.sleep(100);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
break;
}
}
}
}
}
| #164 ignore self-signed certificates in PhantomJS driver
| src/main/java/com/codeborne/selenide/impl/WebDriverThreadLocalContainer.java | #164 ignore self-signed certificates in PhantomJS driver |
|
Java | mit | befb4de873f4fe6661819ee595a2609e97df0d61 | 0 | oldterns/VileBot,oldterns/VileBot | /**
* Copyright (C) 2013 Oldterns
*
* This file may be modified and distributed under the terms
* of the MIT license. See the LICENSE file for details.
*/
package com.oldterns.vilebot.handlers.user;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import com.oldterns.vilebot.db.ChurchDB;
import com.oldterns.vilebot.db.QuoteFactDB;
import com.oldterns.vilebot.handlers.user.Jaziz;
import com.oldterns.vilebot.util.BaseNick;
import com.oldterns.vilebot.util.Ignore;
import net.engio.mbassy.listener.Handler;
import ca.szc.keratin.bot.annotation.HandlerContainer;
import ca.szc.keratin.core.event.message.interfaces.Replyable;
import ca.szc.keratin.core.event.message.recieve.ReceiveJoin;
import ca.szc.keratin.core.event.message.recieve.ReceivePrivmsg;
import com.oldterns.vilebot.util.StringUtil;
@HandlerContainer
public class QuotesAndFacts
{
private static final Pattern nounPattern = Pattern.compile( "\\S+" );
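    // Pattern.toString() yields the source regex, so nounPattern can be spliced into the command patterns below.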
private static final Pattern addPattern = Pattern.compile( "^!(fact|quote)add (" + nounPattern + ") (.+)$" );
private static final Pattern dumpPattern = Pattern.compile( "^!(fact|quote)dump (" + nounPattern + ")\\s*$" );
private static final Pattern randomPattern = Pattern.compile( "^!(fact|quote)random5 (" + nounPattern + ")\\s*$" );
private static final Pattern numPattern = Pattern.compile( "^!(fact|quote)number (" + nounPattern + ")\\s*$" );
private static final Pattern queryPattern =
Pattern.compile( "^!(fact|quote) (" + nounPattern + ")( !jaziz)?\\s*$" );
    // "( !jaziz)" is not included in searchPattern because it is handled in the factQuoteSearch method
private static final Pattern searchPattern = Pattern.compile( "^!(fact|quote)search (" + nounPattern + ") (.*)$" );
private static final Random random = new Random();
@Handler
private void factQuoteAdd( ReceivePrivmsg event )
{
Matcher matcher = addPattern.matcher( event.getText() );
if ( matcher.matches() )
{
String mode = matcher.group( 1 );
String noun = BaseNick.toBaseNick( matcher.group( 2 ) );
String text = matcher.group( 3 );
String sender = BaseNick.toBaseNick( event.getSender() );
if ( !sender.equals( noun ) )
{
text = trimChars( text, " '\"" );
if ( "fact".equals( mode ) )
{
QuoteFactDB.addFact( noun, text );
event.reply( formatFactReply( noun, text ) );
}
else
{
QuoteFactDB.addQuote( noun, text );
event.reply( formatQuoteReply( noun, text ) );
}
}
else
{
event.reply( StringUtil.capitalizeFirstLetter( mode )
+ "s from yourself are both terrible and uninteresting." );
}
}
}
@Handler
private void factQuoteDump( ReceivePrivmsg event )
{
Matcher matcher = dumpPattern.matcher( event.getText() );
if ( matcher.matches() )
{
String mode = matcher.group( 1 );
String queried = BaseNick.toBaseNick( matcher.group( 2 ) );
if ( "fact".equals( mode ) )
{
Set<String> allFacts = QuoteFactDB.getFacts( queried );
if ( allFacts.isEmpty() )
{
event.replyPrivately( queried + " has no facts." );
}
for ( String fact : allFacts )
{
event.replyPrivately( formatFactReply( queried, fact ) );
}
}
else
{
Set<String> allQuotes = QuoteFactDB.getQuotes( queried );
if ( allQuotes.isEmpty() )
{
event.replyPrivately( queried + " has no quotes." );
}
for ( String quote : allQuotes )
{
                event.replyPrivately( formatQuoteReply( queried, quote ) );
}
}
}
}
@Handler
private void factQuoteRandomDump( ReceivePrivmsg event )
{
Matcher matcher = randomPattern.matcher( event.getText() );
if ( matcher.matches() )
{
String mode = matcher.group( 1 );
String queried = BaseNick.toBaseNick( matcher.group( 2 ) );
if ( "fact".equals( mode ) )
{
Long factsLength = QuoteFactDB.getFactsLength( queried );
if ( factsLength == 0 )
{
event.replyPrivately( queried + " has no facts." );
}
else if ( factsLength <= 5 )
{
Set<String> allFacts = QuoteFactDB.getFacts( queried );
for ( String fact : allFacts )
{
event.replyPrivately( formatFactReply( queried, fact ) );
}
}
else
{
List<String> randomFacts = QuoteFactDB.getRandFacts( queried );
for ( String fact : randomFacts )
{
event.replyPrivately( formatFactReply( queried, fact ) );
}
}
}
else
{
Long quotesLength = QuoteFactDB.getQuotesLength( queried );
if ( quotesLength == 0 )
{
event.replyPrivately( queried + " has no quotes." );
}
else if ( quotesLength <= 5 )
{
Set<String> allQuotes = QuoteFactDB.getQuotes( queried );
for ( String quote : allQuotes )
{
event.replyPrivately( formatQuoteReply( queried, quote ) );
}
}
else
{
List<String> randomQuote = QuoteFactDB.getRandQuotes( queried );
for ( String quote : randomQuote )
{
event.replyPrivately( formatQuoteReply( queried, quote ) );
}
}
}
}
}
@Handler
private void factQuoteNum( ReceivePrivmsg event )
{
Matcher matcher = numPattern.matcher( event.getText() );
if ( matcher.matches() )
{
String mode = matcher.group( 1 );
String queried = BaseNick.toBaseNick( matcher.group( 2 ) );
if ( "fact".equals( mode ) )
{
Long factsLength = QuoteFactDB.getFactsLength( queried );
if ( factsLength == 0 )
{
event.replyPrivately( queried + " has no facts." );
}
else
{
event.replyPrivately( queried + " has " + factsLength + " facts." );
}
}
else
{
Long quotesLength = QuoteFactDB.getQuotesLength( queried );
if ( quotesLength == 0 )
{
event.replyPrivately( queried + " has no quotes." );
}
else
{
event.replyPrivately( queried + " has " + quotesLength + " quotes." );
}
}
}
}
@Handler
private void factQuoteQuery( ReceivePrivmsg event )
{
Matcher matcher = queryPattern.matcher( event.getText() );
if ( matcher.matches() )
{
String mode = matcher.group( 1 );
String noun = BaseNick.toBaseNick( matcher.group( 2 ) );
// check if quote/fact needs to be piped to jaziz
boolean jaziz = event.getText().lastIndexOf( "!jaziz" ) >= 0;
if ( "fact".equals( mode ) )
{
if ( !replyWithFact( noun, event, jaziz ) )
{
event.reply( noun + " has no facts." );
}
}
else
{
if ( !replyWithQuote( noun, event, jaziz ) )
{
event.reply( noun + " has no quotes." );
}
}
}
}
@Handler
private void factQuoteSearch( ReceivePrivmsg event )
{
Matcher matcher = searchPattern.matcher( event.getText() );
if ( matcher.matches() )
{
String mode = matcher.group( 1 );
String noun = BaseNick.toBaseNick( matcher.group( 2 ) );
String regex = matcher.group( 3 );
// check if quote/fact needs to be piped to jaziz
int jazizIdx = regex.lastIndexOf( "!jaziz" );
boolean jaziz = jazizIdx >= 0;
if ( jaziz )
{
regex = regex.substring( 0, jazizIdx - 1 );
}
try
{
// Case insensitive added automatically, use (?-i) in a message to reenable case sensitivity
Pattern pattern = Pattern.compile( "(?i)" + regex );
if ( "fact".equals( mode ) )
{
Set<String> texts = QuoteFactDB.getFacts( noun );
if ( texts != null )
{
String randomMatch = regexSetSearch( texts, pattern );
if ( randomMatch != null )
{
if ( jaziz )
{
try
{
event.reply( formatFactReply( noun, Jaziz.jazizify( randomMatch ) ) );
}
catch ( Exception e )
{
event.reply( "eeeh" );
e.printStackTrace();
}
}
else
{
event.reply( formatFactReply( noun, randomMatch ) );
}
}
else
{
event.reply( noun + " has no matching facts." );
}
}
else
{
event.reply( noun + " has no facts." );
}
}
else
{
Set<String> texts = QuoteFactDB.getQuotes( noun );
if ( texts != null )
{
String randomMatch = regexSetSearch( texts, pattern );
if ( randomMatch != null )
{
if ( jaziz )
{
try
{
event.reply( formatQuoteReply( noun, Jaziz.jazizify( randomMatch ) ) );
}
catch ( Exception e )
{
event.reply( "eeeh" );
e.printStackTrace();
}
}
else
{
event.reply( formatQuoteReply( noun, randomMatch ) );
}
}
else
{
event.reply( noun + " has no matching quotes." );
}
}
else
{
event.reply( noun + " has no quotes." );
}
}
}
catch ( PatternSyntaxException e )
{
event.reply( "Syntax error in regex pattern" );
}
}
}
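    // Returns a random element of texts that matches the given pattern, or null when nothing matches.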
private static String regexSetSearch( Set<String> texts, Pattern pattern )
{
List<String> matchingTexts = new LinkedList<String>();
for ( String text : texts )
{
Matcher matcher = pattern.matcher( text );
if ( matcher.find() )
{
matchingTexts.add( text );
}
}
int matchCount = matchingTexts.size();
if ( matchCount > 0 )
{
int selection = random.nextInt( matchCount );
return matchingTexts.get( selection );
}
else
{
return null;
}
}
@Handler
private void announceFactOrQuoteOnJoin( ReceiveJoin event )
{
String baseNick = BaseNick.toBaseNick( event.getJoiner() );
if ( !Ignore.getOnJoin().contains( baseNick ) )
{
if ( random.nextBoolean() )
{
if ( !replyWithQuote( baseNick, event, false ) )
replyWithFact( baseNick, event, false );
}
else
{
if ( !replyWithFact( baseNick, event, false ) )
replyWithQuote( baseNick, event, false );
}
}
}
private static boolean replyWithFact( String noun, Replyable event, boolean jaziz )
{
String text = QuoteFactDB.getRandFact( noun );
if ( text != null )
{
if ( ChurchDB.getDonorRank( noun ) != null && ChurchDB.getDonorRank( noun ) < 4 )
{
String title = ChurchDB.getDonorTitle( noun );
if ( title.trim().length() > 0 )
{
noun = title;
}
}
if ( jaziz )
{
try
{
event.reply( formatFactReply( noun, Jaziz.jazizify( text ) ) );
}
catch ( Exception e )
{
event.reply( "eeeh" );
e.printStackTrace();
}
}
else
{
event.reply( formatFactReply( noun, text ) );
}
return true;
}
return false;
}
private static String formatFactReply( String noun, String fact )
{
return noun + " " + fact;
}
private static boolean replyWithQuote( String noun, Replyable event, boolean jaziz )
{
String text = QuoteFactDB.getRandQuote( noun );
if ( text != null )
{
if ( ChurchDB.getDonorRank( noun ) != null && ChurchDB.getDonorRank( noun ) < 4 )
{
String title = ChurchDB.getDonorTitle( noun );
if ( title.trim().length() > 0 )
{
noun = title;
}
}
if ( jaziz )
{
try
{
event.reply( formatQuoteReply( noun, Jaziz.jazizify( text ) ) );
}
catch ( Exception e )
{
event.reply( "eeeh" );
e.printStackTrace();
}
}
else
{
event.reply( formatQuoteReply( noun, text ) );
}
return true;
}
return false;
}
private static String formatQuoteReply( String noun, String quote )
{
return noun + " once said, \"" + quote + "\".";
}
/**
* Removes all specified leading and trailing characters in the array charsToRemove.
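     * For example, {@code trimChars( "  'hello'  ", " '" )} returns {@code hello}.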
*
* @param input The string to process
* @param charsToRemove All characters to remove, treated as a set
* @return A copy of the input String with the characters removed
     * @see String#trim()
*/
private static String trimChars( String input, String charsToRemove )
{
char[] value = input.toCharArray();
char[] rmChars = charsToRemove.toCharArray();
Arrays.sort( rmChars );
int len = value.length;
int st = 0;
while ( ( st < len ) && ( Arrays.binarySearch( rmChars, value[st] ) >= 0 ) )
{
st++;
}
while ( ( st < len ) && ( Arrays.binarySearch( rmChars, value[len - 1] ) >= 0 ) )
{
len--;
}
        return input.substring( st, len );
}
}
| vilebot/src/main/java/com/oldterns/vilebot/handlers/user/QuotesAndFacts.java | /**
* Copyright (C) 2013 Oldterns
*
* This file may be modified and distributed under the terms
* of the MIT license. See the LICENSE file for details.
*/
package com.oldterns.vilebot.handlers.user;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.regex.PatternSyntaxException;
import com.oldterns.vilebot.db.ChurchDB;
import com.oldterns.vilebot.db.QuoteFactDB;
import com.oldterns.vilebot.handlers.user.Jaziz;
import com.oldterns.vilebot.util.BaseNick;
import com.oldterns.vilebot.util.Ignore;
import net.engio.mbassy.listener.Handler;
import ca.szc.keratin.bot.annotation.HandlerContainer;
import ca.szc.keratin.core.event.message.interfaces.Replyable;
import ca.szc.keratin.core.event.message.recieve.ReceiveJoin;
import ca.szc.keratin.core.event.message.recieve.ReceivePrivmsg;
import com.oldterns.vilebot.util.StringUtil;
@HandlerContainer
public class QuotesAndFacts
{
private static final Pattern nounPattern = Pattern.compile( "\\S+" );
private static final Pattern addPattern = Pattern.compile( "^!(fact|quote)add (" + nounPattern + ") (.+)$" );
private static final Pattern dumpPattern = Pattern.compile( "^!(fact|quote)dump (" + nounPattern + ")\\s*$" );
private static final Pattern randomPattern = Pattern.compile( "^!(fact|quote)random5 (" + nounPattern + ")\\s*$" );
private static final Pattern numPattern = Pattern.compile( "^!(fact|quote)number (" + nounPattern + ")\\s*$" );
private static final Pattern queryPattern =
Pattern.compile( "^!(fact|quote) (" + nounPattern + ")( !jaziz)?\\s*$" );
private static final Pattern searchPattern = Pattern.compile( "^!(fact|quote)search (" + nounPattern + ") (.*)$" );
private static final Random random = new Random();
@Handler
private void factQuoteAdd( ReceivePrivmsg event )
{
Matcher matcher = addPattern.matcher( event.getText() );
if ( matcher.matches() )
{
String mode = matcher.group( 1 );
String noun = BaseNick.toBaseNick( matcher.group( 2 ) );
String text = matcher.group( 3 );
String sender = BaseNick.toBaseNick( event.getSender() );
if ( !sender.equals( noun ) )
{
text = trimChars( text, " '\"" );
if ( "fact".equals( mode ) )
{
QuoteFactDB.addFact( noun, text );
event.reply( formatFactReply( noun, text ) );
}
else
{
QuoteFactDB.addQuote( noun, text );
event.reply( formatQuoteReply( noun, text ) );
}
}
else
{
event.reply( StringUtil.capitalizeFirstLetter( mode )
+ "s from yourself are both terrible and uninteresting." );
}
}
}
@Handler
private void factQuoteDump( ReceivePrivmsg event )
{
Matcher matcher = dumpPattern.matcher( event.getText() );
if ( matcher.matches() )
{
String mode = matcher.group( 1 );
String queried = BaseNick.toBaseNick( matcher.group( 2 ) );
if ( "fact".equals( mode ) )
{
Set<String> allFacts = QuoteFactDB.getFacts( queried );
if ( allFacts.isEmpty() )
{
event.replyPrivately( queried + " has no facts." );
}
for ( String fact : allFacts )
{
event.replyPrivately( formatFactReply( queried, fact ) );
}
}
else
{
Set<String> allQuotes = QuoteFactDB.getQuotes( queried );
if ( allQuotes.isEmpty() )
{
event.replyPrivately( queried + " has no quotes." );
}
for ( String quote : allQuotes )
{
event.replyPrivately( formatFactReply( queried, quote ) );
}
}
}
}
@Handler
private void factQuoteRandomDump( ReceivePrivmsg event )
{
Matcher matcher = randomPattern.matcher( event.getText() );
if ( matcher.matches() )
{
String mode = matcher.group( 1 );
String queried = BaseNick.toBaseNick( matcher.group( 2 ) );
if ( "fact".equals( mode ) )
{
Long factsLength = QuoteFactDB.getFactsLength( queried );
if ( factsLength == 0 )
{
event.replyPrivately( queried + " has no facts." );
}
else if ( factsLength <= 5 )
{
Set<String> allFacts = QuoteFactDB.getFacts( queried );
for ( String fact : allFacts )
{
event.replyPrivately( formatFactReply( queried, fact ) );
}
}
else
{
List<String> randomFacts = QuoteFactDB.getRandFacts( queried );
for ( String fact : randomFacts )
{
event.replyPrivately( formatFactReply( queried, fact ) );
}
}
}
else
{
Long quotesLength = QuoteFactDB.getQuotesLength( queried );
if ( quotesLength == 0 )
{
event.replyPrivately( queried + " has no quotes." );
}
else if ( quotesLength <= 5 )
{
Set<String> allQuotes = QuoteFactDB.getQuotes( queried );
for ( String quote : allQuotes )
{
event.replyPrivately( formatQuoteReply( queried, quote ) );
}
}
else
{
List<String> randomQuote = QuoteFactDB.getRandQuotes( queried );
for ( String quote : randomQuote )
{
event.replyPrivately( formatQuoteReply( queried, quote ) );
}
}
}
}
}
@Handler
private void factQuoteNum( ReceivePrivmsg event )
{
Matcher matcher = numPattern.matcher( event.getText() );
if ( matcher.matches() )
{
String mode = matcher.group( 1 );
String queried = BaseNick.toBaseNick( matcher.group( 2 ) );
if ( "fact".equals( mode ) )
{
Long factsLength = QuoteFactDB.getFactsLength( queried );
if ( factsLength == 0 )
{
event.replyPrivately( queried + " has no facts." );
}
else
{
event.replyPrivately( queried + " has " + factsLength + " facts." );
}
}
else
{
Long quotesLength = QuoteFactDB.getQuotesLength( queried );
if ( quotesLength == 0 )
{
event.replyPrivately( queried + " has no quotes." );
}
else
{
event.replyPrivately( queried + " has " + quotesLength + " quotes." );
}
}
}
}
@Handler
private void factQuoteQuery( ReceivePrivmsg event )
{
Matcher matcher = queryPattern.matcher( event.getText() );
if ( matcher.matches() )
{
String mode = matcher.group( 1 );
String noun = BaseNick.toBaseNick( matcher.group( 2 ) );
// check if quote/fact needs to be piped to jaziz
boolean jaziz = event.getText().lastIndexOf( "!jaziz" ) >= 0;
if ( "fact".equals( mode ) )
{
if ( !replyWithFact( noun, event, jaziz ) )
{
event.reply( noun + " has no facts." );
}
}
else
{
if ( !replyWithQuote( noun, event, jaziz ) )
{
event.reply( noun + " has no quotes." );
}
}
}
}
@Handler
private void factQuoteSearch( ReceivePrivmsg event )
{
Matcher matcher = searchPattern.matcher( event.getText() );
if ( matcher.matches() )
{
String mode = matcher.group( 1 );
String noun = BaseNick.toBaseNick( matcher.group( 2 ) );
String regex = matcher.group( 3 );
// check if quote/fact needs to be piped to jaziz
int jazizIdx = regex.lastIndexOf( "!jaziz" );
boolean jaziz = jazizIdx >= 0;
if ( jaziz )
{
regex = regex.substring( 0, jazizIdx - 1 );
}
try
{
// Case insensitive added automatically, use (?-i) in a message to reenable case sensitivity
Pattern pattern = Pattern.compile( "(?i)" + regex );
if ( "fact".equals( mode ) )
{
Set<String> texts = QuoteFactDB.getFacts( noun );
if ( texts != null )
{
String randomMatch = regexSetSearch( texts, pattern );
if ( randomMatch != null )
{
if ( jaziz )
{
try
{
event.reply( formatFactReply( noun, Jaziz.jazizify( randomMatch ) ) );
}
catch ( Exception e )
{
event.reply( "eeeh" );
e.printStackTrace();
}
}
else
{
event.reply( formatFactReply( noun, randomMatch ) );
}
}
else
{
event.reply( noun + " has no matching facts." );
}
}
else
{
event.reply( noun + " has no facts." );
}
}
else
{
Set<String> texts = QuoteFactDB.getQuotes( noun );
if ( texts != null )
{
String randomMatch = regexSetSearch( texts, pattern );
if ( randomMatch != null )
{
if ( jaziz )
{
try
{
event.reply( formatQuoteReply( noun, Jaziz.jazizify( randomMatch ) ) );
}
catch ( Exception e )
{
event.reply( "eeeh" );
e.printStackTrace();
}
}
else
{
event.reply( formatQuoteReply( noun, randomMatch ) );
}
}
else
{
event.reply( noun + " has no matching quotes." );
}
}
else
{
event.reply( noun + " has no quotes." );
}
}
}
catch ( PatternSyntaxException e )
{
event.reply( "Syntax error in regex pattern" );
}
}
}
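    /**
     * Returns a random element of texts whose contents match pattern, or null if none match.
     */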
private static String regexSetSearch( Set<String> texts, Pattern pattern )
{
List<String> matchingTexts = new LinkedList<String>();
for ( String text : texts )
{
Matcher matcher = pattern.matcher( text );
if ( matcher.find() )
{
matchingTexts.add( text );
}
}
int matchCount = matchingTexts.size();
if ( matchCount > 0 )
{
int selection = random.nextInt( matchCount );
return matchingTexts.get( selection );
}
else
{
return null;
}
}
@Handler
private void announceFactOrQuoteOnJoin( ReceiveJoin event )
{
String baseNick = BaseNick.toBaseNick( event.getJoiner() );
if ( !Ignore.getOnJoin().contains( baseNick ) )
{
if ( random.nextBoolean() )
{
if ( !replyWithQuote( baseNick, event, false ) )
replyWithFact( baseNick, event, false );
}
else
{
if ( !replyWithFact( baseNick, event, false ) )
replyWithQuote( baseNick, event, false );
}
}
}
private static boolean replyWithFact( String noun, Replyable event, boolean jaziz )
{
String text = QuoteFactDB.getRandFact( noun );
if ( text != null )
{
if ( ChurchDB.getDonorRank( noun ) != null && ChurchDB.getDonorRank( noun ) < 4 )
{
String title = ChurchDB.getDonorTitle( noun );
if ( title.trim().length() > 0 )
{
noun = title;
}
}
if ( jaziz )
{
try
{
event.reply( formatFactReply( noun, Jaziz.jazizify( text ) ) );
}
catch ( Exception e )
{
event.reply( "eeeh" );
e.printStackTrace();
}
}
else
{
event.reply( formatFactReply( noun, text ) );
}
return true;
}
return false;
}
private static String formatFactReply( String noun, String fact )
{
return noun + " " + fact;
}
private static boolean replyWithQuote( String noun, Replyable event, boolean jaziz )
{
String text = QuoteFactDB.getRandQuote( noun );
if ( text != null )
{
if ( ChurchDB.getDonorRank( noun ) != null && ChurchDB.getDonorRank( noun ) < 4 )
{
String title = ChurchDB.getDonorTitle( noun );
if ( title.trim().length() > 0 )
{
noun = title;
}
}
if ( jaziz )
{
try
{
event.reply( formatQuoteReply( noun, Jaziz.jazizify( text ) ) );
}
catch ( Exception e )
{
event.reply( "eeeh" );
e.printStackTrace();
}
}
else
{
event.reply( formatQuoteReply( noun, text ) );
}
return true;
}
return false;
}
private static String formatQuoteReply( String noun, String quote )
{
return noun + " once said, \"" + quote + "\".";
}
/**
* Removes all specified leading and trailing characters in the array charsToRemove.
*
* @param input The string to process
* @param charsToRemove All characters to remove, treated as a set
* @return A copy of the input String with the characters removed
 * @see java.lang.String#trim()
*/
private static String trimChars( String input, String charsToRemove )
{
char[] value = input.toCharArray();
char[] rmChars = charsToRemove.toCharArray();
Arrays.sort( rmChars );
int len = value.length;
int st = 0;
while ( ( st < len ) && ( Arrays.binarySearch( rmChars, value[st] ) >= 0 ) )
{
st++;
}
while ( ( st < len ) && ( Arrays.binarySearch( rmChars, value[len - 1] ) >= 0 ) )
{
len--;
}
        return input.substring( st, len );
}
}
| add comment to explain searchPattern in QuotesAndFacts
| vilebot/src/main/java/com/oldterns/vilebot/handlers/user/QuotesAndFacts.java | add comment to explain searchPattern in QuotesAndFacts |
|
Java | mit | 06fc5eecf80df9360894e971c8513faefdc8086e | 0 | iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable,iontorrent/Torrent-Variant-Caller-stable | /*
* Copyright (c) 2010 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.playground.gatk.walkers.annotator;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.Map.Entry;
import org.broad.tribble.vcf.VCFHeader;
import org.broad.tribble.vcf.VCFHeaderLine;
import org.broad.tribble.vcf.VCFRecord;
import org.broadinstitute.sting.commandline.Argument;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.contexts.StratifiedAlignmentContext;
import org.broadinstitute.sting.gatk.contexts.variantcontext.VariantContext;
import org.broadinstitute.sting.gatk.datasources.simpleDataSources.ReferenceOrderedDataSource;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.refdata.VariantContextAdaptors;
import org.broadinstitute.sting.gatk.refdata.features.annotator.AnnotatorInputTableCodec;
import org.broadinstitute.sting.gatk.walkers.By;
import org.broadinstitute.sting.gatk.walkers.DataSource;
import org.broadinstitute.sting.gatk.walkers.RodWalker;
import org.broadinstitute.sting.gatk.walkers.TreeReducible;
import org.broadinstitute.sting.gatk.walkers.annotator.VariantAnnotatorEngine;
import org.broadinstitute.sting.utils.BaseUtils;
import org.broadinstitute.sting.utils.SampleUtils;
import org.broadinstitute.sting.utils.StingException;
import org.broadinstitute.sting.utils.collections.Pair;
import org.broadinstitute.sting.utils.genotype.vcf.VCFUtils;
import org.broadinstitute.sting.utils.genotype.vcf.VCFWriter;
/**
* Annotates variant calls with information from user-specified tabular files.
*
* For details, see: http://www.broadinstitute.org/gsa/wiki/index.php/GenomicAnnotator
*/
//@Requires(value={DataSource.READS, DataSource.REFERENCE},referenceMetaData=@RMD(name="variant",type=VariationRod.class))
//@Allows(value={DataSource.READS, DataSource.REFERENCE})
//@Reference(window=@Window(start=-50,stop=50))
@By(DataSource.REFERENCE)
public class GenomicAnnotator extends RodWalker<LinkedList<VCFRecord>, LinkedList<VCFRecord>> implements TreeReducible<LinkedList<VCFRecord>> {
@Argument(fullName="vcfOutput", shortName="vcf", doc="VCF file to which all variants should be written with annotations", required=true)
protected File VCF_OUT;
@Argument(fullName="sampleName", shortName="sample", doc="The sample (NA-ID) corresponding to the variant input (for non-VCF input only)", required=false)
protected String sampleName = null;
@Argument(fullName="select", shortName="s", doc="Optionally specifies which subset of columns from which -B inputs should be used for annotations. For example, -B mydbsnp,AnnotatorInputTable,/path/to/mydbsnp.txt -B mytable,AnnotatorInputTable,/path/mytable.txt -s mydbsnp.avHet,mydbsnp.name,mytable.column3 will cause annotations to only be generated from the 3 columns specified using -s.", required=false)
protected String[] SELECT_COLUMNS = {};
@Argument(fullName="join", shortName="J", doc="Optionally specifies a file and column within that file that should be LEFT-JOIN'ed to a column in a previously-specified file. The file provided to -J must be tab-delimited, with the first non-comment/non-empty line containing column names. (example: -B name,AnnotatorInputTable,/path/to/file1 -J name2,/path/to/file2,name.columnName=name2.columnName2 - this will join the table in file2 to the table in file1) ", required=false)
protected String[] JOIN_ARGS = {};
@Argument(fullName="oneToMany", shortName="m", doc="If more than one record from the same file matches a particular locus (for example, multiple dbSNP records with the same position), create multiple entries in the ouptut VCF file - one for each match. If a particular tabular file has J matches, and another tabular file has K matches for a given locus, then J*K output VCF records will be generated - one for each pair of K, J. If this flag is not provided, the multiple records are still generated, but they are stored in the INFO field of a single output VCF record, with their annotation keys differentiated by appending '_i' with i varying from 1 to K*J. ", required=false)
protected Boolean ONE_TO_MANY = false;
private VCFWriter vcfWriter;
private VariantAnnotatorEngine engine;
private boolean strict = true;
private boolean multiThreadedMode = false; //whether map will be called by more than one thread.
/**
* Prepare the output file and the list of available features.
*/
public void initialize() {
multiThreadedMode = getToolkit().getArguments().numberOfThreads > 1;
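        // In multi-threaded runs the VCF records produced by map() are kept in memory,
        // merged via treeReduce(), and written out in onTraversalDone(); single-threaded
        // runs write each record to disk as soon as it is computed.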
// get the list of all sample names from the various VCF input rods
TreeSet<String> samples = new TreeSet<String>();
SampleUtils.getUniquifiedSamplesFromRods(getToolkit(), samples, new HashMap<Pair<String, String>, String>());
//read all ROD file headers and construct a set of all column names to be used for validation of command-line args
final Set<String> allFullyQualifiedColumnNames = new LinkedHashSet<String>();
final Set<String> allBindingNames = new LinkedHashSet<String>();
try {
for(ReferenceOrderedDataSource ds : getToolkit().getRodDataSources()) {
if(! ds.getReferenceOrderedData().getType().equals(AnnotatorInputTableCodec.class)) {
continue; //skip all non-AnnotatorInputTable files.
}
final String bindingName = ds.getName();
allBindingNames.add(bindingName);
final ArrayList<String> header = AnnotatorInputTableCodec.readHeader(ds.getReferenceOrderedData().getFile());
for(String columnName : header) {
allFullyQualifiedColumnNames.add(bindingName + "." + columnName);
}
}
} catch(IOException e) {
throw new StingException("Failed when attempting to read file header. ", e);
}
        //parse the JOIN_COLUMNS args, read in the specified files, and validate column names in the = relation. The end result of this loop is to populate the List of joinTables with one entry per -J arg.
final List<JoinTable> joinTables = new LinkedList<JoinTable>();
for(String joinArg : JOIN_ARGS) {
//parse the tokens
final String[] arg = joinArg.split(",");
if(arg.length != 3) {
throw new StingException("The following -J arg: \"" + joinArg + "\" must contain 3 comma-separated values. (ex: -J name,/path/to/file,name.columnName=name2.columnName2)");
}
final String bindingName = arg[0];
final String filename = arg[1];
final String columnsToJoin = arg[2];
if(allBindingNames.contains(bindingName)) {
throw new StingException("The name \"" + bindingName + "\" in the -J arg: \"" + joinArg + "\" has already been used.");
}
String[] splitOnEquals = columnsToJoin.split("=+");
if(splitOnEquals.length != 2) {
throw new StingException("The -J arg: \"" + joinArg + "\" must specify the columns to join on. (ex: -J name,/path/to/file,name.columnName=name2.columnName2)");
}
String[] splitOnDot1 = splitOnEquals[0].split("\\.");
String[] splitOnDot2 = splitOnEquals[1].split("\\.");
if(splitOnDot1.length != 2 || splitOnDot2.length != 2) {
throw new StingException("The -J arg: \"" + joinArg + "\" must fully specify the columns to join on. (ex: -J name,/path/to/file,name.columnName=name2.columnName2)");
}
final String bindingName1 = splitOnDot1[0];
final String columnName1 = splitOnDot1[1];
final String bindingName2 = splitOnDot2[0];
final String columnName2 = splitOnDot2[1];
//figure out which of the 2 binding names within the = relation matches the -J bindingName
final String localBindingName = bindingName; //alias
final String localColumnName;
final String externalBindingName;
final String externalColumnName;
if(bindingName1.equals(bindingName)) {
localColumnName = columnName1;
externalBindingName = bindingName2;
externalColumnName = columnName2;
} else if(bindingName2.equals(bindingName)) {
localColumnName = columnName2;
externalBindingName = bindingName1;
externalColumnName = columnName1;
} else {
throw new StingException("The -J arg: \"" + joinArg + "\" must fully specify the columns to join on. (ex: -J name,/path/to/file,name.columnName=name2.columnName2)");
}
//validate externalColumnName
final String fullyQualifiedExternalColumnName = externalBindingName + '.' + externalColumnName;
if( !allFullyQualifiedColumnNames.contains(fullyQualifiedExternalColumnName) ) {
throw new StingException("The -J arg: \"" + joinArg + "\" specifies an unknown column name: \"" + fullyQualifiedExternalColumnName + "\"");
}
//read in the file contents into a JoinTable object
final JoinTable joinTable = new JoinTable();
joinTable.parseFromFile(filename, localBindingName, localColumnName, externalBindingName, externalColumnName, strict);
joinTables.add(joinTable);
//validate localColumnName, and add all column names in this file to the list of allFullyQualifiedColumnNames so that they can be referenced from subsequent -J args.
final List<String> columnNames = joinTable.getColumnNames();
final List<String> fullyQualifiedColumnNames = new LinkedList<String>();
boolean found = false;
for(int i = 0; i < columnNames.size(); i++) {
final String columnName = columnNames.get(i);
if(columnName.equals(localColumnName)) {
found = true;
}
fullyQualifiedColumnNames.add(localBindingName + '.' + columnName);
}
if(!found) {
throw new StingException("The -J arg: \"" + joinArg + "\" specifies an unknown column name: \"" + localColumnName + "\". It's not one of the column names in the header " + columnNames + " of the file: " + filename);
}
allFullyQualifiedColumnNames.addAll(fullyQualifiedColumnNames);
}
//parse the SELECT_COLUMNS arg and validate the column names
List<String> parsedSelectColumns = new LinkedList<String>();
for(String token : SELECT_COLUMNS) {
parsedSelectColumns.addAll(Arrays.asList(token.split(",")));
}
SELECT_COLUMNS = parsedSelectColumns.toArray(SELECT_COLUMNS);
for(String columnName : SELECT_COLUMNS) {
if(!allFullyQualifiedColumnNames.contains(columnName)) {
throw new StingException("The column name '" + columnName + "' provided to -s doesn't match any of the column names in any of the -B files. Here is the list of available column names: " + allFullyQualifiedColumnNames);
}
}
        //instantiate the VariantAnnotatorEngine
engine = new VariantAnnotatorEngine(getToolkit(), new String[] { }, new String[] { "GenomicAnnotation" });
engine.setOneToMany( Boolean.TRUE.equals( ONE_TO_MANY ) );
engine.setRequestedColumns(SELECT_COLUMNS);
engine.setJoinTables(joinTables);
// setup the header fields
Set<VCFHeaderLine> hInfo = new HashSet<VCFHeaderLine>();
hInfo.addAll(VCFUtils.getHeaderFields(getToolkit()));
hInfo.add(new VCFHeaderLine("source", "Annotator"));
hInfo.add(new VCFHeaderLine("annotatorReference", getToolkit().getArguments().referenceFile.getName()));
hInfo.addAll(engine.getVCFAnnotationDescriptions());
vcfWriter = new VCFWriter(VCF_OUT);
VCFHeader vcfHeader = new VCFHeader(hInfo, samples);
vcfWriter.writeHeader(vcfHeader);
}
/**
     * Initialize the reduce accumulator to an empty list of annotated VCF records.
     *
     * @return an empty list
*/
public LinkedList<VCFRecord> reduceInit() { return new LinkedList<VCFRecord>(); }
/**
* We want reads that span deletions
*
* @return true
*/
public boolean includeReadsWithDeletionAtLoci() { return true; }
/**
* For each site of interest, annotate based on the requested annotation types
*
* @param tracker the meta-data tracker
* @param ref the reference base
* @param context the context for the given locus
     * @return the list of annotated VCF records produced for this locus (empty if the locus was skipped)
*/
public LinkedList<VCFRecord> map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
LinkedList<VCFRecord> result = new LinkedList<VCFRecord>();
if ( tracker == null )
return result;
List<Object> rods = tracker.getReferenceMetaData("variant");
// ignore places where we don't have a variant
if ( rods.size() == 0 )
return result;
Object variant = rods.get(0);
if( BaseUtils.isNBase(ref.getBase()) ) {
return result; //TODO Currently, VariantContextAdaptors.toVCF(annotatedVC, ref.getBase()) fails when base is 'N'. is this right?
}
VariantContext vc = VariantContextAdaptors.toVariantContext("variant", variant, ref);
if ( vc == null )
return result;
// if the reference base is not ambiguous, we can annotate
Collection<VariantContext> annotatedVCs = Arrays.asList(vc);
if ( BaseUtils.simpleBaseToBaseIndex(ref.getBase()) != -1 ) {
Map<String, StratifiedAlignmentContext> stratifiedContexts = StratifiedAlignmentContext.splitContextBySample(context.getBasePileup());
if ( stratifiedContexts != null ) {
annotatedVCs = engine.annotateContext(tracker, ref, stratifiedContexts, vc);
}
}
if(multiThreadedMode) {
//keep results in memory, only writing them in onTraversalDone(..) after they have been merged via treeReduce(..)
for(VariantContext annotatedVC : annotatedVCs ) {
result.add(VariantContextAdaptors.toVCF(annotatedVC, ref.getBase()));
}
} else {
//write results to disk immediately
for(VariantContext annotatedVC : annotatedVCs ) {
vcfWriter.addRecord(VariantContextAdaptors.toVCF(annotatedVC, ref.getBase()));
}
}
return result;
}
/**
* Merge lists.
*
* @param value result of the map.
* @param sum accumulator for the reduce.
     * @return the combined list of VCF records.
*/
public LinkedList<VCFRecord> reduce(LinkedList<VCFRecord> value, LinkedList<VCFRecord> sum) {
sum.addAll(value);
return sum;
}
/**
* Merge lists.
*/
public LinkedList<VCFRecord> treeReduce(LinkedList<VCFRecord> lhs, LinkedList<VCFRecord> rhs) {
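        // Merge the partial record lists produced by two parallel traversal shards.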
lhs.addAll(rhs);
return lhs;
}
/**
     * Write any records accumulated in multi-threaded mode, report per-table annotation
     * counts, and close out the new variants file.
     *
     * @param totalOutputVCFRecords the annotated VCF records accumulated during the traversal.
*/
public void onTraversalDone(LinkedList<VCFRecord> totalOutputVCFRecords) {
if(multiThreadedMode) {
//finally write results to disk
for(VCFRecord vcfRecord : totalOutputVCFRecords ) {
vcfWriter.addRecord(vcfRecord);
}
}
//out.printf("Generated %d annotated VCF records.\n", totalOutputVCFRecords);
Map<String, Integer> inputTableHitCounter = engine.getInputTableHitCounter();
for(Entry<String, Integer> e : inputTableHitCounter.entrySet()) {
final String bindingName = e.getKey();
final int counter = e.getValue();
//final float percent = 100 * counter /(float) totalOutputVCFRecords;
//out.printf(" %-6.1f%% (%d) annotated with %s.\n", percent, counter, bindingName );
out.printf(" %d annotated with %s.\n", counter, bindingName );
}
vcfWriter.close();
}
}
| java/src/org/broadinstitute/sting/playground/gatk/walkers/annotator/GenomicAnnotator.java | /*
* Copyright (c) 2010 The Broad Institute
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use,
* copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the
* Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
* OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
* HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
* WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR
* THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
package org.broadinstitute.sting.playground.gatk.walkers.annotator;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
import java.util.Map.Entry;
import org.broad.tribble.vcf.VCFHeader;
import org.broad.tribble.vcf.VCFHeaderLine;
import org.broad.tribble.vcf.VCFRecord;
import org.broadinstitute.sting.commandline.Argument;
import org.broadinstitute.sting.gatk.contexts.AlignmentContext;
import org.broadinstitute.sting.gatk.contexts.ReferenceContext;
import org.broadinstitute.sting.gatk.contexts.StratifiedAlignmentContext;
import org.broadinstitute.sting.gatk.contexts.variantcontext.VariantContext;
import org.broadinstitute.sting.gatk.datasources.simpleDataSources.ReferenceOrderedDataSource;
import org.broadinstitute.sting.gatk.refdata.RefMetaDataTracker;
import org.broadinstitute.sting.gatk.refdata.VariantContextAdaptors;
import org.broadinstitute.sting.gatk.refdata.features.annotator.AnnotatorInputTableCodec;
import org.broadinstitute.sting.gatk.walkers.By;
import org.broadinstitute.sting.gatk.walkers.DataSource;
import org.broadinstitute.sting.gatk.walkers.RodWalker;
import org.broadinstitute.sting.gatk.walkers.annotator.VariantAnnotatorEngine;
import org.broadinstitute.sting.utils.BaseUtils;
import org.broadinstitute.sting.utils.SampleUtils;
import org.broadinstitute.sting.utils.StingException;
import org.broadinstitute.sting.utils.collections.Pair;
import org.broadinstitute.sting.utils.genotype.vcf.VCFUtils;
import org.broadinstitute.sting.utils.genotype.vcf.VCFWriter;
/**
* Annotates variant calls with information from user-specified tabular files.
*
* For details, see: http://www.broadinstitute.org/gsa/wiki/index.php/GenomicAnnotator
*/
//@Requires(value={DataSource.READS, DataSource.REFERENCE},referenceMetaData=@RMD(name="variant",type=VariationRod.class))
//@Allows(value={DataSource.READS, DataSource.REFERENCE})
//@Reference(window=@Window(start=-50,stop=50))
@By(DataSource.REFERENCE)
public class GenomicAnnotator extends RodWalker<Integer, Integer> {
@Argument(fullName="vcfOutput", shortName="vcf", doc="VCF file to which all variants should be written with annotations", required=true)
protected File VCF_OUT;
@Argument(fullName="sampleName", shortName="sample", doc="The sample (NA-ID) corresponding to the variant input (for non-VCF input only)", required=false)
protected String sampleName = null;
@Argument(fullName="select", shortName="s", doc="Optionally specifies which subset of columns from which -B inputs should be used for annotations. For example, -B mydbsnp,AnnotatorInputTable,/path/to/mydbsnp.txt -B mytable,AnnotatorInputTable,/path/mytable.txt -s mydbsnp.avHet,mydbsnp.name,mytable.column3 will cause annotations to only be generated from the 3 columns specified using -s.", required=false)
protected String[] SELECT_COLUMNS = {};
@Argument(fullName="join", shortName="J", doc="Optionally specifies a file and column within that file that should be LEFT-JOIN'ed to a column in a previously-specified file. The file provided to -J must be tab-delimited, with the first non-comment/non-empty line containing column names. (example: -B name,AnnotatorInputTable,/path/to/file1 -J name2,/path/to/file2,name.columnName=name2.columnName2 - this will join the table in file2 to the table in file1) ", required=false)
protected String[] JOIN_ARGS = {};
@Argument(fullName="oneToMany", shortName="m", doc="If more than one record from the same file matches a particular locus (for example, multiple dbSNP records with the same position), create multiple entries in the ouptut VCF file - one for each match. If a particular tabular file has J matches, and another tabular file has K matches for a given locus, then J*K output VCF records will be generated - one for each pair of K, J. If this flag is not provided, the multiple records are still generated, but they are stored in the INFO field of a single output VCF record, with their annotation keys differentiated by appending '_i' with i varying from 1 to K*J. ", required=false)
protected Boolean ONE_TO_MANY = false;
private VCFWriter vcfWriter;
private VariantAnnotatorEngine engine;
private boolean strict = true;
/**
* Prepare the output file and the list of available features.
*/
public void initialize() {
// get the list of all sample names from the various VCF input rods
TreeSet<String> samples = new TreeSet<String>();
SampleUtils.getUniquifiedSamplesFromRods(getToolkit(), samples, new HashMap<Pair<String, String>, String>());
//read all ROD file headers and construct a set of all column names to be used for validation of command-line args
final Set<String> allFullyQualifiedColumnNames = new LinkedHashSet<String>();
final Set<String> allBindingNames = new LinkedHashSet<String>();
try {
for(ReferenceOrderedDataSource ds : getToolkit().getRodDataSources()) {
if(! ds.getReferenceOrderedData().getType().equals(AnnotatorInputTableCodec.class)) {
continue; //skip all non-AnnotatorInputTable files.
}
final String bindingName = ds.getName();
allBindingNames.add(bindingName);
final ArrayList<String> header = AnnotatorInputTableCodec.readHeader(ds.getReferenceOrderedData().getFile());
for(String columnName : header) {
allFullyQualifiedColumnNames.add(bindingName + "." + columnName);
}
}
} catch(IOException e) {
throw new StingException("Failed when attempting to read file header. ", e);
}
        //parse the JOIN_COLUMNS args, read in the specified files, and validate column names in the = relation. The end result of this loop is to populate the List of joinTables with one entry per -J arg.
final List<JoinTable> joinTables = new LinkedList<JoinTable>();
for(String joinArg : JOIN_ARGS) {
//parse the tokens
final String[] arg = joinArg.split(",");
if(arg.length != 3) {
throw new StingException("The following -J arg: \"" + joinArg + "\" must contain 3 comma-separated values. (ex: -J name,/path/to/file,name.columnName=name2.columnName2)");
}
final String bindingName = arg[0];
final String filename = arg[1];
final String columnsToJoin = arg[2];
if(allBindingNames.contains(bindingName)) {
throw new StingException("The name \"" + bindingName + "\" in the -J arg: \"" + joinArg + "\" has already been used.");
}
String[] splitOnEquals = columnsToJoin.split("=+");
if(splitOnEquals.length != 2) {
throw new StingException("The -J arg: \"" + joinArg + "\" must specify the columns to join on. (ex: -J name,/path/to/file,name.columnName=name2.columnName2)");
}
String[] splitOnDot1 = splitOnEquals[0].split("\\.");
String[] splitOnDot2 = splitOnEquals[1].split("\\.");
if(splitOnDot1.length != 2 || splitOnDot2.length != 2) {
throw new StingException("The -J arg: \"" + joinArg + "\" must fully specify the columns to join on. (ex: -J name,/path/to/file,name.columnName=name2.columnName2)");
}
final String bindingName1 = splitOnDot1[0];
final String columnName1 = splitOnDot1[1];
final String bindingName2 = splitOnDot2[0];
final String columnName2 = splitOnDot2[1];
//figure out which of the 2 binding names within the = relation matches the -J bindingName
final String localBindingName = bindingName; //alias
final String localColumnName;
final String externalBindingName;
final String externalColumnName;
if(bindingName1.equals(bindingName)) {
localColumnName = columnName1;
externalBindingName = bindingName2;
externalColumnName = columnName2;
} else if(bindingName2.equals(bindingName)) {
localColumnName = columnName2;
externalBindingName = bindingName1;
externalColumnName = columnName1;
} else {
throw new StingException("The -J arg: \"" + joinArg + "\" must fully specify the columns to join on. (ex: -J name,/path/to/file,name.columnName=name2.columnName2)");
}
//validate externalColumnName
final String fullyQualifiedExternalColumnName = externalBindingName + '.' + externalColumnName;
if( !allFullyQualifiedColumnNames.contains(fullyQualifiedExternalColumnName) ) {
throw new StingException("The -J arg: \"" + joinArg + "\" specifies an unknown column name: \"" + fullyQualifiedExternalColumnName + "\"");
}
//read in the file contents into a JoinTable object
final JoinTable joinTable = new JoinTable();
joinTable.parseFromFile(filename, localBindingName, localColumnName, externalBindingName, externalColumnName, strict);
joinTables.add(joinTable);
//validate localColumnName, and add all column names in this file to the list of allFullyQualifiedColumnNames so that they can be referenced from subsequent -J args.
final List<String> columnNames = joinTable.getColumnNames();
final List<String> fullyQualifiedColumnNames = new LinkedList<String>();
boolean found = false;
for(int i = 0; i < columnNames.size(); i++) {
final String columnName = columnNames.get(i);
if(columnName.equals(localColumnName)) {
found = true;
}
fullyQualifiedColumnNames.add(localBindingName + '.' + columnName);
}
if(!found) {
throw new StingException("The -J arg: \"" + joinArg + "\" specifies an unknown column name: \"" + localColumnName + "\". It's not one of the column names in the header " + columnNames + " of the file: " + filename);
}
allFullyQualifiedColumnNames.addAll(fullyQualifiedColumnNames);
}
//parse the SELECT_COLUMNS arg and validate the column names
List<String> parsedSelectColumns = new LinkedList<String>();
for(String token : SELECT_COLUMNS) {
parsedSelectColumns.addAll(Arrays.asList(token.split(",")));
}
SELECT_COLUMNS = parsedSelectColumns.toArray(SELECT_COLUMNS);
for(String columnName : SELECT_COLUMNS) {
if(!allFullyQualifiedColumnNames.contains(columnName)) {
throw new StingException("The column name '" + columnName + "' provided to -s doesn't match any of the column names in any of the -B files. Here is the list of available column names: " + allFullyQualifiedColumnNames);
}
}
        //instantiate the VariantAnnotatorEngine
engine = new VariantAnnotatorEngine(getToolkit(), new String[] { }, new String[] { "GenomicAnnotation" });
engine.setOneToMany( Boolean.TRUE.equals( ONE_TO_MANY ) );
engine.setRequestedColumns(SELECT_COLUMNS);
engine.setJoinTables(joinTables);
// setup the header fields
Set<VCFHeaderLine> hInfo = new HashSet<VCFHeaderLine>();
hInfo.addAll(VCFUtils.getHeaderFields(getToolkit()));
hInfo.add(new VCFHeaderLine("source", "Annotator"));
hInfo.add(new VCFHeaderLine("annotatorReference", getToolkit().getArguments().referenceFile.getName()));
hInfo.addAll(engine.getVCFAnnotationDescriptions());
vcfWriter = new VCFWriter(VCF_OUT);
VCFHeader vcfHeader = new VCFHeader(hInfo, samples);
vcfWriter.writeHeader(vcfHeader);
}
/**
* Initialize the number of loci processed to zero.
*
* @return 0
*/
public Integer reduceInit() { return 0; }
/**
* We want reads that span deletions
*
* @return true
*/
public boolean includeReadsWithDeletionAtLoci() { return true; }
/**
* For each site of interest, annotate based on the requested annotation types
*
* @param tracker the meta-data tracker
* @param ref the reference base
* @param context the context for the given locus
* @return 1 if the locus was successfully processed, 0 if otherwise
*/
public Integer map(RefMetaDataTracker tracker, ReferenceContext ref, AlignmentContext context) {
if ( tracker == null )
return 0;
List<Object> rods = tracker.getReferenceMetaData("variant");
// ignore places where we don't have a variant
if ( rods.size() == 0 )
return 0;
Object variant = rods.get(0);
if( BaseUtils.isNBase(ref.getBase()) ) {
return 0; //TODO Currently, VariantContextAdaptors.toVCF(annotatedVC, ref.getBase()) fails when base is 'N'. is this right?
}
VariantContext vc = VariantContextAdaptors.toVariantContext("variant", variant, ref);
if ( vc == null )
return 0;
// if the reference base is not ambiguous, we can annotate
Collection<VariantContext> annotatedVCs = Arrays.asList(vc);
if ( BaseUtils.simpleBaseToBaseIndex(ref.getBase()) != -1 ) {
Map<String, StratifiedAlignmentContext> stratifiedContexts = StratifiedAlignmentContext.splitContextBySample(context.getBasePileup());
if ( stratifiedContexts != null ) {
annotatedVCs = engine.annotateContext(tracker, ref, stratifiedContexts, vc);
}
}
if ( variant instanceof VCFRecord) { //TODO is this if-statement necessary?
for(VariantContext annotatedVC : annotatedVCs ) {
vcfWriter.addRecord(VariantContextAdaptors.toVCF(annotatedVC, ref.getBase()));
}
}
return annotatedVCs.size();
}
/**
* Increment the number of loci processed.
*
* @param value result of the map.
* @param sum accumulator for the reduce.
* @return the new number of loci processed.
*/
public Integer reduce(Integer value, Integer sum) {
return sum + value;
}
/**
* Tell the user the number of loci processed and close out the new variants file.
*
* @param result the number of loci seen.
*/
public void onTraversalDone(Integer totalOutputVCFRecords) {
out.printf("Generated %d annotated VCF records.\n", totalOutputVCFRecords);
Map<String, Integer> inputTableHitCounter = engine.getInputTableHitCounter();
for(Entry<String, Integer> e : inputTableHitCounter.entrySet()) {
final String bindingName = e.getKey();
final int counter = e.getValue();
final float percent = 100 * counter /(float) totalOutputVCFRecords;
out.printf(" %-6.1f%% (%d) annotated with %s.\n", percent, counter, bindingName );
}
vcfWriter.close();
}
}
| Implemented TreeReducible - if num threads > 1, the output will be accumulated in memory and written to a vcf file at the end - in onTraversalDone(..).  If num threads == 1, things will work as before - where vcf records are written to disk as soon as they are computed with map(..).
git-svn-id: 4561c0a8f080806b19201efb9525134c00b76d40@3530 348d0f76-0448-11de-a6fe-93d51630548a
| java/src/org/broadinstitute/sting/playground/gatk/walkers/annotator/GenomicAnnotator.java | Implemented TreeReducible - if num threads > 1, the output will be accumulated in memory and written to a vcf file at the end - in onTraveralDone(..). If num threads == 1, things will work as before - where vcf records are written to disk as soon as they are computed with map(..). |
|
Java | mit | f898fe61e767ce486ab242ef843ed72525ca47e4 | 0 | ZeroPage/team6-titan-2015 | package model;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.TreeNode;
public class TreeData {
private TitanDSM dsmData;
private ClusterData cluster;
//initializing with only DSM
public TreeData(File dsmFile) throws IOException, WrongDSMFormatException {
dsmData = new TitanDSM(dsmFile);
cluster = new ClusterData(this.dsmData);
}
public TreeData(int size) throws IOException, WrongDSMFormatException, NotPositiveException {
dsmData = new TitanDSM(size);
cluster = new ClusterData(this.dsmData);
}
//load clsx, and rebuild the data tree structure
public void loadClusterData(File clsxFile) throws IOException, WrongXMLNamespaceException {
cluster = new ClusterData(clsxFile);
cluster.refresh(this.dsmData);
}
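    /**
     * Returns true if any DSM dependency exists between the two tree nodes, resolving
     * group-group, group-item, item-group, and item-item combinations by checking the
     * underlying DSM cells and short-circuiting on the first true value.
     */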
public boolean getDSMValue(DefaultMutableTreeNode rowElement, DefaultMutableTreeNode columnElement) {
boolean result = false;
if(rowElement.getAllowsChildren()) {
if(columnElement.getAllowsChildren()) {
result = getGroupGroupValue(rowElement,columnElement);
} else {
result = getGroupItemValue(rowElement, columnElement);
}
} else {
if(columnElement.getAllowsChildren()) {
result = getItemGroupValue(columnElement, rowElement);
} else {
String row = rowElement.getUserObject().toString();
String column = columnElement.getUserObject().toString();
result = dsmData.getData(row, column);
}
}
return result;
}
private boolean getGroupGroupValue(DefaultMutableTreeNode rGroup, DefaultMutableTreeNode cGroup) {
boolean result = false;
for(int i=0;i<rGroup.getChildCount();i++) {
DefaultMutableTreeNode rItem = (DefaultMutableTreeNode)rGroup.getChildAt(i);
String row = rItem.getUserObject().toString();
for(int j=0;j<cGroup.getChildCount();j++) {
DefaultMutableTreeNode cItem = (DefaultMutableTreeNode)cGroup.getChildAt(j);
String column = cItem.getUserObject().toString();
if(result = dsmData.getData(row, column)) {
return result;
}
}
}
return result;
}
private boolean getGroupItemValue(DefaultMutableTreeNode group, DefaultMutableTreeNode element) {
boolean result = false;
for(int i=0;i<group.getChildCount();i++) {
DefaultMutableTreeNode item = (DefaultMutableTreeNode)group.getChildAt(i);
String row = item.getUserObject().toString();
String column = element.getUserObject().toString();
if(result = dsmData.getData(row,column)) {
return result;
}
}
return result;
}
private boolean getItemGroupValue(DefaultMutableTreeNode element, DefaultMutableTreeNode group) {
boolean result = false;
for(int i=0;i<group.getChildCount();i++) {
DefaultMutableTreeNode item = (DefaultMutableTreeNode)group.getChildAt(i);
String row = element.getUserObject().toString();
String column = item.getUserObject().toString();
if(result = dsmData.getData(row,column)) {
return result;
}
}
return result;
}
public void loadDSM(String dsmFileName) throws IOException, WrongDSMFormatException{
this.dsmData = new TitanDSM(new File(dsmFileName));
if(cluster == null) {
buildDefaultTree();
} else {
cluster.refresh(this.dsmData);
}
}
//rename the element(Group, Item both)
public void renameElement(DefaultMutableTreeNode currentNode, String newName) throws ItemAlreadyExistException, NodeNotFoundException {
if(!currentNode.getAllowsChildren()) {
dsmData.setName(newName, currentNode.getUserObject().toString());
}
cluster.renameNode(currentNode, newName);
}
public void repositionElement(DefaultMutableTreeNode elementNode,int newIndex) throws NodeNotFoundException {
cluster.moveNode(elementNode, newIndex);
        // Does the DSM need to be updated by this method?
}
public void removeElement(DefaultMutableTreeNode elementNode) throws NodeNotFoundException {
if(elementNode.getAllowsChildren()) {
//Case 1: the element was group - subtree has to be deleted.
} else {
//Case 2: the element was item - delete only stated element.
}
cluster.deleteItem(elementNode);
}
public void addElement(DefaultMutableTreeNode groupNode, String itemName) throws NodeNotFoundException {
cluster.addItem(groupNode, itemName);
        // DSM team: please add any DSM-side updates that are needed here.
}
public void groupElement(ArrayList<DefaultMutableTreeNode> elementList, String groupName) {
cluster.newGroupbyNode(elementList, groupName);
}
public void freeGroup(DefaultMutableTreeNode groupNode) throws NodeNotFoundException {
cluster.freeGroup(groupNode);
}
//build temporary cluster with DSM only.
private DefaultMutableTreeNode buildDefaultTree() {
DefaultMutableTreeNode root = new DefaultMutableTreeNode("ROOT",true);
for(int i=0;i<this.dsmData.getSize();i++) {
root.add(new DefaultMutableTreeNode(this.dsmData.getName(i),false));
}
return root;
}
public DefaultMutableTreeNode getTree(){
return this.cluster.getTree();
}
public void saveDSMData(File dsmFile) throws IOException{
this.dsmData.saveToFile(dsmFile);
}
public void saveClusterData(File clusterFile) throws IOException{
this.cluster.saveClusterData(clusterFile);
}
public void saveData(File dsmFile, File clusterFile) throws IOException{
this.dsmData.saveToFile(dsmFile);
this.cluster.saveClusterData(clusterFile);
}
}
| src/model/TreeData.java | package model;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.TreeNode;
public class TreeData {
private TitanDSM dsmData;
private ClusterData cluster;
private TreeNode treeRoot;
//initializing with only DSM
public TreeData(File dsmFile) throws IOException, WrongDSMFormatException {
dsmData = new TitanDSM(dsmFile);
cluster = null;
treeRoot = buildDefaultTree();
}
//load clsx, and rebuild the data tree structure
public void loadClusterData(File clsxFile) throws IOException, WrongXMLNamespaceException {
cluster = new ClusterData(clsxFile);
cluster.refresh(this.dsmData);
treeRoot = cluster.getTree();
}
public boolean getGroupDSM(DefaultMutableTreeNode group, DefaultMutableTreeNode elem) {
boolean result = false;
if(!elem.getAllowsChildren()) {
for(int i=0;i<group.getChildCount();i++) {
DefaultMutableTreeNode groupItem = (DefaultMutableTreeNode)group.getChildAt(i);
String rowName = groupItem.getUserObject().toString();
String colName = elem.getUserObject().toString();
if(result = dsmData.getData(rowName,colName)) {
return result;
}
}
} else {
for(int i=0;i<group.getChildCount();i++) {
DefaultMutableTreeNode groupItem = (DefaultMutableTreeNode)group.getChildAt(i);
String rowName = groupItem.getUserObject().toString();
for(int j=0;j<elem.getChildCount();j++) {
DefaultMutableTreeNode elemItem = (DefaultMutableTreeNode)elem.getChildAt(j);
String colName = elemItem.getUserObject().toString();
if(result = dsmData.getData(rowName, colName)) {
return result;
}
}
}
}
return result;
}
public void loadDSM(String dsmFileName) throws IOException, WrongDSMFormatException{
this.dsmData = new TitanDSM(new File(dsmFileName));
if(cluster == null) {
buildDefaultTree();
} else {
cluster.refresh(this.dsmData);
}
}
public boolean getDSMvalue(DefaultMutableTreeNode row, DefaultMutableTreeNode col) {
String rowName = row.getUserObject().toString();
String colName = col.getUserObject().toString();
return dsmData.getData(rowName,colName);
}
//rename the element(Group, Item both)
public void renameElem(DefaultMutableTreeNode currentNode, String newName) throws ItemAlreadyExistException, NodeNotFoundException {
if(!currentNode.getAllowsChildren()) {
dsmData.setName(newName, currentNode.getUserObject().toString());
}
cluster.renameNode(currentNode, newName);
}
public void repositionElem(DefaultMutableTreeNode elemNode,int newIndex) throws NodeNotFoundException {
cluster.moveNode(elemNode, newIndex);
        // Does the DSM need to be updated by this method?
}
public void removeElem(DefaultMutableTreeNode elemNode) throws NodeNotFoundException {
if(elemNode.getAllowsChildren()) {
//Case 1: the element was group - subtree has to be deleted.
} else {
//Case 2: the element was item - delete only stated element.
}
cluster.deleteItem(elemNode);
}
public void addElem(DefaultMutableTreeNode groupNode, String itemName) throws NodeNotFoundException {
cluster.addItem(groupNode, itemName);
        // DSM team: please add any DSM-side updates that are needed here.
}
public void groupElem(ArrayList<DefaultMutableTreeNode> elemList, String groupName) {
cluster.newGroupbyNode(elemList, groupName);
}
public void freeGroup(DefaultMutableTreeNode groupNode) throws NodeNotFoundException {
cluster.freeGroup(groupNode);
}
//build temporary cluster with DSM only.
private TreeNode buildDefaultTree() {
DefaultMutableTreeNode root = new DefaultMutableTreeNode("ROOT",true);
for(int i=0;i<this.dsmData.getSize();i++) {
root.add(new DefaultMutableTreeNode(this.dsmData.getName(i),false));
}
return root;
}
public TreeNode getTree(){
return this.treeRoot;
}
public void saveDSMData(File dsmFile) throws IOException{
this.dsmData.saveToFile(dsmFile);
}
public void saveClusterData(File clusterFile) throws IOException{
this.cluster.saveClusterData(clusterFile);
}
public void saveData(File dsmFile, File clusterFile) throws IOException{
this.dsmData.saveToFile(dsmFile);
this.cluster.saveClusterData(clusterFile);
}
}
| Total modification on TreeData
1) treeRoot is no use
2) building dsm with size is added
3) getting dsm value is much more efficient(Group, item both available)
4) Elem -> Element
| src/model/TreeData.java | Total modification on TreeData |
|
Java | mit | 36b9fa8b369757df7945d0b6439389708edf0b03 | 0 | bolinfest/chickenfoot,bolinfest/chickenfoot,bolinfest/chickenfoot,bolinfest/chickenfoot,bolinfest/chickenfoot | /*
* Chickenfoot end-user web automation system
*
* Copyright (c) 2004-2007 Massachusetts Institute of Technology
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
* Chickenfoot homepage: http://uid.csail.mit.edu/chickenfoot/
*/
package chickenfoot;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.JarOutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
public final class ExportXpi {
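    /** Maps template file names to their destination paths inside the generated chrome JAR. */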
private static final Map<String, String> TEMPLATE_TO_JAR_PATH;
static {
Map<String, String> map = new HashMap<String, String>();
map.put("contents.rdf", "content/contents.rdf");
map.put("overlay.xul", "content/overlay.xul");
map.put("chickenscratch.xul", "content/chickenscratch.xul");
TEMPLATE_TO_JAR_PATH = Collections.unmodifiableMap(map);
}
private static Map<String, String> TEMPLATE_TO_XPI_PATH;
static {
Map<String, String> map = new HashMap<String, String>();
map.put("Chickenfoot.js", "components/Chickenfoot.js");
map.put("Chicken-bypass.js", "components/Chicken-bypass.js");
// Don't copy this!
// The current thinking is that ChickenfootCommandLineHandler will
// interfere with Chickenfoot, if it is also installed.
// But be aware that we are not copying it.
// map.put("ChickenfootCommandLineHandler.js", "components/ChickenfootCommandLineHandler.js");
map.put("update.template.rdf", "update.rdf");
map.put("install.template.rdf", "install.rdf");
map.put("preferences.js", "defaults/preferences/preferences.js");
        TEMPLATE_TO_XPI_PATH = map; // not wrapped in Collections.unmodifiableMap(map): update.template.rdf may be removed from it later
}
/** Utility class -- do not instantiate */
private ExportXpi() {
}
    /**
     * Builds a Firefox extension (XPI) containing the given triggers and support files.
     *
     * @param xmlString contents of the triggers.xml file to embed
     * @param outputPath path where the .xpi file should be written
     * @param templateTagsArray flattened key/value pairs used to populate the templates
     * @param extensionPath path to the installed Chickenfoot extension - may need to convert
     * from file:///C:/... to something Java understands
     * @param userFiles strings representing file pathnames to copy into the XPI
     * @param iconPath path of the icon to use, or null/empty for the default Chickenfoot icon
     * @return the canonical path of the generated XPI file
     */
public static String xpiTie(String xmlString, String outputPath, String[] templateTagsArray,
String extensionPath, String[] userFiles, String iconPath)
throws IOException {
// deserialize templateTagsArray into a map
Map<String, String> tagMap = new HashMap<String, String>();
for (int i = 0; i < templateTagsArray.length; i += 2) {
tagMap.put(templateTagsArray[i], templateTagsArray[i + 1]);
}
// populate templates in xpi-tie directory
File xpiTieDirectory = new File(extensionPath, "export");
File[] templateFiles = xpiTieDirectory.listFiles();
Map<String, String> fileName2populatedTemplate = new HashMap<String, String>();
for (File template : templateFiles) {
fileName2populatedTemplate.put(template.getName(),
populateTemplate(getFileContents(template), tagMap));
}
//only write the update.rdf file to disk if an update url was specified
if(tagMap.get("EXTENSION_URL") != null && !tagMap.get("EXTENSION_URL").equals("")) {
File extensionDir = (new File(outputPath)).getParentFile();
File updateRdf = new File(extensionDir, "update.rdf");
FileWriter w = new FileWriter(updateRdf);
w.write(fileName2populatedTemplate.get("update.template.rdf"));
w.flush();
w.close();
}
//take it out of the map so that it isn't added to the xpi file
TEMPLATE_TO_XPI_PATH.remove("update.template.rdf");
// write ASCII entries into JAR
File jarFile = File.createTempFile("output", ".jar");
JarOutputStream jarStream = new JarOutputStream(new FileOutputStream(
jarFile));
for (String fileName : TEMPLATE_TO_JAR_PATH.keySet()) {
String jarFilePath = TEMPLATE_TO_JAR_PATH.get(fileName);
JarEntry jarEntry = new JarEntry(jarFilePath);
jarStream.putNextEntry(jarEntry);
jarStream.write(fileName2populatedTemplate.get(fileName).getBytes());
jarStream.closeEntry();
}
//write logo to JAR
if ((iconPath != null) && (iconPath != "")) {
writeFileToJar(iconPath, jarStream, "", "icon.png");
}
else {
File chromeDirectory = new File(extensionPath, "chrome");
JarFile chickenfootChromeJar = new JarFile(new File(chromeDirectory, "chickenfoot.jar"));
JarEntry logo = chickenfootChromeJar.getJarEntry("skin/classic/beak-32.png");
JarEntry insertedLogo = new JarEntry("content/icon.png");
jarStream.putNextEntry(insertedLogo);
InputStream entryStream = chickenfootChromeJar.getInputStream(logo);
try {
// Allocate a buffer for reading the entry data.
byte[] buffer = new byte[1024];
int bytesRead;
// Read the entry data and write it to the output file.
while ((bytesRead = entryStream.read(buffer)) != -1) {
jarStream.write(buffer, 0, bytesRead);
}
} finally {
entryStream.close();
}
jarStream.closeEntry();
}
// write libraries into JAR
// TODO(mbolin): read libraries out of chickenfoot-java.jar instead of libraries directory
// so there does not have to be two copies of each library file in chickenfoot.xpi
File librariesDirectory = new File(extensionPath, "libraries");
File[] libraryFiles = librariesDirectory.listFiles();
for (File library : libraryFiles) {
insertLibraryFileIntoJar(library, jarStream, null);
}
jarStream.finish();
jarStream.close();
// write ASCII entries into XPI
File xpiFile = new File(outputPath);
ZipOutputStream zipStream = new ZipOutputStream(new FileOutputStream(
xpiFile));
for (String fileName : TEMPLATE_TO_XPI_PATH.keySet()) {
String xpiFilePath = TEMPLATE_TO_XPI_PATH.get(fileName);
String contents = fileName2populatedTemplate.get(fileName);
addStringToZip(contents, zipStream, xpiFilePath);
}
// write native libraries into XPI
File componentDirectory = new File(extensionPath, "components");
File[] componentFiles = componentDirectory.listFiles();
for (File component : componentFiles) {
String name = component.getName();
if (name.indexOf("ChickenSleep") != -1) {
addFileToZip(component, zipStream, "components", null);
}
}
//write user files, including trigger files, into XPI
for (int k=0; k<userFiles.length; k++) {
try {
File current = new File(userFiles[k]);
addFileToZip(current, zipStream, null, null);
}
catch(IOException err) {
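                // skip user files that cannot be read instead of failing the whole export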
continue;
}
}
//write triggers.xml into XPI
File tempFile = File.createTempFile("triggers", ".xml");
FileWriter writer = new FileWriter(tempFile);
writer.write(xmlString);
writer.close();
addFileToZip(tempFile, zipStream, null, "triggers.xml");
tempFile.delete();
// write JAR into XPI
addFileToZip(jarFile, zipStream, "chrome", "chickenfoot-xpi-tie.jar");
// copy the "java" directory into the XPI
{
String dirName = "java";
File dir = new File(extensionPath, dirName);
for (File file : dir.listFiles()) {
addFileToZip(file, zipStream, "java", null);
}
}
// close XPI
zipStream.finish();
zipStream.close();
return xpiFile.getCanonicalPath();
}
private static String populateTemplate(String template,
Map<String, String> tagMap) {
// TODO: eliminate this loop by using a lambda-replace
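        // Example: populateTemplate("name=@EXTENSION_NAME@", {EXTENSION_NAME=Foo})
        // yields "name=Foo". Beware that replaceAll() treats the tag value as a regex
        // replacement string, so values containing '$' or '\' would need quoting.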
for (String tag : tagMap.keySet()) {
template = template.replaceAll("@" + tag + "@", tagMap.get(tag));
}
return template;
}
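    // Recursively copies a library file (or directory tree) into the jar under
    // content/libraries/, preserving relative paths; e.g. a file "util/foo.js"
    // (illustrative name) becomes the entry "content/libraries/util/foo.js".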
private static void insertLibraryFileIntoJar(File library, JarOutputStream jarStream, String prefix) throws IOException {
if (library.isDirectory()) {
File[] files = library.listFiles();
prefix = (prefix == null) ? library.getName() : prefix + "/" + library.getName();
for (File f : files) {
insertLibraryFileIntoJar(f, jarStream, prefix);
}
} else {
String path = library.getName();
if (prefix != null) path = prefix + "/" + path;
path = "content/libraries/" + path;
JarEntry jarEntry = new JarEntry(path);
jarStream.putNextEntry(jarEntry);
jarStream.write(getFileContents(library).getBytes());
jarStream.closeEntry();
}
}
private static String getFileContents(File file) throws IOException {
StringBuilder sb = new StringBuilder();
BufferedReader reader = new BufferedReader(new FileReader(file));
char[] chars = new char[1024];
int numRead = 0;
        while ((numRead = reader.read(chars)) != -1) {
            // String.valueOf copies the characters, so the buffer can be reused
            sb.append(String.valueOf(chars, 0, numRead));
        }
reader.close();
return sb.toString();
}
private static void addFileToZip (File currentFile, ZipOutputStream zipStream, String dirName, String newName) throws IOException {
String fileName = "";
if (dirName == null) {
dirName = "";
}
else {dirName += "/";}
if (newName == null) {
fileName = currentFile.getName();
}
else { fileName = newName; }
        if (currentFile.isFile()) {
            FileInputStream in = new FileInputStream(currentFile);
            try {
                zipStream.putNextEntry(new ZipEntry(dirName + fileName));
                byte[] buf = new byte[2048];
                int n;
                while ((n = in.read(buf)) != -1) {
                    zipStream.write(buf, 0, n);
                }
            } finally {
                // close the source stream as well as the entry, so file handles are not leaked
                in.close();
                zipStream.closeEntry();
            }
        }
else {
dirName = dirName + currentFile.getName();
File[] current = currentFile.listFiles();
for (int i=0; i<current.length; i++) {
addFileToZip(current[i], zipStream, dirName, null);
}
}
}
private static void addStringToZip (String contents, ZipOutputStream zipStream, String pathInZip) throws IOException {
zipStream.putNextEntry(new ZipEntry(pathInZip));
zipStream.write(contents.getBytes());
zipStream.closeEntry();
}
private static void writeFileToJar(String filePathname, JarOutputStream jarStream, String dirName, String newName) throws IOException {
File currentFile = new File (filePathname);
String fileName = "";
if (dirName == "") {
dirName = "content/";
}
if (newName == null) {
fileName = currentFile.getName();
}
else { fileName = newName; }
if (currentFile.isFile()) {
FileInputStream entryStream = new FileInputStream(filePathname);
JarEntry newFile = new JarEntry(dirName + fileName);
jarStream.putNextEntry(newFile);
try {
// Allocate a buffer for reading the entry data.
byte[] buffer = new byte[1024];
int bytesRead;
// Read the entry data and write it to the output file.
while ((bytesRead = entryStream.read(buffer)) != -1) {
jarStream.write(buffer, 0, bytesRead);
}
} finally {
entryStream.close();
jarStream.closeEntry();
}
}
else {
dirName = dirName + currentFile.getName() + "/";
File[] current = currentFile.listFiles();
for (int i=0; i<current.length; i++) {
writeFileToJar(current[i].getAbsolutePath(), jarStream, dirName, null);
}
}
}
/**
* Test of ExportXpi -- uses paths hardcoded to mbolin's computer
*/
/* public static void main(String[] args) throws IOException {
String guid = "6";
String chickenfootContractId = "7";
String chickenfootGuid = "9";
String[] templateTags = new String[] {
"EXTENSION_NAME", guid,
"EXTENSION_DISPLAY_NAME", "Foo Bar",
"EXTENSION_AUTHOR", "bolinfest",
"GUID", guid,
"VERSION", "0.1",
"DESCRIPTION", "The best foo.bar in town.",
"DEFAULT_INCLUDES", "\"['*']\"",
"DEFAULT_EXCLUDES", "\"[]\"",
"CHICKENFOOT_CONTRACT_ID", chickenfootContractId,
"CHICKENFOOT_GUID", chickenfootGuid,
"IS_EXPORTED_XPI", "true"
};
String outputPath = xpiTie(
"document.title = 6",
"c:\\my.xpi",
templateTags,
"C:\\Documents and Settings\\mbolin\\Application Data\\Mozilla\\Firefox\\Profiles\\47z749tv.default\\extensions\\{896b34a4-c83f-4ea7-8ef0-51ed7220ac94}\\",
new String[0], "");
System.out.println("Wrote extension to: " + outputPath);
}*/
}
| java/chickenfoot/ExportXpi.java | /*
* Chickenfoot end-user web automation system
*
* Copyright (c) 2004-2007 Massachusetts Institute of Technology
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
* Chickenfoot homepage: http://uid.csail.mit.edu/chickenfoot/
*/
package chickenfoot;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.InputStream;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.jar.JarOutputStream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;
public final class ExportXpi {
private static final Map<String, String> TEMPLATE_TO_JAR_PATH;
static {
Map<String, String> map = new HashMap<String, String>();
map.put("contents.rdf", "content/contents.rdf");
map.put("overlay.xul", "content/overlay.xul");
map.put("chickenscratch.xul", "content/chickenscratch.xul");
TEMPLATE_TO_JAR_PATH = Collections.unmodifiableMap(map);
}
private static final Map<String, String> TEMPLATE_TO_XPI_PATH;
static {
Map<String, String> map = new HashMap<String, String>();
map.put("Chickenfoot.js", "components/Chickenfoot.js");
map.put("Chicken-bypass.js", "components/Chicken-bypass.js");
// Don't copy this!
// The current thinking is that ChickenfootCommandLineHandler will
// interfere with Chickenfoot, if it is also installed.
// But be aware that we are not copying it.
// map.put("ChickenfootCommandLineHandler.js", "components/ChickenfootCommandLineHandler.js");
map.put("install.template.rdf", "install.rdf");
map.put("preferences.js", "defaults/preferences/preferences.js");
TEMPLATE_TO_XPI_PATH = Collections.unmodifiableMap(map);
}
/** Utility class -- do not instantiate */
private ExportXpi() {
}
/**
*
* @param xmlStringsArray
* @param outputPath
* @param templateTagsArray
* @param extensionPath - may need to convert from file:///C:/... to
* something Java understands
* @param userFilesArray -of strings representing file pathnames
* @param iconPath
* @return
*/
public static String xpiTie(String xmlString, String outputPath, String[] templateTagsArray,
String extensionPath, String[] userFiles, String iconPath)
throws IOException {
// deserialize templateTagsArray into a map
Map<String, String> tagMap = new HashMap<String, String>();
for (int i = 0; i < templateTagsArray.length; i += 2) {
tagMap.put(templateTagsArray[i], templateTagsArray[i + 1]);
}
// populate templates in xpi-tie directory
File xpiTieDirectory = new File(extensionPath, "export");
File[] templateFiles = xpiTieDirectory.listFiles();
Map<String, String> fileName2populatedTemplate = new HashMap<String, String>();
for (File template : templateFiles) {
fileName2populatedTemplate.put(template.getName(),
populateTemplate(getFileContents(template), tagMap));
}
// write ASCII entries into JAR
File jarFile = File.createTempFile("output", ".jar");
JarOutputStream jarStream = new JarOutputStream(new FileOutputStream(
jarFile));
for (String fileName : TEMPLATE_TO_JAR_PATH.keySet()) {
String jarFilePath = TEMPLATE_TO_JAR_PATH.get(fileName);
JarEntry jarEntry = new JarEntry(jarFilePath);
jarStream.putNextEntry(jarEntry);
jarStream.write(fileName2populatedTemplate.get(fileName).getBytes());
jarStream.closeEntry();
}
//write logo to JAR
if ((iconPath != null) && (iconPath != "")) {
writeFileToJar(iconPath, jarStream, "", "icon.png");
}
else {
File chromeDirectory = new File(extensionPath, "chrome");
JarFile chickenfootChromeJar = new JarFile(new File(chromeDirectory, "chickenfoot.jar"));
JarEntry logo = chickenfootChromeJar.getJarEntry("skin/classic/beak-32.png");
JarEntry insertedLogo = new JarEntry("content/icon.png");
jarStream.putNextEntry(insertedLogo);
InputStream entryStream = chickenfootChromeJar.getInputStream(logo);
try {
// Allocate a buffer for reading the entry data.
byte[] buffer = new byte[1024];
int bytesRead;
// Read the entry data and write it to the output file.
while ((bytesRead = entryStream.read(buffer)) != -1) {
jarStream.write(buffer, 0, bytesRead);
}
} finally {
entryStream.close();
}
jarStream.closeEntry();
}
// write libraries into JAR
// TODO(mbolin): read libraries out of chickenfoot-java.jar instead of libraries directory
// so there does not have to be two copies of each library file in chickenfoot.xpi
File librariesDirectory = new File(extensionPath, "libraries");
File[] libraryFiles = librariesDirectory.listFiles();
for (File library : libraryFiles) {
insertLibraryFileIntoJar(library, jarStream, null);
}
jarStream.finish();
jarStream.close();
// write ASCII entries into XPI
File xpiFile = new File(outputPath);
ZipOutputStream zipStream = new ZipOutputStream(new FileOutputStream(
xpiFile));
for (String fileName : TEMPLATE_TO_XPI_PATH.keySet()) {
String xpiFilePath = TEMPLATE_TO_XPI_PATH.get(fileName);
String contents = fileName2populatedTemplate.get(fileName);
addStringToZip(contents, zipStream, xpiFilePath);
}
// write native libraries into XPI
File componentDirectory = new File(extensionPath, "components");
File[] componentFiles = componentDirectory.listFiles();
for (File component : componentFiles) {
String name = component.getName();
if (name.indexOf("ChickenSleep") != -1) {
addFileToZip(component, zipStream, "components", null);
}
}
//write user files, including trigger files, into XPI
for (int k=0; k<userFiles.length; k++) {
try {
File current = new File(userFiles[k]);
addFileToZip(current, zipStream, null, null);
}
catch(IOException err) {
continue;
}
}
//write triggers.xml into XPI
File tempFile = File.createTempFile("triggers", ".xml");
FileWriter writer = new FileWriter(tempFile);
writer.write(xmlString);
writer.close();
addFileToZip(tempFile, zipStream, null, "triggers.xml");
tempFile.delete();
// write JAR into XPI
addFileToZip(jarFile, zipStream, "chrome", "chickenfoot-xpi-tie.jar");
// copy the "java" directory into the XPI
{
String dirName = "java";
File dir = new File(extensionPath, dirName);
for (File file : dir.listFiles()) {
addFileToZip(file, zipStream, "java", null);
}
}
// close XPI
zipStream.finish();
zipStream.close();
return xpiFile.getCanonicalPath();
}
private static String populateTemplate(String template,
Map<String, String> tagMap) {
// TODO: eliminate this loop by using a lambda-replace
for (String tag : tagMap.keySet()) {
template = template.replaceAll("@" + tag + "@", tagMap.get(tag));
}
return template;
}
private static void insertLibraryFileIntoJar(File library, JarOutputStream jarStream, String prefix) throws IOException {
if (library.isDirectory()) {
File[] files = library.listFiles();
prefix = (prefix == null) ? library.getName() : prefix + "/" + library.getName();
for (File f : files) {
insertLibraryFileIntoJar(f, jarStream, prefix);
}
} else {
String path = library.getName();
if (prefix != null) path = prefix + "/" + path;
path = "content/libraries/" + path;
JarEntry jarEntry = new JarEntry(path);
jarStream.putNextEntry(jarEntry);
jarStream.write(getFileContents(library).getBytes());
jarStream.closeEntry();
}
}
private static String getFileContents(File file) throws IOException {
StringBuilder sb = new StringBuilder();
BufferedReader reader = new BufferedReader(new FileReader(file));
char[] chars = new char[1024];
int numRead = 0;
while((numRead=reader.read(chars)) != -1){
String readData = String.valueOf(chars, 0, numRead);
sb.append(readData);
chars = new char[1024];
}
reader.close();
return sb.toString();
}
private static void addFileToZip (File currentFile, ZipOutputStream zipStream, String dirName, String newName) throws IOException {
String fileName = "";
if (dirName == null) {
dirName = "";
}
else {dirName += "/";}
if (newName == null) {
fileName = currentFile.getName();
}
else { fileName = newName; }
        if (currentFile.isFile()) {
            FileInputStream in = new FileInputStream(currentFile);
            try {
                zipStream.putNextEntry(new ZipEntry(dirName + fileName));
                byte[] buf = new byte[2048];
                int n;
                while ((n = in.read(buf)) != -1) {
                    zipStream.write(buf, 0, n);
                }
            } finally {
                // close the source stream as well as the entry, so file handles are not leaked
                in.close();
                zipStream.closeEntry();
            }
        }
else {
dirName = dirName + currentFile.getName();
File[] current = currentFile.listFiles();
for (int i=0; i<current.length; i++) {
addFileToZip(current[i], zipStream, dirName, null);
}
}
}
private static void addStringToZip (String contents, ZipOutputStream zipStream, String pathInZip) throws IOException {
zipStream.putNextEntry(new ZipEntry(pathInZip));
zipStream.write(contents.getBytes());
zipStream.closeEntry();
}
private static void writeFileToJar(String filePathname, JarOutputStream jarStream, String dirName, String newName) throws IOException {
File currentFile = new File (filePathname);
String fileName = "";
if (dirName == "") {
dirName = "content/";
}
if (newName == null) {
fileName = currentFile.getName();
}
else { fileName = newName; }
if (currentFile.isFile()) {
FileInputStream entryStream = new FileInputStream(filePathname);
JarEntry newFile = new JarEntry(dirName + fileName);
jarStream.putNextEntry(newFile);
try {
// Allocate a buffer for reading the entry data.
byte[] buffer = new byte[1024];
int bytesRead;
// Read the entry data and write it to the output file.
while ((bytesRead = entryStream.read(buffer)) != -1) {
jarStream.write(buffer, 0, bytesRead);
}
} finally {
entryStream.close();
jarStream.closeEntry();
}
}
else {
dirName = dirName + currentFile.getName() + "/";
File[] current = currentFile.listFiles();
for (int i=0; i<current.length; i++) {
writeFileToJar(current[i].getAbsolutePath(), jarStream, dirName, null);
}
}
}
/**
* Test of ExportXpi -- uses paths hardcoded to mbolin's computer
*/
/* public static void main(String[] args) throws IOException {
String guid = "6";
String chickenfootContractId = "7";
String chickenfootGuid = "9";
String[] templateTags = new String[] {
"EXTENSION_NAME", guid,
"EXTENSION_DISPLAY_NAME", "Foo Bar",
"EXTENSION_AUTHOR", "bolinfest",
"GUID", guid,
"VERSION", "0.1",
"DESCRIPTION", "The best foo.bar in town.",
"DEFAULT_INCLUDES", "\"['*']\"",
"DEFAULT_EXCLUDES", "\"[]\"",
"CHICKENFOOT_CONTRACT_ID", chickenfootContractId,
"CHICKENFOOT_GUID", chickenfootGuid,
"IS_EXPORTED_XPI", "true"
};
String outputPath = xpiTie(
"document.title = 6",
"c:\\my.xpi",
templateTags,
"C:\\Documents and Settings\\mbolin\\Application Data\\Mozilla\\Firefox\\Profiles\\47z749tv.default\\extensions\\{896b34a4-c83f-4ea7-8ef0-51ed7220ac94}\\",
new String[0], "");
System.out.println("Wrote extension to: " + outputPath);
}*/
}
| changes to java file may not have been committed for extension packager bug fixes
git-svn-id: f53f06c20de4bf2ad40f5e64829b4f49e42340ba@1202 21796ef0-4f62-4552-8d1b-68394bc7e1f1
| java/chickenfoot/ExportXpi.java | changes to java file may not have been committed for extension packager bug fixes |
|
Java | mit | 8d9b2d16acbcab1aaaa00887dd7f92bb381bc493 | 0 | Elecs-Mods/RFTools,ReneMuetti/RFTools,McJty/RFTools | package mcjty.rftools.blocks.security;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import mcjty.entity.GenericTileEntity;
import mcjty.rftools.RFTools;
import mcjty.rftools.network.PacketHandler;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.server.MinecraftServer;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.EnumChatFormatting;
import net.minecraft.world.World;
import org.lwjgl.input.Keyboard;
import java.util.List;
public class SecurityCardItem extends Item {
public static String channelNameFromServer = "";
private static long lastTime = 0;
public SecurityCardItem() {
setMaxStackSize(1);
}
@Override
public int getMaxItemUseDuration(ItemStack stack) {
return 1;
}
@SideOnly(Side.CLIENT)
@Override
public void addInformation(ItemStack itemStack, EntityPlayer player, List list, boolean whatIsThis) {
super.addInformation(itemStack, player, list, whatIsThis);
NBTTagCompound tagCompound = itemStack.getTagCompound();
int channel = -1;
if (tagCompound != null && tagCompound.hasKey("channel")) {
channel = tagCompound.getInteger("channel");
}
if (channel != -1) {
if (System.currentTimeMillis() - lastTime > 250) {
lastTime = System.currentTimeMillis();
PacketHandler.INSTANCE.sendToServer(new PacketGetSecurityName(channel));
}
list.add(EnumChatFormatting.YELLOW + "Channel: " + channel + " (" + channelNameFromServer + ")");
} else {
list.add(EnumChatFormatting.YELLOW + "Channel is not set!");
}
if (Keyboard.isKeyDown(Keyboard.KEY_LSHIFT) || Keyboard.isKeyDown(Keyboard.KEY_RSHIFT)) {
list.add(EnumChatFormatting.WHITE + "Manage security channels in the Security Manager");
list.add(EnumChatFormatting.WHITE + "and link this card to a channel. Sneak right-click");
list.add(EnumChatFormatting.WHITE + "a block to link the channel to that block.");
list.add(EnumChatFormatting.WHITE + "If you want to copy the channel from a block to");
list.add(EnumChatFormatting.WHITE + "a card you can right click with an unlinked card");
} else {
list.add(EnumChatFormatting.WHITE + RFTools.SHIFT_MESSAGE);
}
}
@Override
public boolean onItemUse(ItemStack stack, EntityPlayer player, World world, int x, int y, int z, int side, float sx, float sy, float sz) {
if (!world.isRemote) {
TileEntity te = world.getTileEntity(x, y, z);
if (te instanceof GenericTileEntity) {
GenericTileEntity genericTileEntity = (GenericTileEntity) te;
if (genericTileEntity.getOwnerUUID() == null) {
RFTools.message(player, EnumChatFormatting.RED + "This block has no owner!");
} else {
NBTTagCompound tagCompound = stack.getTagCompound();
if (tagCompound == null || !tagCompound.hasKey("channel")) {
int blockSecurity = genericTileEntity.getSecurityChannel();
if (blockSecurity == -1) {
RFTools.message(player, EnumChatFormatting.RED + "This security card is not setup correctly!");
} else {
if (tagCompound == null) {
tagCompound = new NBTTagCompound();
stack.setTagCompound(tagCompound);
}
tagCompound.setInteger("channel", blockSecurity);
RFTools.message(player, EnumChatFormatting.RED + "Copied security channel from block to card!");
}
} else {
int channel = tagCompound.getInteger("channel");
if (player.capabilities.isCreativeMode || MinecraftServer.getServer().getConfigurationManager().func_152596_g(player.getGameProfile())) {
toggleSecuritySettings(player, genericTileEntity, channel);
} else if (genericTileEntity.getOwnerUUID().equals(player.getPersistentID())) {
toggleSecuritySettings(player, genericTileEntity, channel);
} else {
RFTools.message(player, EnumChatFormatting.RED + "You cannot change security settings of a block you don't own!");
}
}
}
} else {
RFTools.message(player, EnumChatFormatting.RED + "Security is not supported on this block!");
}
return true;
}
return true;
}
private void toggleSecuritySettings(EntityPlayer player, GenericTileEntity genericTileEntity, int channel) {
int sec = genericTileEntity.getSecurityChannel();
if (sec == channel) {
genericTileEntity.setSecurityChannel(-1);
RFTools.message(player, "Security settings cleared from block!");
} else {
genericTileEntity.setSecurityChannel(channel);
RFTools.message(player, "Security settings applied on block!");
}
}
} | src/main/java/mcjty/rftools/blocks/security/SecurityCardItem.java | package mcjty.rftools.blocks.security;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
import mcjty.entity.GenericTileEntity;
import mcjty.rftools.RFTools;
import mcjty.rftools.network.PacketHandler;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.Item;
import net.minecraft.item.ItemStack;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.server.MinecraftServer;
import net.minecraft.tileentity.TileEntity;
import net.minecraft.util.EnumChatFormatting;
import net.minecraft.world.World;
import org.lwjgl.input.Keyboard;
import java.util.List;
public class SecurityCardItem extends Item {
public static String channelNameFromServer = "";
private static long lastTime = 0;
public SecurityCardItem() {
setMaxStackSize(1);
}
@Override
public int getMaxItemUseDuration(ItemStack stack) {
return 1;
}
@SideOnly(Side.CLIENT)
@Override
public void addInformation(ItemStack itemStack, EntityPlayer player, List list, boolean whatIsThis) {
super.addInformation(itemStack, player, list, whatIsThis);
NBTTagCompound tagCompound = itemStack.getTagCompound();
int channel = -1;
if (tagCompound != null && tagCompound.hasKey("channel")) {
channel = tagCompound.getInteger("channel");
}
if (channel != -1) {
if (System.currentTimeMillis() - lastTime > 250) {
lastTime = System.currentTimeMillis();
PacketHandler.INSTANCE.sendToServer(new PacketGetSecurityName(channel));
}
list.add(EnumChatFormatting.YELLOW + "Channel: " + channel + " (" + channelNameFromServer + ")");
} else {
list.add(EnumChatFormatting.YELLOW + "Channel is not set!");
}
if (Keyboard.isKeyDown(Keyboard.KEY_LSHIFT) || Keyboard.isKeyDown(Keyboard.KEY_RSHIFT)) {
list.add(EnumChatFormatting.WHITE + "Manage security channels in the Security Manager");
list.add(EnumChatFormatting.WHITE + "and link this card to a channel. Sneak right-click");
list.add(EnumChatFormatting.WHITE + "a block to link the channel to that block");
} else {
list.add(EnumChatFormatting.WHITE + RFTools.SHIFT_MESSAGE);
}
}
@Override
public boolean onItemUse(ItemStack stack, EntityPlayer player, World world, int x, int y, int z, int side, float sx, float sy, float sz) {
if (!world.isRemote) {
NBTTagCompound tagCompound = stack.getTagCompound();
if (tagCompound == null || !tagCompound.hasKey("channel")) {
RFTools.message(player, EnumChatFormatting.RED + "This security card is not setup correctly!");
}
int channel = tagCompound.getInteger("channel");
TileEntity te = world.getTileEntity(x, y, z);
if (te instanceof GenericTileEntity) {
GenericTileEntity genericTileEntity = (GenericTileEntity) te;
if (genericTileEntity.getOwnerUUID() == null) {
RFTools.message(player, EnumChatFormatting.RED + "This block has no owner!");
} else {
if (player.capabilities.isCreativeMode || MinecraftServer.getServer().getConfigurationManager().func_152596_g(player.getGameProfile())) {
int sec = genericTileEntity.getSecurityChannel();
if (sec == channel) {
genericTileEntity.setSecurityChannel(-1);
RFTools.message(player, "Security settings cleared!");
} else {
genericTileEntity.setSecurityChannel(channel);
RFTools.message(player, "Security settings applied!");
}
} else if (genericTileEntity.getOwnerUUID().equals(player.getPersistentID())) {
int sec = genericTileEntity.getSecurityChannel();
if (sec == channel) {
genericTileEntity.setSecurityChannel(-1);
RFTools.message(player, "Security settings cleared!");
} else {
genericTileEntity.setSecurityChannel(channel);
RFTools.message(player, "Security settings applied!");
}
} else {
RFTools.message(player, EnumChatFormatting.RED + "You cannot change security settings of a block you don't own!");
}
}
} else {
RFTools.message(player, EnumChatFormatting.RED + "Onwership is not supported on this block!");
}
return true;
}
return true;
}
} | Added the possibility to copy a security channel from a block to a card
| src/main/java/mcjty/rftools/blocks/security/SecurityCardItem.java | Added the possibility to copy a security channel from a block to a card |
|
Java | mit | e3f22b2ac22f340d64812c3be77669988fba3bb7 | 0 | nls-oskari/oskari-server,nls-oskari/oskari-server,nls-oskari/oskari-server | package fi.nls.oskari.control.admin;
import fi.nls.oskari.annotation.OskariActionRoute;
import fi.nls.oskari.control.*;
import fi.nls.oskari.domain.map.OskariLayer;
import fi.nls.oskari.log.LogFactory;
import fi.nls.oskari.log.Logger;
import fi.nls.oskari.map.layer.OskariLayerService;
import fi.nls.oskari.service.OskariComponentManager;
import fi.nls.oskari.service.ServiceRuntimeException;
import fi.nls.oskari.util.ResponseHelper;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.*;
@OskariActionRoute("LayerAdminUsageCheck")
public class LayerAdminUsageCheckHandler extends RestActionHandler {
private static final Logger LOG = LogFactory.getLogger(LayerAdminUsageCheckHandler.class);
private static final String PARAM_LAYER_ID = "id";
private OskariLayerService mapLayerService;
@Override
public void init() {
try {
mapLayerService = OskariComponentManager.getComponentOfType(OskariLayerService.class);
} catch (Exception e) {
throw new ServiceRuntimeException("Exception occured while initializing map layer service", e);
}
}
@Override
public void handleGet(ActionParameters params) throws ActionException {
params.requireAdminUser();
final int layerId = params.getRequiredParamInt(PARAM_LAYER_ID);
LOG.info("Checking layer usage in other layers for layerId: ", layerId);
Map<String, Set<Integer>> layerUsages = new HashMap<>();
Set<Integer> timeseriesLayerIds = getTimeseriesLayerIds(layerId);
layerUsages.put("timeseries", timeseriesLayerIds);
ResponseHelper.writeResponse(params, new JSONObject(layerUsages));
}
private Set<Integer> getTimeseriesLayerIds(int layerId) throws ActionException {
Set<Integer> timeseriesLayerIds = new HashSet<>();
for (OskariLayer layer : mapLayerService.findAll()) {
JSONObject options = layer.getOptions();
try {
if (options != null && options.has("timeseries")) {
JSONObject timeseriesOptions = options.getJSONObject("timeseries");
JSONObject timeseriesMetadata = timeseriesOptions.optJSONObject("metadata");
if (timeseriesMetadata == null) {
continue;
}
Integer metadataLayerId = timeseriesMetadata.getInt("layer");
if (metadataLayerId == layerId) {
timeseriesLayerIds.add(layer.getId());
}
}
} catch (JSONException e) {
throw new ActionException("Cannot parse layer metadata options for layer: " +
layer.getName() + ", id: " + layer.getId());
}
}
return timeseriesLayerIds;
}
}
| control-admin/src/main/java/fi/nls/oskari/control/admin/LayerAdminUsageCheckHandler.java | package fi.nls.oskari.control.admin;
import fi.nls.oskari.annotation.OskariActionRoute;
import fi.nls.oskari.control.*;
import fi.nls.oskari.domain.map.OskariLayer;
import fi.nls.oskari.log.LogFactory;
import fi.nls.oskari.log.Logger;
import fi.nls.oskari.map.layer.OskariLayerService;
import fi.nls.oskari.service.OskariComponentManager;
import fi.nls.oskari.service.ServiceRuntimeException;
import fi.nls.oskari.util.ResponseHelper;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.*;
@OskariActionRoute("LayerAdminUsageCheck")
public class LayerAdminUsageCheckHandler extends RestActionHandler {
private static final Logger LOG = LogFactory.getLogger(LayerAdminUsageCheckHandler.class);
private static final String PARAM_LAYER_ID = "id";
private OskariLayerService mapLayerService;
@Override
public void init() {
try {
mapLayerService = OskariComponentManager.getComponentOfType(OskariLayerService.class);
} catch (Exception e) {
throw new ServiceRuntimeException("Exception occured while initializing map layer service", e);
}
}
@Override
public void handleGet(ActionParameters params) throws ActionException {
params.requireAdminUser();
final int layerId = params.getRequiredParamInt(PARAM_LAYER_ID);
LOG.info("Checking layer usage in other layers for layerId: ", layerId);
Map<String, Set<Integer>> layerUsages = new HashMap<>();
Set<Integer> timeseriesLayerIds = getTimeseriesLayerIds(layerId);
layerUsages.put("timeseries", timeseriesLayerIds);
ResponseHelper.writeResponse(params, layerUsages);
}
private Set<Integer> getTimeseriesLayerIds(int layerId) throws ActionException {
Set<Integer> timeseriesLayerIds = new HashSet<>();
for (OskariLayer layer : mapLayerService.findAll()) {
JSONObject options = layer.getOptions();
try {
if (options != null && options.has("timeseries")) {
JSONObject timeseriesOptions = options.getJSONObject("timeseries");
Integer metadataLayerId = timeseriesOptions.getJSONObject("metadata").getInt("layer");
if (metadataLayerId == layerId) {
timeseriesLayerIds.add(layer.getId());
}
}
} catch (JSONException e) {
throw new ActionException("Cannot parse layer metadata options for layer: " +
layer.getName() + ", id: " + layer.getId());
}
}
return timeseriesLayerIds;
}
}
| Use JSONObject in response. Better JSON handling
| control-admin/src/main/java/fi/nls/oskari/control/admin/LayerAdminUsageCheckHandler.java | Use JSONObject in response. Better JSON handling |
|
Java | mit | 82c57bd77a4a657625826dc8d2cf891ed213a60e | 0 | doychin/webstart,mojohaus/webstart,mojohaus/webstart,doychin/webstart | /*
* Copyright 2001-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License" );
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.mojo.webstart;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.lang.SystemUtils;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.resolver.ArtifactResolver;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugin.jar.JarSignVerifyMojo;
import org.apache.maven.project.MavenProject;
import org.codehaus.plexus.archiver.manager.ArchiverManager;
import org.codehaus.plexus.util.DirectoryScanner;
import org.codehaus.plexus.util.FileUtils;
/**
* The superclass for all JNLP generating MOJOs.
*
* @author Kevin Stembridge
* @author $LastChangedBy$
* @since 28 May 2007
* @version $Revision$
*
*/
public abstract class AbstractBaseJnlpMojo extends AbstractMojo
{
private static final String DEFAULT_RESOURCES_DIR = "src/main/jnlp/resources";
/** unprocessed files (that will be signed) are prefixed with this */
private static final String UNPROCESSED_PREFIX = "unprocessed_";
/**
* Artifact resolver, needed to download source jars for inclusion in classpath.
*
* @component
* @required
* @readonly
*/
private ArtifactResolver artifactResolver;
/**
* Artifact factory, needed to download source jars for inclusion in classpath.
*
* @component
* @required
* @readonly
*/
private ArtifactFactory artifactFactory;
/**
* @parameter expression="${localRepository}"
* @required
* @readonly
*/
private ArtifactRepository localRepository;
/**
* The collection of remote artifact repositories.
*
* @parameter expression="${project.remoteArtifactRepositories}"
* @readonly
* @required
*/
private List remoteRepositories;
/**
* The directory in which files will be stored prior to processing.
*
* @parameter expression="${project.build.directory}/jnlp"
* @required
*/
private File workDirectory;
/**
* The path where the libraries are placed within the jnlp structure.
*
* @parameter expression=""
*/
protected String libPath;
/**
* The location of the directory (relative or absolute) containing non-jar resources that
* are to be included in the JNLP bundle.
*
* @parameter
*/
private File resourcesDirectory;
/**
* The location where the JNLP Velocity template files are stored.
*
* @parameter expression="${project.basedir}/src/main/jnlp"
* @required
*/
private File templateDirectory;
/**
* Indicates whether or not jar resources should be compressed
* using pack200. Setting this value to true requires SDK 5.0 or greater.
*
* @parameter default-value="false"
*/
private boolean pack200;
/**
* The Sign Config
*
* @parameter implementation="org.codehaus.mojo.webstart.JarSignMojoConfig"
*/
private SignConfig sign;
/**
* Indicates whether or not jar files should be verified after signing.
*
* @parameter default-value="true"
*/
private boolean verifyjar;
/**
* Indicates whether or not gzip archives will be created for each of the jar
* files included in the webstart bundle.
*
* @parameter default-value="false"
*/
private boolean gzip;
/**
* Enable verbose output.
*
* @parameter expression="${verbose}" default-value="false"
*/
private boolean verbose;
/**
* Set to true to exclude all transitive dependencies.
*
* @parameter
*/
private boolean excludeTransitive;
private final List modifiedJnlpArtifacts = new ArrayList();
// the jars to sign and pack are selected if they are prefixed by UNPROCESSED_PREFIX.
// as the plugin copies the new versions locally before signing/packing them
// we just need to see if the plugin copied a new version
// We achieve that by only filtering files modified after the plugin was started
// Note: if other files (the pom, the keystore config) have changed, one needs to clean
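    // e.g. a jar first copied as "unprocessed_commons-io-1.2.jar" (illustrative
    // name) becomes "commons-io-1.2.jar" again once it has been signed/processed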
private final FileFilter unprocessedJarFileFilter;
private final FileFilter processedJarFileFilter;
private final FileFilter unprocessedPack200FileFilter;
/**
* Define whether to remove existing signatures.
*
* @parameter alias="unsign" default-value="false"
*/
private boolean unsignAlreadySignedJars;
/**
* To look up Archiver/UnArchiver implementations
*
* @parameter expression="${component.org.codehaus.plexus.archiver.manager.ArchiverManager}"
* @required
*/
protected ArchiverManager archiverManager;
/**
* Creates a new {@code AbstractBaseJnlpMojo}.
*/
public AbstractBaseJnlpMojo()
{
processedJarFileFilter = new FileFilter() {
public boolean accept( File pathname )
{
return pathname.isFile() && pathname.getName().endsWith( ".jar" )
&& ! pathname.getName().startsWith( UNPROCESSED_PREFIX );
}
};
unprocessedJarFileFilter = new FileFilter() {
public boolean accept( File pathname )
{
return pathname.isFile() && pathname.getName().startsWith( UNPROCESSED_PREFIX )
&& pathname.getName().endsWith( ".jar" );
}
};
unprocessedPack200FileFilter = new UnprocessedPack200FileFilter();
}
protected void makeWorkingDirIfNecessary() throws MojoExecutionException
{
if ( !getWorkDirectory().exists() && !getWorkDirectory().mkdirs() )
{
throw new MojoExecutionException( "Failed to create: " + getWorkDirectory().getAbsolutePath() );
}
// check and create the library path
if (!getLibDirectory().exists() && !getLibDirectory().mkdirs())
{
throw new MojoExecutionException("Failed to create: " + getLibDirectory().getAbsolutePath());
}
}
public abstract MavenProject getProject();
/**
* Returns the working directory. This is the directory in which files and resources
* will be placed in order to be processed prior to packaging.
* @return Returns the value of the workDirectory field.
*/
protected File getWorkDirectory()
{
return workDirectory;
}
/**
     * Returns the library directory. If no libPath is configured, the working directory is returned.
* @return Returns the value of the libraryDirectory field.
*/
protected File getLibDirectory() {
if (getLibPath() != null) {
return new File(getWorkDirectory(), getLibPath());
}
return getWorkDirectory();
}
/**
     * Returns the library path. This is the subpath within the working directory where the libraries are placed.
* If the path is not configured it is <code>null</code>.
* @return the library path or <code>null</code> if not configured.
*/
public String getLibPath() {
if (( libPath == null ) || ( libPath.trim().length() == 0 )){
return null;
}
return libPath;
}
/**
* Returns the location of the directory containing
* non-jar resources that are to be included in the JNLP bundle.
*
* @return Returns the value of the resourcesDirectory field, never null.
*/
protected File getResourcesDirectory()
{
if ( resourcesDirectory == null )
{
resourcesDirectory = new File(getProject().getBasedir(), DEFAULT_RESOURCES_DIR );
}
return resourcesDirectory;
}
/**
* Returns the file handle to the directory containing the Velocity templates for the JNLP
* files to be generated.
* @return Returns the value of the templateDirectory field.
*/
protected File getTemplateDirectory()
{
return templateDirectory;
}
/**
* Returns the ArtifactFactory that can be used to create artifacts that
* need to be retrieved from maven artifact repositories.
* @return Returns the value of the artifactFactory field.
*/
protected ArtifactFactory getArtifactFactory()
{
return artifactFactory;
}
/**
* Returns the ArtifactResolver that can be used to retrieve artifacts
* from maven artifact repositories.
* @return Returns the value of the artifactResolver field.
*/
protected ArtifactResolver getArtifactResolver()
{
return artifactResolver;
}
/**
* Returns the local artifact repository.
* @return Returns the value of the localRepository field.
*/
protected ArtifactRepository getLocalRepository()
{
return localRepository;
}
/**
* Returns the collection of remote artifact repositories for the current
* Maven project.
* @return Returns the value of the remoteRepositories field.
*/
protected List getRemoteRepositories()
{
return remoteRepositories;
}
/**
* Returns the flag that indicates whether or not jar resources
* will be compressed using pack200.
*
* @return Returns the value of the pack200 field.
*/
public boolean isPack200()
{
return pack200;
}
/**
* Returns jar signing configuration element.
* @return Returns the value of the sign field.
*/
protected SignConfig getSign()
{
return sign;
}
/**
* Returns the flag that indicates whether or not a gzip should be
* created for each jar resource.
* @return Returns the value of the gzip field.
*/
protected boolean isGzip()
{
return gzip;
}
/**
* Returns the flag that indicates whether or not to provide verbose output.
* @return Returns the value of the verbose field.
*/
protected boolean isVerbose()
{
return verbose;
}
/**
* Returns the flag that indicates whether or not jars should be verified after signing.
* @return Returns the value of the verifyjar field.
*/
protected boolean isVerifyjar()
{
return verifyjar;
}
/**
* Returns the flag that indicates whether or not all transitive dependencies will be excluded
* from the generated JNLP bundle.
* @return Returns the value of the excludeTransitive field.
*/
protected boolean isExcludeTransitive()
{
return this.excludeTransitive;
}
/**
* Returns the collection of artifacts that have been modified
* since the last time this mojo was run.
* @return Returns the value of the modifiedJnlpArtifacts field.
*/
protected List getModifiedJnlpArtifacts()
{
return modifiedJnlpArtifacts;
}
/**
* Confirms that if Pack200 is enabled, the MOJO is being executed in at least a Java 1.5 JVM.
*
* @throws MojoExecutionException
*/
protected void checkPack200() throws MojoExecutionException
{
if ( isPack200() && ( SystemUtils.JAVA_VERSION_FLOAT < 1.5f ) )
{
throw new MojoExecutionException(
"Configuration error: Pack200 compression is only available on SDK 5.0 or above." );
}
}
protected void copyResources( File resourcesDir, File workDirectory ) throws IOException
{
if ( ! resourcesDir.exists() && getLog().isInfoEnabled() )
{
getLog().info( "No resources found in " + resourcesDir.getAbsolutePath() );
}
else
{
if ( ! resourcesDir.isDirectory() )
{
getLog().debug( "Not a directory: " + resourcesDir.getAbsolutePath() );
}
else
{
getLog().debug( "Copying resources from " + resourcesDir.getAbsolutePath() );
// hopefully available from FileUtils 1.0.5-SNAPSHOT
//FileUtils.copyDirectoryStructure( resourcesDir , workDirectory );
                // this may need to be parametrized somehow
String excludes = concat( DirectoryScanner.DEFAULTEXCLUDES, ", " );
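                // excludes ends up as e.g. "**/CVS/**, **/.svn/**, **/*~, ..."
                // (the plexus DirectoryScanner default excludes)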
copyDirectoryStructure( resourcesDir, workDirectory, "**", excludes );
}
}
}
    private static String concat( String[] array, String delim )
    {
        StringBuffer buffer = new StringBuffer();
        for ( int i = 0; i < array.length; i++ )
        {
            if ( i > 0 )
            {
                buffer.append( delim );
            }
            buffer.append( array[i] );
        }
        return buffer.toString();
    }
private void copyDirectoryStructure( File sourceDirectory, File destinationDirectory, String includes,
String excludes )
throws IOException
{
if ( ! sourceDirectory.exists() )
{
return;
}
List files = FileUtils.getFiles( sourceDirectory, includes, excludes );
for ( Iterator i = files.iterator(); i.hasNext(); )
{
File file = (File) i.next();
getLog().debug( "Copying " + file + " to " + destinationDirectory );
String path = file.getAbsolutePath().substring( sourceDirectory.getAbsolutePath().length() + 1 );
File destDir = new File( destinationDirectory, path );
getLog().debug( "Copying " + file + " to " + destDir );
if ( file.isDirectory() )
{
destDir.mkdirs();
}
else
{
FileUtils.copyFileToDirectory( file, destDir.getParentFile() );
}
}
}
/**
* Conditionally copy the file into the target directory.
* The operation is not performed when the target file exists and is up to date.
* The target file name is taken from the <code>sourceFile</code> name.
*
* @return <code>true</code> when the file was copied, <code>false</code> otherwise.
* @throws IllegalArgumentException if sourceFile is <code>null</code> or
* <code>sourceFile.getName()</code> is <code>null</code>
* @throws IOException if an error occurs attempting to copy the file.
*/
protected boolean copyFileToDirectoryIfNecessary( File sourceFile, File targetDirectory ) throws IOException
{
if ( sourceFile == null )
{
throw new IllegalArgumentException( "sourceFile is null" );
}
File targetFile = new File( targetDirectory, sourceFile.getName() );
boolean shouldCopy = ! targetFile.exists() || ( targetFile.lastModified() < sourceFile.lastModified() );
if ( shouldCopy )
{
FileUtils.copyFileToDirectory( sourceFile, targetDirectory );
}
else
{
getLog().debug( "Source file hasn't changed. Do not overwrite "
+ targetFile + " with " + sourceFile + "." );
}
return shouldCopy;
}
/**
* Conditionally copy the jar file into the target directory.
* The operation is not performed when a signed target file exists and is up to date.
     * The signed target file name is taken from the <code>sourceFile</code> name.
* The unsigned target file name is taken from the <code>sourceFile</code> name prefixed with UNPROCESSED_PREFIX.
     * TODO this is confusing if the sourceFile is already signed. By "unsigned" we really mean "not yet signed by us".
*
* @return <code>true</code> when the file was copied, <code>false</code> otherwise.
* @throws IllegalArgumentException if sourceFile is <code>null</code> or
* <code>sourceFile.getName()</code> is <code>null</code>
* @throws IOException if an error occurs attempting to copy the file.
*/
protected boolean copyJarAsUnprocessedToDirectoryIfNecessary( File sourceFile, File targetDirectory ) throws IOException
{
if ( sourceFile == null )
{
throw new IllegalArgumentException( "sourceFile is null" );
}
File signedTargetFile = new File( targetDirectory, sourceFile.getName() );
File unsignedTargetFile = new File( targetDirectory, UNPROCESSED_PREFIX + sourceFile.getName() );
boolean shouldCopy = ! signedTargetFile.exists() || ( signedTargetFile.lastModified() < sourceFile.lastModified() );
shouldCopy = shouldCopy && (! unsignedTargetFile.exists() || ( unsignedTargetFile.lastModified() < sourceFile.lastModified() ) );
if ( shouldCopy )
{
FileUtils.copyFile( sourceFile, unsignedTargetFile );
}
else
{
getLog().debug( "Source file hasn't changed. Do not reprocess "
+ signedTargetFile + " with " + sourceFile + "." );
}
return shouldCopy;
}
/**
* If sign is enabled, sign the jars, otherwise rename them into final jars
*/
protected void signOrRenameJars() throws MojoExecutionException, MojoFailureException
{
if ( getSign() != null )
{
getSign().init(getLog(), getWorkDirectory(), isVerbose());
if( unsignAlreadySignedJars() )
{
removeExistingSignatures( getLibDirectory(), unprocessedJarFileFilter );
}
if ( isPack200() )
{
// http://java.sun.com/j2se/1.5.0/docs/guide/deployment/deployment-guide/pack200.html
// we need to pack then unpack the files before signing them
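                // packing rearranges a jar, so the jars are normalized with a
                // pack/unpack cycle before signing -- otherwise the signature
                // would not survive the final packing done in packJars()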
Pack200.packJars( getLibDirectory(), unprocessedJarFileFilter, isGzip() );
Pack200.unpackJars( getLibDirectory(), unprocessedPack200FileFilter );
// As out current Pack200 ant tasks don't give us the ability to use a temporary area for
// creating those temporary packing, we have to delete the temporary files.
deleteFiles( getLibDirectory(), unprocessedPack200FileFilter );
                // the spec says that one should do it twice when there are unsigned jars??
// Pack200.unpackJars( applicationDirectory, updatedPack200FileFilter );
}
int signedJars = signJars( getLibDirectory(), unprocessedJarFileFilter );
if ( signedJars != getModifiedJnlpArtifacts().size() )
{
throw new IllegalStateException(
"The number of signed artifacts (" + signedJars + ") differ from the number of modified "
+ "artifacts (" + getModifiedJnlpArtifacts().size() + "). Implementation error" );
}
} else {
makeUnprocessedFilesFinal( getLibDirectory(), unprocessedJarFileFilter );
}
}
private int makeUnprocessedFilesFinal( File directory, FileFilter fileFilter ) throws MojoExecutionException
{
File[] jarFiles = directory.listFiles( fileFilter );
if ( getLog().isDebugEnabled() )
{
getLog().debug( "makeUnprocessedFilesFinal in " + directory + " found " + jarFiles.length + " file(s) to rename" );
}
if ( jarFiles.length == 0 )
{
return 0;
}
for ( int i = 0; i < jarFiles.length; i++ )
{
String unprocessedJarFileName = jarFiles[i].getName();
if (!unprocessedJarFileName.startsWith( UNPROCESSED_PREFIX )) {
throw new IllegalStateException( "We are about to sign an non " + UNPROCESSED_PREFIX
+ " file with path: " + jarFiles[i].getAbsolutePath() );
}
File finalJar = new File( jarFiles[i].getParent(), unprocessedJarFileName.substring( UNPROCESSED_PREFIX.length() ) );
if ( finalJar.exists() ) {
boolean deleted = finalJar.delete();
if (! deleted) {
throw new IllegalStateException( "Couldn't delete obsolete final jar: " + finalJar.getAbsolutePath() );
}
}
boolean renamed = jarFiles[i].renameTo( finalJar );
if (! renamed) {
throw new IllegalStateException( "Couldn't rename into final jar: " + finalJar.getAbsolutePath() );
}
}
return jarFiles.length;
}
/**
* @return the number of deleted files
*/
private int deleteFiles( File directory, FileFilter fileFilter ) throws MojoExecutionException
{
File[] files = directory.listFiles( fileFilter );
if ( getLog().isDebugEnabled() )
{
getLog().debug( "deleteFiles in " + directory + " found " + files.length + " file(s) to delete" );
}
if ( files.length == 0 )
{
return 0;
}
for ( int i = 0; i < files.length; i++ )
{
boolean deleted = files[i].delete();
if (! deleted) {
throw new IllegalStateException( "Couldn't delete file: " + files[i].getAbsolutePath() );
}
}
return files.length;
}
/**
* @return the number of signed jars
*/
private int signJars( File directory, FileFilter fileFilter ) throws MojoExecutionException, MojoFailureException
{
File[] jarFiles = directory.listFiles( fileFilter );
if ( getLog().isDebugEnabled() )
{
getLog().debug( "signJars in " + directory + " found " + jarFiles.length + " jar(s) to sign" );
}
if ( jarFiles.length == 0 )
{
return 0;
}
JarSignerMojo jarSigner = getSign().getJarSignerMojo();
for ( int i = 0; i < jarFiles.length; i++ )
{
String unprocessedJarFileName = jarFiles[i].getName();
if (!unprocessedJarFileName.startsWith( UNPROCESSED_PREFIX )) {
throw new IllegalStateException( "We are about to sign an non " + UNPROCESSED_PREFIX
+ " file with path: " + jarFiles[i].getAbsolutePath() );
}
jarSigner.setJarPath( jarFiles[i] );
File signedJar = new File( jarFiles[i].getParent(), unprocessedJarFileName.substring( UNPROCESSED_PREFIX.length() ) );
jarSigner.setSignedJar( signedJar );
if ( signedJar.exists() ) {
boolean deleted = signedJar.delete();
if (! deleted) {
throw new IllegalStateException( "Couldn't delete obsolete signed jar: " + signedJar.getAbsolutePath() );
}
}
jarSigner.execute();
getLog().debug( "lastModified signedJar:" + signedJar.lastModified() + " unprocessed signed Jar:" + jarFiles[i].lastModified() );
// remove unprocessed files
// TODO wouldn't have to do that if we copied the unprocessed jar files in a temporary area
boolean deleted = jarFiles[i].delete();
if (! deleted) {
throw new IllegalStateException( "Couldn't delete obsolete unprocessed jar: " + jarFiles[i].getAbsolutePath() );
}
}
return jarFiles.length;
}
protected URL findDefaultJnlpTemplateURL()
{
URL url = this.getClass().getClassLoader().getResource( "default-jnlp-template.vm" );
return url;
}
protected URL getWebstartJarURL()
{
String url = findDefaultJnlpTemplateURL().toString();
try {
return new URL( url.substring( "jar:".length(), url.indexOf( "!" ) ) );
} catch ( Exception e )
{
IllegalStateException iae = new IllegalStateException( "Failure to find webstart Jar URL: " + e.getMessage() );
iae.initCause( e );
throw iae;
}
}
/** @return something of the form jar:file:..../webstart-maven-plugin-.....jar!/ */
protected String getWebstartJarURLForVelocity()
{
String url = findDefaultJnlpTemplateURL().toString();
return url.substring( 0, url.indexOf( "!" ) + 2 );
}
/**
* Removes the signature of the files in the specified directory which satisfy the
* specified filter.
*
* @return the number of unsigned jars
*/
protected int removeExistingSignatures(File workDirectory, FileFilter updatedJarFileFilter)
throws MojoExecutionException
{
verboseLog("Start removing existing signatures");
// cleanup tempDir if exists
File tempDir = new File( workDirectory, "temp_extracted_jars" );
removeDirectory(tempDir);
// recreate temp dir
if ( !tempDir.mkdirs() ) {
throw new MojoExecutionException( "Error creating temporary directory: " + tempDir );
}
// process jars
File[] jarFiles = workDirectory.listFiles( updatedJarFileFilter );
        int unsignedCount = 0;
        for ( int i = 0; i < jarFiles.length; i++ )
        {
            if ( isJarSigned( jarFiles[i] ) )
            {
                verboseLog("remove signature from : " + jarFiles[i]);
                unsignJarFile( jarFiles[i], tempDir );
                unsignedCount++;
            } else
            {
                verboseLog("not signed : " + jarFiles[i]);
            }
        }

        // cleanup tempDir
        removeDirectory(tempDir);

        return unsignedCount; // only count the jars whose signature was actually removed
}
private boolean isJarSigned(File jarFile)
{
JarSignVerifyMojo verifyMojo = setupVerifyMojo();
verifyMojo.setJarPath( jarFile );
try {
verifyMojo.execute();
return true;
} catch (MojoExecutionException e) {
return false;
}
}
private void unsignJarFile(File jarFile, File tempDir) throws MojoExecutionException
{
JarUnsignMojo unsignJar = new JarUnsignMojo();
unsignJar.setTempDir( tempDir );
unsignJar.setVerbose( isVerbose() );
unsignJar.setArchiverManager( archiverManager );
unsignJar.setJarPath( jarFile );
unsignJar.execute();
}
/**
* Returns a configured instance of the JarSignVerifyMojo to test whether a
* jar is already signed. The Mojo throws an exception to indicate that a
* jar is not signed yet.
*
* @return a configured instance of the JarSignVerifyMojo.
*/
private JarSignVerifyMojo setupVerifyMojo()
{
JarSignVerifyMojo verifyMojo = new JarSignVerifyMojo();
verifyMojo.setErrorWhenNotSigned(true);
verifyMojo.setWorkingDir(getWorkDirectory());
return verifyMojo;
}
/**
* This is to try to workaround an issue with setting setLastModified.
* See MWEBSTART-28. May be removed later on if that doesn't help.
*/
/*
private boolean setLastModified( File file, long timestamp )
{
boolean result;
int nbretries = 3;
while ( ! (result = file.setLastModified( timestamp )) && ( nbretries-- > 0 ) )
{
getLog().warn("failure to change last modified timestamp... retrying ... See MWEBSTART-28. (especially if you're on NFS).");
try
{
Thread.sleep( 4000 );
}
catch (InterruptedException ignore) {
//TODO should not be ignoring, because this class doesn't control the Thread policy
}
}
return result;
}
*/
protected void packJars()
{
if ( isPack200() )
{
getLog().debug( "packing jars" );
Pack200.packJars( getWorkDirectory(), processedJarFileFilter, isGzip() );
}
}
/**
     * Checks whether the given artifact's jar contains the named class, by trying
     * to load it (without initializing it) from a classloader restricted to that
     * single jar. When the class is found, a warning is also logged if it does not
     * appear to declare a <code>public static void main(String[])</code> method.
     *
     * @param artifact the artifact whose jar file is inspected
     * @param mainClass the fully qualified name of the class to look for
     * @return <code>true</code> if the jar appears to contain the class, <code>false</code> otherwise
     * @throws MalformedURLException if the artifact's file cannot be turned into a URL
*/
protected boolean artifactContainsClass( Artifact artifact, final String mainClass ) throws MalformedURLException
{
boolean containsClass = true;
// JarArchiver.grabFilesAndDirs()
ClassLoader cl = new java.net.URLClassLoader( new URL[]{artifact.getFile().toURI().toURL()} );
Class c = null;
try
{
c = Class.forName( mainClass, false, cl );
}
catch ( ClassNotFoundException e )
{
getLog().debug( "artifact " + artifact + " doesn't contain the main class: " + mainClass );
containsClass = false;
}
catch ( Throwable t )
{
getLog().info( "artifact " + artifact + " seems to contain the main class: " + mainClass +
" but the jar doesn't seem to contain all dependencies " + t.getMessage() );
}
if ( c != null )
{
getLog().debug( "Checking if the loaded class contains a main method." );
try
{
c.getMethod( "main", new Class[]{String[].class} );
}
catch ( NoSuchMethodException e )
{
getLog().warn( "The specified main class (" + mainClass +
") doesn't seem to contain a main method... Please check your configuration." + e.getMessage() );
}
catch ( NoClassDefFoundError e )
{
// undocumented in SDK 5.0. is this due to the ClassLoader lazy loading the Method thus making this a case tackled by the JVM Spec (Ref 5.3.5)!
// Reported as Incident 633981 to Sun just in case ...
getLog().warn( "Something failed while checking if the main class contains the main() method. " +
"This is probably due to the limited classpath we have provided to the class loader. " +
"The specified main class (" + mainClass +
") found in the jar is *assumed* to contain a main method... " + e.getMessage() );
}
catch ( Throwable t )
{
getLog().error( "Unknown error: Couldn't check if the main class has a main method. " +
"The specified main class (" + mainClass +
") found in the jar is *assumed* to contain a main method...", t );
}
}
return containsClass;
}
private static class CompositeFileFilter implements FileFilter
{
private List fileFilters = new ArrayList();
CompositeFileFilter( FileFilter filter1, FileFilter filter2 )
{
if ( filter1 == null )
{
throw new IllegalArgumentException( "filter1 must not be null" );
}
if ( filter2 == null )
{
throw new IllegalArgumentException( "filter2 must not be null" );
}
fileFilters.add( filter1 );
fileFilters.add( filter2 );
}
public boolean accept( File pathname )
{
for ( int i = 0; i < fileFilters.size(); i++ )
{
if ( ! ( (FileFilter) fileFilters.get( i ) ).accept( pathname ) )
{
return false;
}
}
return true;
}
}
    // converted from an anonymous to an inner class to work around a qdox 1.6.1 bug (MPLUGIN-26)
    private static class UnprocessedPack200FileFilter implements FileFilter {
        public boolean accept( File pathname )
        {
            return pathname.isFile() &&
                pathname.getName().startsWith( UNPROCESSED_PREFIX ) &&
                ( pathname.getName().endsWith( ".jar.pack.gz" ) || pathname.getName().endsWith( ".jar.pack" ) );
        }
    }
/**
*
* @return true if already signed jars should be unsigned prior to signing
* with own key.
*/
protected boolean unsignAlreadySignedJars()
{
return unsignAlreadySignedJars;
}
/**
* Delete the specified directory.
*
* @param dir
* the directory to delete
* @throws MojoExecutionException
*/
private void removeDirectory(File dir) throws MojoExecutionException
{
if (dir != null)
{
if (dir.exists() && dir.isDirectory())
{
getLog().info("Deleting directory " + dir.getAbsolutePath());
Utils.removeDir(dir);
}
}
}
/**
* Log as info when verbose or info is enabled, as debug otherwise.
*/
protected void verboseLog( String msg )
{
infoOrDebug( isVerbose() || getLog().isInfoEnabled(), msg );
}
/** if info is true, log as info(), otherwise as debug() */
    private void infoOrDebug( boolean info, String msg )
{
if ( info )
{
getLog().info( msg );
}
else
{
getLog().debug( msg );
}
}
}
| webstart-maven-plugin/src/main/java/org/codehaus/mojo/webstart/AbstractBaseJnlpMojo.java | /*
* Copyright 2001-2005 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License" );
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.mojo.webstart;
import java.io.File;
import java.io.FileFilter;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.apache.commons.lang.SystemUtils;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.resolver.ArtifactResolver;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugin.jar.JarSignVerifyMojo;
import org.apache.maven.project.MavenProject;
import org.codehaus.plexus.archiver.manager.ArchiverManager;
import org.codehaus.plexus.util.DirectoryScanner;
import org.codehaus.plexus.util.FileUtils;
/**
* The superclass for all JNLP generating MOJOs.
*
* @author Kevin Stembridge
* @author $LastChangedBy$
* @since 28 May 2007
* @version $Revision$
*
*/
public abstract class AbstractBaseJnlpMojo extends AbstractMojo
{
private static final String DEFAULT_RESOURCES_DIR = "src/main/jnlp/resources";
/** unprocessed files (that will be signed) are prefixed with this */
private static final String UNPROCESSED_PREFIX = "unprocessed_";
/**
* Artifact resolver, needed to download source jars for inclusion in classpath.
*
* @component
* @required
* @readonly
*/
private ArtifactResolver artifactResolver;
/**
* Artifact factory, needed to download source jars for inclusion in classpath.
*
* @component
* @required
* @readonly
*/
private ArtifactFactory artifactFactory;
/**
* @parameter expression="${localRepository}"
* @required
* @readonly
*/
private ArtifactRepository localRepository;
/**
* The collection of remote artifact repositories.
*
* @parameter expression="${project.remoteArtifactRepositories}"
* @readonly
* @required
*/
private List remoteRepositories;
/**
* The directory in which files will be stored prior to processing.
*
* @parameter expression="${project.build.directory}/jnlp"
* @required
*/
private File workDirectory;
/**
* The path where the libraries are placed within the jnlp structure.
*
* @parameter expression=""
*/
protected String libPath;
/**
* The location of the directory (relative or absolute) containing non-jar resources that
* are to be included in the JNLP bundle.
*
* @parameter
*/
private File resourcesDirectory;
/**
* The location where the JNLP Velocity template files are stored.
*
* @parameter expression="${project.basedir}/src/main/jnlp"
* @required
*/
private File templateDirectory;
/**
* Indicates whether or not jar resources should be compressed
* using pack200. Setting this value to true requires SDK 5.0 or greater.
*
* @parameter default-value="false"
*/
private boolean pack200;
/**
* The Sign Config
*
* @parameter implementation="org.codehaus.mojo.webstart.JarSignMojoConfig"
*/
private SignConfig sign;
/**
* Indicates whether or not jar files should be verified after signing.
*
* @parameter default-value="true"
*/
private boolean verifyjar;
/**
* Indicates whether or not gzip archives will be created for each of the jar
* files included in the webstart bundle.
*
* @parameter default-value="false"
*/
private boolean gzip;
/**
* Enable verbose output.
*
* @parameter expression="${verbose}" default-value="false"
*/
private boolean verbose;
/**
* Set to true to exclude all transitive dependencies.
*
* @parameter
*/
private boolean excludeTransitive;
private final List modifiedJnlpArtifacts = new ArrayList();
// the jars to sign and pack are selected if they are prefixed by UNPROCESSED_PREFIX.
// as the plugin copies the new versions locally before signing/packing them
// we just need to see if the plugin copied a new version
// We achieve that by only filtering files modified after the plugin was started
// Note: if other files (the pom, the keystore config) have changed, one needs to clean
private final FileFilter unprocessedJarFileFilter;
private final FileFilter processedJarFileFilter;
private final FileFilter unprocessedPack200FileFilter;
/**
* Define whether to remove existing signatures.
*
* @parameter alias="unsign" default-value="false"
*/
private boolean unsignAlreadySignedJars;
/**
* To look up Archiver/UnArchiver implementations
*
* @parameter expression="${component.org.codehaus.plexus.archiver.manager.ArchiverManager}"
* @required
*/
protected ArchiverManager archiverManager;
/**
* Creates a new {@code AbstractBaseJnlpMojo}.
*/
public AbstractBaseJnlpMojo()
{
processedJarFileFilter = new FileFilter() {
public boolean accept( File pathname )
{
return pathname.isFile() && pathname.getName().endsWith( ".jar" )
&& ! pathname.getName().startsWith( UNPROCESSED_PREFIX );
}
};
unprocessedJarFileFilter = new FileFilter() {
public boolean accept( File pathname )
{
return pathname.isFile() && pathname.getName().startsWith( UNPROCESSED_PREFIX )
&& pathname.getName().endsWith( ".jar" );
}
};
unprocessedPack200FileFilter = new UnprocessedPack200FileFilter();
}
protected void makeWorkingDirIfNecessary() throws MojoExecutionException
{
if ( !getWorkDirectory().exists() && !getWorkDirectory().mkdirs() )
{
throw new MojoExecutionException( "Failed to create: " + getWorkDirectory().getAbsolutePath() );
}
// check and create the library path
if (!getLibDirectory().exists() && !getLibDirectory().mkdirs())
{
throw new MojoExecutionException("Failed to create: " + getLibDirectory().getAbsolutePath());
}
}
public abstract MavenProject getProject();
/**
* Returns the working directory. This is the directory in which files and resources
* will be placed in order to be processed prior to packaging.
* @return Returns the value of the workDirectory field.
*/
protected File getWorkDirectory()
{
return workDirectory;
}
/**
* Returns the library directory. If not libPath is configured, the working directory is returned.
* @return Returns the value of the libraryDirectory field.
*/
protected File getLibDirectory() {
if (getLibPath() != null) {
return new File(getWorkDirectory(), getLibPath());
}
return getWorkDirectory();
}
/**
     * Returns the library path. This is the subpath within the working directory where the libraries are placed.
* If the path is not configured it is <code>null</code>.
* @return the library path or <code>null</code> if not configured.
*/
public String getLibPath() {
if (( libPath == null ) || ( libPath.trim().length() == 0 )){
return null;
}
return libPath;
}
/**
* Returns the location of the directory containing
* non-jar resources that are to be included in the JNLP bundle.
*
* @return Returns the value of the resourcesDirectory field, never null.
*/
protected File getResourcesDirectory()
{
if ( resourcesDirectory == null )
{
resourcesDirectory = new File(getProject().getBasedir(), DEFAULT_RESOURCES_DIR );
}
return resourcesDirectory;
}
/**
* Returns the file handle to the directory containing the Velocity templates for the JNLP
* files to be generated.
* @return Returns the value of the templateDirectory field.
*/
protected File getTemplateDirectory()
{
return templateDirectory;
}
/**
* Returns the ArtifactFactory that can be used to create artifacts that
* need to be retrieved from maven artifact repositories.
* @return Returns the value of the artifactFactory field.
*/
protected ArtifactFactory getArtifactFactory()
{
return artifactFactory;
}
/**
* Returns the ArtifactResolver that can be used to retrieve artifacts
* from maven artifact repositories.
* @return Returns the value of the artifactResolver field.
*/
protected ArtifactResolver getArtifactResolver()
{
return artifactResolver;
}
/**
* Returns the local artifact repository.
* @return Returns the value of the localRepository field.
*/
protected ArtifactRepository getLocalRepository()
{
return localRepository;
}
/**
* Returns the collection of remote artifact repositories for the current
* Maven project.
* @return Returns the value of the remoteRepositories field.
*/
protected List getRemoteRepositories()
{
return remoteRepositories;
}
/**
* Returns the flag that indicates whether or not jar resources
* will be compressed using pack200.
*
* @return Returns the value of the pack200 field.
*/
public boolean isPack200()
{
return pack200;
}
/**
* Returns jar signing configuration element.
* @return Returns the value of the sign field.
*/
protected SignConfig getSign()
{
return sign;
}
/**
* Returns the flag that indicates whether or not a gzip should be
* created for each jar resource.
* @return Returns the value of the gzip field.
*/
protected boolean isGzip()
{
return gzip;
}
/**
* Returns the flag that indicates whether or not to provide verbose output.
* @return Returns the value of the verbose field.
*/
protected boolean isVerbose()
{
return verbose;
}
/**
* Returns the flag that indicates whether or not jars should be verified after signing.
* @return Returns the value of the verifyjar field.
*/
protected boolean isVerifyjar()
{
return verifyjar;
}
/**
* Returns the flag that indicates whether or not all transitive dependencies will be excluded
* from the generated JNLP bundle.
* @return Returns the value of the excludeTransitive field.
*/
protected boolean isExcludeTransitive()
{
return this.excludeTransitive;
}
/**
* Returns the collection of artifacts that have been modified
* since the last time this mojo was run.
* @return Returns the value of the modifiedJnlpArtifacts field.
*/
protected List getModifiedJnlpArtifacts()
{
return modifiedJnlpArtifacts;
}
/**
* Confirms that if Pack200 is enabled, the MOJO is being executed in at least a Java 1.5 JVM.
*
* @throws MojoExecutionException
*/
protected void checkPack200() throws MojoExecutionException
{
if ( isPack200() && ( SystemUtils.JAVA_VERSION_FLOAT < 1.5f ) )
{
throw new MojoExecutionException(
"Configuration error: Pack200 compression is only available on SDK 5.0 or above." );
}
}
protected void copyResources( File resourcesDir, File workDirectory ) throws IOException
{
if ( ! resourcesDir.exists() && getLog().isInfoEnabled() )
{
getLog().info( "No resources found in " + resourcesDir.getAbsolutePath() );
}
else
{
if ( ! resourcesDir.isDirectory() )
{
getLog().debug( "Not a directory: " + resourcesDir.getAbsolutePath() );
}
else
{
getLog().debug( "Copying resources from " + resourcesDir.getAbsolutePath() );
// hopefully available from FileUtils 1.0.5-SNAPSHOT
//FileUtils.copyDirectoryStructure( resourcesDir , workDirectory );
            // this may need to be parametrized somehow
String excludes = concat( DirectoryScanner.DEFAULTEXCLUDES, ", " );
copyDirectoryStructure( resourcesDir, workDirectory, "**", excludes );
}
}
}
private static String concat( String[] array, String delim )
{
StringBuffer buffer = new StringBuffer();
for ( int i = 0; i < array.length; i++ )
{
if ( i > 0 )
{
buffer.append( delim );
}
String s = array[i];
buffer.append( s ).append( delim );
}
return buffer.toString();
}
private void copyDirectoryStructure( File sourceDirectory, File destinationDirectory, String includes,
String excludes )
throws IOException
{
if ( ! sourceDirectory.exists() )
{
return;
}
List files = FileUtils.getFiles( sourceDirectory, includes, excludes );
for ( Iterator i = files.iterator(); i.hasNext(); )
{
File file = (File) i.next();
getLog().debug( "Copying " + file + " to " + destinationDirectory );
String path = file.getAbsolutePath().substring( sourceDirectory.getAbsolutePath().length() + 1 );
File destDir = new File( destinationDirectory, path );
getLog().debug( "Copying " + file + " to " + destDir );
if ( file.isDirectory() )
{
destDir.mkdirs();
}
else
{
FileUtils.copyFileToDirectory( file, destDir.getParentFile() );
}
}
}
/**
* Conditionally copy the file into the target directory.
* The operation is not performed when the target file exists and is up to date.
* The target file name is taken from the <code>sourceFile</code> name.
*
* @return <code>true</code> when the file was copied, <code>false</code> otherwise.
* @throws IllegalArgumentException if sourceFile is <code>null</code> or
* <code>sourceFile.getName()</code> is <code>null</code>
* @throws IOException if an error occurs attempting to copy the file.
*/
protected boolean copyFileToDirectoryIfNecessary( File sourceFile, File targetDirectory ) throws IOException
{
if ( sourceFile == null )
{
throw new IllegalArgumentException( "sourceFile is null" );
}
File targetFile = new File( targetDirectory, sourceFile.getName() );
boolean shouldCopy = ! targetFile.exists() || ( targetFile.lastModified() < sourceFile.lastModified() );
if ( shouldCopy )
{
FileUtils.copyFileToDirectory( sourceFile, targetDirectory );
}
else
{
getLog().debug( "Source file hasn't changed. Do not overwrite "
+ targetFile + " with " + sourceFile + "." );
}
return shouldCopy;
}
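    // Usage sketch (illustrative, not part of the original source): the copy is skipped
    // when the target already exists and is at least as recent as the source, e.g.
    //
    //     boolean copied = copyFileToDirectoryIfNecessary( sourceJar, getWorkDirectory() );
    //     if ( copied )
    //     {
    //         getModifiedJnlpArtifacts().add( artifactName );
    //     }
    //
    // 'sourceJar' and 'artifactName' are placeholders for the caller's own values.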
/**
* Conditionally copy the jar file into the target directory.
* The operation is not performed when a signed target file exists and is up to date.
     * The signed target file name is taken from the <code>sourceFile</code> name.
* The unsigned target file name is taken from the <code>sourceFile</code> name prefixed with UNPROCESSED_PREFIX.
     * TODO this is confusing if the sourceFile is already signed. By unsigned we really mean 'unsigned by us'.
*
* @return <code>true</code> when the file was copied, <code>false</code> otherwise.
* @throws IllegalArgumentException if sourceFile is <code>null</code> or
* <code>sourceFile.getName()</code> is <code>null</code>
* @throws IOException if an error occurs attempting to copy the file.
*/
protected boolean copyJarAsUnprocessedToDirectoryIfNecessary( File sourceFile, File targetDirectory ) throws IOException
{
if ( sourceFile == null )
{
throw new IllegalArgumentException( "sourceFile is null" );
}
File signedTargetFile = new File( targetDirectory, sourceFile.getName() );
File unsignedTargetFile = new File( targetDirectory, UNPROCESSED_PREFIX + sourceFile.getName() );
boolean shouldCopy = ! signedTargetFile.exists() || ( signedTargetFile.lastModified() < sourceFile.lastModified() );
shouldCopy = shouldCopy && (! unsignedTargetFile.exists() || ( unsignedTargetFile.lastModified() < sourceFile.lastModified() ) );
if ( shouldCopy )
{
FileUtils.copyFile( sourceFile, unsignedTargetFile );
}
else
{
getLog().debug( "Source file hasn't changed. Do not reprocess "
+ signedTargetFile + " with " + sourceFile + "." );
}
return shouldCopy;
}
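    // Naming scheme example (illustrative): copying commons-logging-1.1.jar places
    //
    //     unprocessed_commons-logging-1.1.jar
    //
    // in the target directory; signing or renaming later strips the UNPROCESSED_PREFIX so
    // the final artifact is commons-logging-1.1.jar again.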
/**
* If sign is enabled, sign the jars, otherwise rename them into final jars
*/
protected void signOrRenameJars() throws MojoExecutionException, MojoFailureException
{
if ( getSign() != null )
{
getSign().init(getLog(), getWorkDirectory(), isVerbose());
if( unsignAlreadySignedJars() )
{
removeExistingSignatures( getWorkDirectory(), unprocessedJarFileFilter );
}
if ( isPack200() )
{
// http://java.sun.com/j2se/1.5.0/docs/guide/deployment/deployment-guide/pack200.html
// we need to pack then unpack the files before signing them
Pack200.packJars( getLibDirectory(), unprocessedJarFileFilter, isGzip() );
Pack200.unpackJars( getLibDirectory(), unprocessedPack200FileFilter );
                // As our current Pack200 ant tasks don't give us the ability to use a temporary area for
                // creating those temporary packed files, we have to delete them afterwards.
deleteFiles( getLibDirectory(), unprocessedPack200FileFilter );
                // the spec says that one should do it twice when there are unsigned jars??
// Pack200.unpackJars( applicationDirectory, updatedPack200FileFilter );
}
int signedJars = signJars( getLibDirectory(), unprocessedJarFileFilter );
if ( signedJars != getModifiedJnlpArtifacts().size() )
{
throw new IllegalStateException(
"The number of signed artifacts (" + signedJars + ") differ from the number of modified "
+ "artifacts (" + getModifiedJnlpArtifacts().size() + "). Implementation error" );
}
} else {
makeUnprocessedFilesFinal( getLibDirectory(), unprocessedJarFileFilter );
}
}
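    // Processing order recap (descriptive, derived from the branches above): with a sign
    // config and pack200 enabled, each unprocessed_*.jar goes through
    //     pack -> unpack -> sign -> (later) pack again in packJars(),
    // because a jar must survive a pack/unpack round-trip before being signed, as per the
    // Sun deployment guide linked above. Without a sign config, the files are only renamed.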
private int makeUnprocessedFilesFinal( File directory, FileFilter fileFilter ) throws MojoExecutionException
{
File[] jarFiles = directory.listFiles( fileFilter );
if ( getLog().isDebugEnabled() )
{
getLog().debug( "makeUnprocessedFilesFinal in " + directory + " found " + jarFiles.length + " file(s) to rename" );
}
if ( jarFiles.length == 0 )
{
return 0;
}
for ( int i = 0; i < jarFiles.length; i++ )
{
String unprocessedJarFileName = jarFiles[i].getName();
if (!unprocessedJarFileName.startsWith( UNPROCESSED_PREFIX )) {
throw new IllegalStateException( "We are about to sign an non " + UNPROCESSED_PREFIX
+ " file with path: " + jarFiles[i].getAbsolutePath() );
}
File finalJar = new File( jarFiles[i].getParent(), unprocessedJarFileName.substring( UNPROCESSED_PREFIX.length() ) );
if ( finalJar.exists() ) {
boolean deleted = finalJar.delete();
if (! deleted) {
throw new IllegalStateException( "Couldn't delete obsolete final jar: " + finalJar.getAbsolutePath() );
}
}
boolean renamed = jarFiles[i].renameTo( finalJar );
if (! renamed) {
throw new IllegalStateException( "Couldn't rename into final jar: " + finalJar.getAbsolutePath() );
}
}
return jarFiles.length;
}
/**
* @return the number of deleted files
*/
private int deleteFiles( File directory, FileFilter fileFilter ) throws MojoExecutionException
{
File[] files = directory.listFiles( fileFilter );
if ( getLog().isDebugEnabled() )
{
getLog().debug( "deleteFiles in " + directory + " found " + files.length + " file(s) to delete" );
}
if ( files.length == 0 )
{
return 0;
}
for ( int i = 0; i < files.length; i++ )
{
boolean deleted = files[i].delete();
if (! deleted) {
throw new IllegalStateException( "Couldn't delete file: " + files[i].getAbsolutePath() );
}
}
return files.length;
}
/**
* @return the number of signed jars
*/
private int signJars( File directory, FileFilter fileFilter ) throws MojoExecutionException, MojoFailureException
{
File[] jarFiles = directory.listFiles( fileFilter );
if ( getLog().isDebugEnabled() )
{
getLog().debug( "signJars in " + directory + " found " + jarFiles.length + " jar(s) to sign" );
}
if ( jarFiles.length == 0 )
{
return 0;
}
JarSignerMojo jarSigner = getSign().getJarSignerMojo();
for ( int i = 0; i < jarFiles.length; i++ )
{
String unprocessedJarFileName = jarFiles[i].getName();
if (!unprocessedJarFileName.startsWith( UNPROCESSED_PREFIX )) {
throw new IllegalStateException( "We are about to sign an non " + UNPROCESSED_PREFIX
+ " file with path: " + jarFiles[i].getAbsolutePath() );
}
jarSigner.setJarPath( jarFiles[i] );
File signedJar = new File( jarFiles[i].getParent(), unprocessedJarFileName.substring( UNPROCESSED_PREFIX.length() ) );
jarSigner.setSignedJar( signedJar );
if ( signedJar.exists() ) {
boolean deleted = signedJar.delete();
if (! deleted) {
throw new IllegalStateException( "Couldn't delete obsolete signed jar: " + signedJar.getAbsolutePath() );
}
}
jarSigner.execute();
getLog().debug( "lastModified signedJar:" + signedJar.lastModified() + " unprocessed signed Jar:" + jarFiles[i].lastModified() );
// remove unprocessed files
// TODO wouldn't have to do that if we copied the unprocessed jar files in a temporary area
boolean deleted = jarFiles[i].delete();
if (! deleted) {
throw new IllegalStateException( "Couldn't delete obsolete unprocessed jar: " + jarFiles[i].getAbsolutePath() );
}
}
return jarFiles.length;
}
protected URL findDefaultJnlpTemplateURL()
{
URL url = this.getClass().getClassLoader().getResource( "default-jnlp-template.vm" );
return url;
}
protected URL getWebstartJarURL()
{
String url = findDefaultJnlpTemplateURL().toString();
try {
return new URL( url.substring( "jar:".length(), url.indexOf( "!" ) ) );
} catch ( Exception e )
{
IllegalStateException iae = new IllegalStateException( "Failure to find webstart Jar URL: " + e.getMessage() );
iae.initCause( e );
throw iae;
}
}
/** @return something of the form jar:file:..../webstart-maven-plugin-.....jar!/ */
protected String getWebstartJarURLForVelocity()
{
String url = findDefaultJnlpTemplateURL().toString();
return url.substring( 0, url.indexOf( "!" ) + 2 );
}
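    // Example return value (illustrative path, not taken from the original source):
    //     jar:file:/home/user/.m2/repository/org/codehaus/mojo/webstart-maven-plugin/1.0/webstart-maven-plugin-1.0.jar!/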
/**
* Removes the signature of the files in the specified directory which satisfy the
* specified filter.
*
* @return the number of unsigned jars
*/
protected int removeExistingSignatures(File workDirectory, FileFilter updatedJarFileFilter)
throws MojoExecutionException
{
verboseLog("Start removing existing signatures");
// cleanup tempDir if exists
File tempDir = new File( workDirectory, "temp_extracted_jars" );
removeDirectory(tempDir);
// recreate temp dir
if ( !tempDir.mkdirs() ) {
throw new MojoExecutionException( "Error creating temporary directory: " + tempDir );
}
// process jars
File[] jarFiles = workDirectory.listFiles( updatedJarFileFilter );
for ( int i = 0; i < jarFiles.length; i++ )
{
if ( isJarSigned( jarFiles[i] ) )
{
verboseLog("remove signature from : " + jarFiles[i]);
unsignJarFile( jarFiles[i], tempDir );
} else
{
verboseLog("not signed : " + jarFiles[i]);
}
}
// cleanup tempDir
removeDirectory(tempDir);
return jarFiles.length; // FIXME this is wrong. Not all jars are signed.
}
private boolean isJarSigned(File jarFile)
{
JarSignVerifyMojo verifyMojo = setupVerifyMojo();
verifyMojo.setJarPath( jarFile );
try {
verifyMojo.execute();
return true;
} catch (MojoExecutionException e) {
return false;
}
}
private void unsignJarFile(File jarFile, File tempDir) throws MojoExecutionException
{
JarUnsignMojo unsignJar = new JarUnsignMojo();
unsignJar.setTempDir( tempDir );
unsignJar.setVerbose( isVerbose() );
unsignJar.setArchiverManager( archiverManager );
unsignJar.setJarPath( jarFile );
unsignJar.execute();
}
/**
* Returns a configured instance of the JarSignVerifyMojo to test whether a
* jar is already signed. The Mojo throws an exception to indicate that a
* jar is not signed yet.
*
* @return a configured instance of the JarSignVerifyMojo.
*/
private JarSignVerifyMojo setupVerifyMojo()
{
JarSignVerifyMojo verifyMojo = new JarSignVerifyMojo();
verifyMojo.setErrorWhenNotSigned(true);
verifyMojo.setWorkingDir(getWorkDirectory());
return verifyMojo;
}
/**
* This is to try to workaround an issue with setting setLastModified.
* See MWEBSTART-28. May be removed later on if that doesn't help.
*/
/*
private boolean setLastModified( File file, long timestamp )
{
boolean result;
int nbretries = 3;
while ( ! (result = file.setLastModified( timestamp )) && ( nbretries-- > 0 ) )
{
getLog().warn("failure to change last modified timestamp... retrying ... See MWEBSTART-28. (especially if you're on NFS).");
try
{
Thread.sleep( 4000 );
}
catch (InterruptedException ignore) {
//TODO should not be ignoring, because this class doesn't control the Thread policy
}
}
return result;
}
*/
protected void packJars()
{
if ( isPack200() )
{
getLog().debug( "packing jars" );
Pack200.packJars( getWorkDirectory(), processedJarFileFilter, isGzip() );
}
}
    /**
     * Checks whether the given artifact's jar contains the specified main class, and
     * warns when the class is present but doesn't appear to declare a main method.
     *
     * @param artifact the artifact whose jar file is inspected
     * @param mainClass the fully qualified name of the expected main class
     * @return <code>true</code> if the class could be loaded from the artifact
     * @throws MalformedURLException if the artifact's file cannot be converted to a URL
     */
protected boolean artifactContainsClass( Artifact artifact, final String mainClass ) throws MalformedURLException
{
boolean containsClass = true;
// JarArchiver.grabFilesAndDirs()
ClassLoader cl = new java.net.URLClassLoader( new URL[]{artifact.getFile().toURI().toURL()} );
        Class<?> c = null;
try
{
c = Class.forName( mainClass, false, cl );
}
catch ( ClassNotFoundException e )
{
getLog().debug( "artifact " + artifact + " doesn't contain the main class: " + mainClass );
containsClass = false;
}
catch ( Throwable t )
{
getLog().info( "artifact " + artifact + " seems to contain the main class: " + mainClass +
" but the jar doesn't seem to contain all dependencies " + t.getMessage() );
}
if ( c != null )
{
getLog().debug( "Checking if the loaded class contains a main method." );
try
{
c.getMethod( "main", new Class[]{String[].class} );
}
catch ( NoSuchMethodException e )
{
getLog().warn( "The specified main class (" + mainClass +
") doesn't seem to contain a main method... Please check your configuration." + e.getMessage() );
}
catch ( NoClassDefFoundError e )
{
                // undocumented in SDK 5.0. Is this caused by the ClassLoader lazily loading the Method,
                // making this a case covered by the JVM Spec (ref 5.3.5)?
// Reported as Incident 633981 to Sun just in case ...
getLog().warn( "Something failed while checking if the main class contains the main() method. " +
"This is probably due to the limited classpath we have provided to the class loader. " +
"The specified main class (" + mainClass +
") found in the jar is *assumed* to contain a main method... " + e.getMessage() );
}
catch ( Throwable t )
{
getLog().error( "Unknown error: Couldn't check if the main class has a main method. " +
"The specified main class (" + mainClass +
") found in the jar is *assumed* to contain a main method...", t );
}
}
return containsClass;
}
private static class CompositeFileFilter implements FileFilter
{
private List fileFilters = new ArrayList();
CompositeFileFilter( FileFilter filter1, FileFilter filter2 )
{
if ( filter1 == null )
{
throw new IllegalArgumentException( "filter1 must not be null" );
}
if ( filter2 == null )
{
throw new IllegalArgumentException( "filter2 must not be null" );
}
fileFilters.add( filter1 );
fileFilters.add( filter2 );
}
public boolean accept( File pathname )
{
for ( int i = 0; i < fileFilters.size(); i++ )
{
if ( ! ( (FileFilter) fileFilters.get( i ) ).accept( pathname ) )
{
return false;
}
}
return true;
}
}
    // anonymous class converted to an inner class to work around a qdox 1.6.1 bug (MPLUGIN-26)
private static class UnprocessedPack200FileFilter implements FileFilter {
public boolean accept( File pathname )
{
return pathname.isFile() &&
pathname.getName().startsWith( UNPROCESSED_PREFIX ) &&
( pathname.getName().endsWith( ".jar.pack.gz" ) || pathname.getName().endsWith( ".jar.pack" ) );
}
    }
/**
*
* @return true if already signed jars should be unsigned prior to signing
* with own key.
*/
protected boolean unsignAlreadySignedJars()
{
return unsignAlreadySignedJars;
}
/**
* Delete the specified directory.
*
* @param dir
* the directory to delete
* @throws MojoExecutionException
*/
private void removeDirectory(File dir) throws MojoExecutionException
{
if (dir != null)
{
if (dir.exists() && dir.isDirectory())
{
getLog().info("Deleting directory " + dir.getAbsolutePath());
Utils.removeDir(dir);
}
}
}
/**
* Log as info when verbose or info is enabled, as debug otherwise.
*/
protected void verboseLog( String msg )
{
infoOrDebug( isVerbose() || getLog().isInfoEnabled(), msg );
}
/** if info is true, log as info(), otherwise as debug() */
    private void infoOrDebug( boolean info, String msg )
{
if ( info )
{
getLog().info( msg );
}
else
{
getLog().debug( msg );
}
}
}
| MWEBSTART-107 prevent double jar signing
git-svn-id: 677255a72fc9a6946e9326765bd35a363b49ea29@6919 52ab4f32-60fc-0310-b215-8acea882cd1b
| webstart-maven-plugin/src/main/java/org/codehaus/mojo/webstart/AbstractBaseJnlpMojo.java | MWEBSTART-107 prevent double jar signing |
|
Java | epl-1.0 | 68d4f5f6e7e42a7725c759da1018988d8c2316de | 0 | AGETO/hybris-maven-plugin | package com.divae.ageto.hybris.install.task.metadata;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Properties;
import com.divae.ageto.hybris.install.extensions.Extension;
import com.divae.ageto.hybris.install.extensions.binary.ClassFolder;
import com.divae.ageto.hybris.install.extensions.binary.ExtensionBinary;
import com.divae.ageto.hybris.install.extensions.binary.JARArchive;
import com.divae.ageto.hybris.install.extensions.binary.None;
import com.google.common.base.Throwables;
/**
* @author Marvin Haagen
*/
public enum ExtensionMetadataFile {
;
    private static final String FILE_NAME_FORMAT = "%s-metadata.properties";
public static File createMetadataFile(final Extension extension, final File workDirectory) {
final File metadataFile = new File(new File(workDirectory, extension.getName()),
getMetadataFileName(extension.getName()).toString());
final Properties properties = new Properties();
try {
properties.setProperty("extension.name", extension.getName());
properties.setProperty("extension.directory", extension.getBaseDirectory().toString());
addExtensionBinaryProperties(properties, extension);
            // use try-with-resources so the stream is closed even on failure
            try (FileOutputStream outputStream = new FileOutputStream(metadataFile)) {
                properties.store(outputStream, null);
            }
return metadataFile;
} catch (IOException exception) {
throw Throwables.propagate(exception);
}
}
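    // Illustrative file content (a sketch; the exact binary type string depends on what
    // ExtensionBinary.getType() returns, and the paths are made up): for an extension
    // named "core" the generated core/core-metadata.properties could look like
    //
    //     extension.name=core
    //     extension.directory=/opt/hybris/bin/platform/ext/core
    //     extension.binary.type=JARArchive
    //     extension.binary.path=bin/coreserver.jar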
public static ExtensionProperties readMetadataFile(final File workDirectory, final String extensionName) {
final File metadataFile = new File(new File(workDirectory, extensionName), getMetadataFileName(extensionName).toString());
Properties properties = new Properties();
try {
            // use try-with-resources so the stream is closed even on failure
            try (FileInputStream inputStream = new FileInputStream(metadataFile)) {
                properties.load(inputStream);
            }
final String name = properties.getProperty("extension.name");
final File baseFile = new File(properties.getProperty("extension.directory"));
final ExtensionBinary binary = getExtensionBinary(properties);
return new ExtensionProperties(baseFile);
} catch (final IOException exception) {
throw Throwables.propagate(exception);
}
}
private static ExtensionBinary getExtensionBinary(final Properties properties) {
final String type = properties.getProperty("extension.binary.type");
if (type.equals(new None().getType())) {
return new None();
}
if (type.equals(new JARArchive(new File("")).getType())) {
return new JARArchive(new File(properties.getProperty("extension.binary.path")));
}
if (type.equals(new ClassFolder(new File("")).getType())) {
return new ClassFolder(new File(properties.getProperty("extension.binary.path")));
}
throw new RuntimeException("Invalid type: " + type);
}
private static void addExtensionBinaryProperties(final Properties config, final Extension extension) {
config.setProperty("extension.binary.type", extension.getBinary().getType());
if (extension.getBinary().getClass() != None.class) {
config.setProperty("extension.binary.path", extension.getBinary().getExtensionBinaryPath().toString());
}
}
private static File getMetadataFileName(final String extensionName) {
return new File(String.format(FILE_NAME_FORMAT, extensionName));
}
}
| src/main/java/com/divae/ageto/hybris/install/task/metadata/ExtensionMetadataFile.java | package com.divae.ageto.hybris.install.task.metadata;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Properties;
import com.divae.ageto.hybris.install.extensions.Extension;
import com.divae.ageto.hybris.install.extensions.binary.ClassFolder;
import com.divae.ageto.hybris.install.extensions.binary.ExtensionBinary;
import com.divae.ageto.hybris.install.extensions.binary.JARArchive;
import com.divae.ageto.hybris.install.extensions.binary.None;
import com.google.common.base.Throwables;
/**
* @author Marvin Haagen
*/
public enum ExtensionMetadataFile {
;
private static String fileNameFormat = "%s-metadata.properties";
public static File createMetadataFile(final Extension extension, final File workDirectory) {
File metadataFile = new File(new File(workDirectory, extension.getName()),
getMetadataFileName(extension.getName()).toString());
Properties properties = new Properties();
try {
properties.setProperty("extension.name", extension.getName());
properties.setProperty("extension.directory", extension.getBaseDirectory().toString());
addExtensionBinaryProperties(properties, extension);
properties.store(new FileOutputStream(metadataFile), null);
} catch (IOException e) {
Throwables.propagate(e);
}
return metadataFile;
}
public static ExtensionProperties readMetadataFile(final File workDirectory, final String extensionName) {
File metadataFile = new File(new File(workDirectory, extensionName), getMetadataFileName(extensionName).toString());
Properties properties = new Properties();
try {
properties.load(new FileInputStream(metadataFile));
String name = properties.getProperty("extension.name");
File baseFile = new File(properties.getProperty("extension.directory"));
ExtensionBinary binary = getExtensionBinary(properties);
return new ExtensionProperties(baseFile);
} catch (IOException e) {
Throwables.propagate(e);
}
return null;
}
private static ExtensionBinary getExtensionBinary(Properties properties) {
String type = properties.getProperty("extension.binary.type");
if (type.equals(new None().getType())) {
return new None();
}
if (type.equals(new JARArchive(new File("")).getType())) {
return new JARArchive(new File(properties.getProperty("extension.binary.path")));
}
if (type.equals(new ClassFolder(new File("")).getType())) {
return new ClassFolder(new File(properties.getProperty("extension.binary.path")));
}
throw new RuntimeException("Invalid type: " + type);
}
private static void addExtensionBinaryProperties(Properties config, Extension extension) {
config.setProperty("extension.binary.type", extension.getBinary().getType());
if (extension.getBinary().getClass() != None.class) {
config.setProperty("extension.binary.path", extension.getBinary().getExtensionBinaryPath().toString());
}
}
private static File getMetadataFileName(final String extensionName) {
return new File(String.format(fileNameFormat, extensionName));
}
}
| cleanup and codestyle
| src/main/java/com/divae/ageto/hybris/install/task/metadata/ExtensionMetadataFile.java | cleanup and codestyle |
|
Java | epl-1.0 | 1d04468d1f05b86ed55b5909a8682c7674869cf0 | 0 | opendaylight/netvirt,opendaylight/netvirt,opendaylight/netvirt,opendaylight/netvirt,opendaylight/netvirt | /*
* Copyright (c) 2017 Intel Corporation and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.netvirt.qosservice;
import com.google.common.base.Optional;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.opendaylight.controller.md.sal.binding.api.DataBroker;
import org.opendaylight.controller.md.sal.binding.api.WriteTransaction;
import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
import org.opendaylight.genius.infra.ManagedNewTransactionRunner;
import org.opendaylight.genius.infra.ManagedNewTransactionRunnerImpl;
import org.opendaylight.genius.mdsalutil.ActionInfo;
import org.opendaylight.genius.mdsalutil.FlowEntity;
import org.opendaylight.genius.mdsalutil.InstructionInfo;
import org.opendaylight.genius.mdsalutil.MDSALUtil;
import org.opendaylight.genius.mdsalutil.MatchInfo;
import org.opendaylight.genius.mdsalutil.MetaDataUtil;
import org.opendaylight.genius.mdsalutil.NwConstants;
import org.opendaylight.genius.mdsalutil.actions.ActionNxResubmit;
import org.opendaylight.genius.mdsalutil.actions.ActionSetFieldDscp;
import org.opendaylight.genius.mdsalutil.instructions.InstructionApplyActions;
import org.opendaylight.genius.mdsalutil.interfaces.IMdsalApiManager;
import org.opendaylight.genius.mdsalutil.matches.MatchEthernetType;
import org.opendaylight.genius.mdsalutil.matches.MatchMetadata;
import org.opendaylight.genius.utils.ServiceIndex;
import org.opendaylight.infrautils.jobcoordinator.JobCoordinator;
import org.opendaylight.netvirt.neutronvpn.interfaces.INeutronVpnManager;
import org.opendaylight.ovsdb.utils.southbound.utils.SouthboundUtils;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.interfaces.rev140508.InterfacesState;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.interfaces.rev140508.interfaces.state.Interface;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.yang.types.rev130715.Uuid;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowId;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.instruction.list.Instruction;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.meta.rev160406.BridgeInterfaceInfo;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.meta.rev160406.BridgeRefInfo;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.meta.rev160406.bridge._interface.info.BridgeEntry;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.meta.rev160406.bridge._interface.info.BridgeEntryKey;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.meta.rev160406.bridge.ref.info.BridgeRefEntry;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.meta.rev160406.bridge.ref.info.BridgeRefEntryKey;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.rpcs.rev160406.GetDpidFromInterfaceInput;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.rpcs.rev160406.GetDpidFromInterfaceInputBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.rpcs.rev160406.GetDpidFromInterfaceOutput;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.rpcs.rev160406.OdlInterfaceRpcService;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.servicebinding.rev160406.ServiceBindings;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.servicebinding.rev160406.ServiceModeIngress;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.servicebinding.rev160406.ServiceTypeFlowBased;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.servicebinding.rev160406.StypeOpenflow;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.servicebinding.rev160406.StypeOpenflowBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.servicebinding.rev160406.service.bindings.ServicesInfo;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.servicebinding.rev160406.service.bindings.ServicesInfoKey;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.servicebinding.rev160406.service.bindings.services.info.BoundServices;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.servicebinding.rev160406.service.bindings.services.info.BoundServicesBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.servicebinding.rev160406.service.bindings.services.info.BoundServicesKey;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeConnectorId;
import org.opendaylight.yang.gen.v1.urn.opendaylight.netvirt.neutronvpn.rev150602.NetworkMaps;
import org.opendaylight.yang.gen.v1.urn.opendaylight.netvirt.neutronvpn.rev150602.Subnetmaps;
import org.opendaylight.yang.gen.v1.urn.opendaylight.netvirt.neutronvpn.rev150602.networkmaps.NetworkMap;
import org.opendaylight.yang.gen.v1.urn.opendaylight.netvirt.neutronvpn.rev150602.networkmaps.NetworkMapKey;
import org.opendaylight.yang.gen.v1.urn.opendaylight.netvirt.neutronvpn.rev150602.subnetmaps.Subnetmap;
import org.opendaylight.yang.gen.v1.urn.opendaylight.netvirt.neutronvpn.rev150602.subnetmaps.SubnetmapKey;
import org.opendaylight.yang.gen.v1.urn.opendaylight.neutron.networks.rev150712.networks.attributes.networks.Network;
import org.opendaylight.yang.gen.v1.urn.opendaylight.neutron.ports.rev150712.port.attributes.FixedIps;
import org.opendaylight.yang.gen.v1.urn.opendaylight.neutron.ports.rev150712.ports.attributes.ports.Port;
import org.opendaylight.yang.gen.v1.urn.opendaylight.neutron.qos.ext.rev160613.QosNetworkExtension;
import org.opendaylight.yang.gen.v1.urn.opendaylight.neutron.qos.ext.rev160613.QosPortExtension;
import org.opendaylight.yang.gen.v1.urn.opendaylight.neutron.qos.rev160613.qos.attributes.qos.policies.QosPolicy;
import org.opendaylight.yang.gen.v1.urn.opendaylight.neutron.qos.rev160613.qos.attributes.qos.policies.qos.policy.BandwidthLimitRules;
import org.opendaylight.yang.gen.v1.urn.opendaylight.neutron.qos.rev160613.qos.attributes.qos.policies.qos.policy.BandwidthLimitRulesBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.neutron.qos.rev160613.qos.attributes.qos.policies.qos.policy.DscpmarkingRules;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.ovsdb.rev150105.OvsdbBridgeRef;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.ovsdb.rev150105.OvsdbTerminationPointAugmentation;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.ovsdb.rev150105.OvsdbTerminationPointAugmentationBuilder;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.NetworkTopology;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.Topology;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.TopologyKey;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Node;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.node.TerminationPoint;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.node.TerminationPointBuilder;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.node.TerminationPointKey;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.opendaylight.yangtools.yang.common.RpcResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
public class QosNeutronUtils {
private static final Logger LOG = LoggerFactory.getLogger(QosNeutronUtils.class);
private final ConcurrentMap<Uuid, QosPolicy> qosPolicyMap = new ConcurrentHashMap<>();
private final ConcurrentMap<Uuid, ConcurrentMap<Uuid, Port>> qosPortsMap = new ConcurrentHashMap<>();
private final ConcurrentMap<Uuid, ConcurrentMap<Uuid, Network>> qosNetworksMap = new ConcurrentHashMap<>();
private final CopyOnWriteArraySet<Uuid> qosServiceConfiguredPorts = new CopyOnWriteArraySet<>();
private final ConcurrentHashMap<Uuid, Port> neutronPortMap = new ConcurrentHashMap<>();
private final ConcurrentHashMap<Uuid, Network> neutronNetworkMap = new ConcurrentHashMap<>();
private final QosEosHandler qosEosHandler;
private final INeutronVpnManager neutronVpnManager;
private final OdlInterfaceRpcService odlInterfaceRpcService;
private final DataBroker dataBroker;
private final ManagedNewTransactionRunner txRunner;
private final IMdsalApiManager mdsalUtils;
private final JobCoordinator jobCoordinator;
@Inject
public QosNeutronUtils(final QosEosHandler qosEosHandler, final INeutronVpnManager neutronVpnManager,
final OdlInterfaceRpcService odlInterfaceRpcService, final DataBroker dataBroker,
final IMdsalApiManager mdsalUtils, final JobCoordinator jobCoordinator) {
this.qosEosHandler = qosEosHandler;
this.neutronVpnManager = neutronVpnManager;
this.odlInterfaceRpcService = odlInterfaceRpcService;
this.dataBroker = dataBroker;
this.txRunner = new ManagedNewTransactionRunnerImpl(dataBroker);
this.mdsalUtils = mdsalUtils;
this.jobCoordinator = jobCoordinator;
}
public void addToQosPolicyCache(QosPolicy qosPolicy) {
        qosPolicyMap.put(qosPolicy.getUuid(), qosPolicy);
}
public void removeFromQosPolicyCache(QosPolicy qosPolicy) {
qosPolicyMap.remove(qosPolicy.getUuid());
}
public Map<Uuid, QosPolicy> getQosPolicyMap() {
return qosPolicyMap;
}
public Collection<Port> getQosPorts(Uuid qosUuid) {
final ConcurrentMap<Uuid, Port> portMap = qosPortsMap.get(qosUuid);
return portMap != null ? portMap.values() : Collections.emptyList();
}
public void addToQosPortsCache(Uuid qosUuid, Port port) {
qosPortsMap.computeIfAbsent(qosUuid, key -> new ConcurrentHashMap<>()).putIfAbsent(port.getUuid(), port);
}
public void removeFromQosPortsCache(Uuid qosUuid, Port port) {
if (qosPortsMap.containsKey(qosUuid) && qosPortsMap.get(qosUuid).containsKey(port.getUuid())) {
qosPortsMap.get(qosUuid).remove(port.getUuid(), port);
}
}
public void addToQosNetworksCache(Uuid qosUuid, Network network) {
qosNetworksMap.computeIfAbsent(qosUuid, key -> new ConcurrentHashMap<>()).putIfAbsent(network.getUuid(),
network);
}
public void removeFromQosNetworksCache(Uuid qosUuid, Network network) {
if (qosNetworksMap.containsKey(qosUuid) && qosNetworksMap.get(qosUuid).containsKey(network.getUuid())) {
qosNetworksMap.get(qosUuid).remove(network.getUuid(), network);
}
}
@Nonnull
public Collection<Network> getQosNetworks(Uuid qosUuid) {
final ConcurrentMap<Uuid, Network> networkMap = qosNetworksMap.get(qosUuid);
return networkMap != null ? networkMap.values() : Collections.emptyList();
}
@Nonnull
public List<Uuid> getSubnetIdsFromNetworkId(Uuid networkId) {
InstanceIdentifier<NetworkMap> networkMapId = InstanceIdentifier.builder(NetworkMaps.class)
.child(NetworkMap.class, new NetworkMapKey(networkId)).build();
Optional<NetworkMap> optionalNetworkMap = MDSALUtil.read(LogicalDatastoreType.CONFIGURATION,
networkMapId, dataBroker);
return optionalNetworkMap.isPresent() ? optionalNetworkMap.get().getSubnetIdList() : Collections.emptyList();
}
@Nonnull
protected List<Uuid> getPortIdsFromSubnetId(Uuid subnetId) {
InstanceIdentifier<Subnetmap> subnetMapId = InstanceIdentifier
.builder(Subnetmaps.class)
.child(Subnetmap.class, new SubnetmapKey(subnetId)).build();
Optional<Subnetmap> optionalSubnetmap = MDSALUtil.read(LogicalDatastoreType.CONFIGURATION,
                subnetMapId, dataBroker);
return optionalSubnetmap.isPresent() ? optionalSubnetmap.get().getPortList() : Collections.emptyList();
}
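    // Descriptive note: the two lookups above form the traversal used by the
    // handleNeutronNetworkQos* methods further below, i.e.
    //     networkId -> getSubnetIdsFromNetworkId() -> getPortIdsFromSubnetId() -> per-port jobs.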
public void handleNeutronPortQosAdd(Port port, Uuid qosUuid) {
LOG.debug("Handling Port add and QoS associated: port: {} qos: {}", port.getUuid().getValue(),
qosUuid.getValue());
QosPolicy qosPolicy = qosPolicyMap.get(qosUuid);
jobCoordinator.enqueueJob("QosPort-" + port.getUuid().getValue(),
() -> Collections.singletonList(txRunner.callWithNewWriteOnlyTransactionAndSubmit(tx -> {
// handle Bandwidth Limit Rules update
if (qosPolicy != null && qosPolicy.getBandwidthLimitRules() != null
&& !qosPolicy.getBandwidthLimitRules().isEmpty()) {
setPortBandwidthLimits(port, qosPolicy.getBandwidthLimitRules().get(0), tx);
}
// handle DSCP Mark Rules update
if (qosPolicy != null && qosPolicy.getDscpmarkingRules() != null
&& !qosPolicy.getDscpmarkingRules().isEmpty()) {
setPortDscpMarking(port, qosPolicy.getDscpmarkingRules().get(0));
}
})));
}
public void handleQosInterfaceAdd(Port port, Uuid qosUuid) {
LOG.debug("Handling Port add and QoS associated: port: {} qos: {}", port.getUuid().getValue(),
qosUuid.getValue());
QosPolicy qosPolicy = qosPolicyMap.get(qosUuid);
jobCoordinator.enqueueJob("QosPort-" + port.getUuid().getValue(), () -> {
// handle DSCP Mark Rules update
if (qosPolicy != null && qosPolicy.getDscpmarkingRules() != null
&& !qosPolicy.getDscpmarkingRules().isEmpty()) {
setPortDscpMarking(port, qosPolicy.getDscpmarkingRules().get(0));
}
return Collections.emptyList();
});
}
public void handleNeutronPortQosUpdate(Port port, Uuid qosUuidNew, Uuid qosUuidOld) {
LOG.debug("Handling Port QoS update: port: {} qosservice: {}", port.getUuid().getValue(),
qosUuidNew.getValue());
QosPolicy qosPolicyNew = qosPolicyMap.get(qosUuidNew);
QosPolicy qosPolicyOld = qosPolicyMap.get(qosUuidOld);
jobCoordinator.enqueueJob("QosPort-" + port.getUuid().getValue(),
() -> Collections.singletonList(txRunner.callWithNewWriteOnlyTransactionAndSubmit(tx -> {
// handle Bandwidth Limit Rules update
if (qosPolicyNew != null && qosPolicyNew.getBandwidthLimitRules() != null
&& !qosPolicyNew.getBandwidthLimitRules().isEmpty()) {
setPortBandwidthLimits(port, qosPolicyNew.getBandwidthLimitRules().get(0), tx);
} else {
if (qosPolicyOld != null && qosPolicyOld.getBandwidthLimitRules() != null
&& !qosPolicyOld.getBandwidthLimitRules().isEmpty()) {
BandwidthLimitRulesBuilder bwLimitBuilder = new BandwidthLimitRulesBuilder();
setPortBandwidthLimits(port, bwLimitBuilder
.setMaxBurstKbps(BigInteger.ZERO)
.setMaxKbps(BigInteger.ZERO).build(), tx);
}
}
//handle DSCP Mark Rules update
if (qosPolicyNew != null && qosPolicyNew.getDscpmarkingRules() != null
&& !qosPolicyNew.getDscpmarkingRules().isEmpty()) {
setPortDscpMarking(port, qosPolicyNew.getDscpmarkingRules().get(0));
} else {
if (qosPolicyOld != null && qosPolicyOld.getDscpmarkingRules() != null
&& !qosPolicyOld.getDscpmarkingRules().isEmpty()) {
unsetPortDscpMark(port);
}
}
})));
}
public void handleNeutronPortQosRemove(Port port, Uuid qosUuid) {
LOG.debug("Handling Port QoS removal: port: {} qosservice: {}", port.getUuid().getValue(), qosUuid.getValue());
// check for network qosservice to apply
Network network = neutronVpnManager.getNeutronNetwork(port.getNetworkId());
if (network != null && network.augmentation(QosNetworkExtension.class) != null) {
Uuid networkQosUuid = network.augmentation(QosNetworkExtension.class).getQosPolicyId();
if (networkQosUuid != null) {
handleNeutronPortQosUpdate(port, networkQosUuid, qosUuid);
}
} else {
QosPolicy qosPolicy = qosPolicyMap.get(qosUuid);
jobCoordinator.enqueueJob("QosPort-" + port.getUuid().getValue(),
() -> Collections.singletonList(txRunner.callWithNewWriteOnlyTransactionAndSubmit(tx -> {
// handle Bandwidth Limit Rules removal
if (qosPolicy != null && qosPolicy.getBandwidthLimitRules() != null
&& !qosPolicy.getBandwidthLimitRules().isEmpty()) {
BandwidthLimitRulesBuilder bwLimitBuilder = new BandwidthLimitRulesBuilder();
setPortBandwidthLimits(port, bwLimitBuilder
.setMaxBurstKbps(BigInteger.ZERO)
.setMaxKbps(BigInteger.ZERO).build(), tx);
}
                        // handle DSCP Mark Rules removal
if (qosPolicy != null && qosPolicy.getDscpmarkingRules() != null
&& !qosPolicy.getDscpmarkingRules().isEmpty()) {
unsetPortDscpMark(port);
}
})));
}
}
public void handleNeutronPortRemove(Port port, Uuid qosUuid) {
LOG.debug("Handling Port removal and Qos associated: port: {} qos: {}", port.getUuid().getValue(),
qosUuid.getValue());
QosPolicy qosPolicy = qosPolicyMap.get(qosUuid);
jobCoordinator.enqueueJob("QosPort-" + port.getUuid().getValue(), () -> {
//check if any DSCP rule in the policy
if (qosPolicy != null && qosPolicy.getDscpmarkingRules() != null
&& !qosPolicy.getDscpmarkingRules().isEmpty()) {
unsetPortDscpMark(port);
}
return Collections.emptyList();
});
}
public void handleNeutronPortRemove(Port port, Uuid qosUuid, Interface intrf) {
LOG.debug("Handling Port removal and Qos associated: port: {} qos: {}", port.getUuid().getValue(),
qosUuid.getValue());
QosPolicy qosPolicy = qosPolicyMap.get(qosUuid);
jobCoordinator.enqueueJob("QosPort-" + port.getUuid().getValue(), () -> {
if (qosPolicy != null && qosPolicy.getDscpmarkingRules() != null
&& !qosPolicy.getDscpmarkingRules().isEmpty()) {
unsetPortDscpMark(port, intrf);
}
return Collections.emptyList();
});
}
public void handleNeutronNetworkQosUpdate(Network network, Uuid qosUuid) {
LOG.debug("Handling Network QoS update: net: {} qosservice: {}", network.getUuid().getValue(), qosUuid);
QosPolicy qosPolicy = qosPolicyMap.get(qosUuid);
        if (qosPolicy == null || ((qosPolicy.getBandwidthLimitRules() == null
                || qosPolicy.getBandwidthLimitRules().isEmpty())
                && (qosPolicy.getDscpmarkingRules() == null
                || qosPolicy.getDscpmarkingRules().isEmpty()))) {
return;
}
List<Uuid> subnetIds = getSubnetIdsFromNetworkId(network.getUuid());
for (Uuid subnetId : subnetIds) {
List<Uuid> portIds = getPortIdsFromSubnetId(subnetId);
for (Uuid portId : portIds) {
Port port = getNeutronPort(portId);
if (port != null && (port.augmentation(QosPortExtension.class) == null
|| port.augmentation(QosPortExtension.class).getQosPolicyId() == null)) {
jobCoordinator.enqueueJob("QosPort-" + portId.getValue(),
() -> Collections.singletonList(txRunner.callWithNewWriteOnlyTransactionAndSubmit(tx -> {
if (qosPolicy.getBandwidthLimitRules() != null
&& !qosPolicy.getBandwidthLimitRules().isEmpty()) {
setPortBandwidthLimits(port, qosPolicy.getBandwidthLimitRules().get(0), tx);
}
if (qosPolicy.getDscpmarkingRules() != null
&& !qosPolicy.getDscpmarkingRules().isEmpty()) {
setPortDscpMarking(port, qosPolicy.getDscpmarkingRules().get(0));
}
})));
}
}
}
}
public void handleNeutronNetworkQosRemove(Network network, Uuid qosUuid) {
LOG.debug("Handling Network QoS removal: net: {} qosservice: {}", network.getUuid().getValue(),
qosUuid.getValue());
QosPolicy qosPolicy = qosPolicyMap.get(qosUuid);
List<Uuid> subnetIds = getSubnetIdsFromNetworkId(network.getUuid());
for (Uuid subnetId : subnetIds) {
List<Uuid> portIds = getPortIdsFromSubnetId(subnetId);
for (Uuid portId : portIds) {
Port port = getNeutronPort(portId);
if (port != null && (port.augmentation(QosPortExtension.class) == null
|| port.augmentation(QosPortExtension.class).getQosPolicyId() == null)) {
jobCoordinator.enqueueJob("QosPort-" + portId.getValue(),
() -> Collections.singletonList(txRunner.callWithNewWriteOnlyTransactionAndSubmit(tx -> {
if (qosPolicy != null && qosPolicy.getBandwidthLimitRules() != null
&& !qosPolicy.getBandwidthLimitRules().isEmpty()) {
BandwidthLimitRulesBuilder bwLimitBuilder = new BandwidthLimitRulesBuilder();
setPortBandwidthLimits(port, bwLimitBuilder
.setMaxBurstKbps(BigInteger.ZERO)
.setMaxKbps(BigInteger.ZERO).build(), tx);
}
if (qosPolicy != null && qosPolicy.getDscpmarkingRules() != null
&& !qosPolicy.getDscpmarkingRules().isEmpty()) {
unsetPortDscpMark(port);
}
})));
}
}
}
}
public void handleNeutronNetworkQosBwRuleRemove(Network network, BandwidthLimitRules zeroBwLimitRule) {
LOG.debug("Handling Qos Bandwidth Rule Remove, net: {}", network.getUuid().getValue());
List<Uuid> subnetIds = getSubnetIdsFromNetworkId(network.getUuid());
for (Uuid subnetId: subnetIds) {
List<Uuid> portIds = getPortIdsFromSubnetId(subnetId);
for (Uuid portId : portIds) {
Port port = getNeutronPort(portId);
if (port != null && (port.augmentation(QosPortExtension.class) == null
|| port.augmentation(QosPortExtension.class).getQosPolicyId() == null)) {
jobCoordinator.enqueueJob("QosPort-" + portId.getValue(), () -> Collections.singletonList(
txRunner.callWithNewWriteOnlyTransactionAndSubmit(
tx -> setPortBandwidthLimits(port, zeroBwLimitRule, tx))));
}
}
}
}
public void handleNeutronNetworkQosDscpRuleRemove(Network network) {
LOG.debug("Handling Qos Dscp Rule Remove, net: {}", network.getUuid().getValue());
List<Uuid> subnetIds = getSubnetIdsFromNetworkId(network.getUuid());
for (Uuid subnetId: subnetIds) {
List<Uuid> portIds = getPortIdsFromSubnetId(subnetId);
for (Uuid portId : portIds) {
Port port = getNeutronPort(portId);
if (port != null && (port.augmentation(QosPortExtension.class) == null
|| port.augmentation(QosPortExtension.class).getQosPolicyId() == null)) {
jobCoordinator.enqueueJob("QosPort-" + portId.getValue(), () -> {
unsetPortDscpMark(port);
return Collections.emptyList();
});
}
}
}
}
// TODO Clean up the exception handling
@SuppressWarnings("checkstyle:IllegalCatch")
public void setPortBandwidthLimits(Port port, BandwidthLimitRules bwLimit, WriteTransaction writeConfigTxn) {
if (!qosEosHandler.isQosClusterOwner()) {
LOG.debug("Not Qos Cluster Owner. Ignoring setting bandwidth limits");
return;
}
BigInteger dpId = getDpnForInterface(port.getUuid().getValue());
if (dpId.equals(BigInteger.ZERO)) {
LOG.info("DPN ID for interface {} not found", port.getUuid().getValue());
return;
}
LOG.trace("Setting bandwidth limits {} on Port {}", port.getUuid().getValue(), bwLimit);
OvsdbBridgeRef bridgeRefEntry = getBridgeRefEntryFromOperDS(dpId);
Optional<Node> bridgeNode = MDSALUtil.read(LogicalDatastoreType.OPERATIONAL,
bridgeRefEntry.getValue().firstIdentifierOf(Node.class), dataBroker);
if (!bridgeNode.isPresent()) {
LOG.error("bridge not found for dpn {} port {} in operational datastore", dpId, port.getUuid().getValue());
return;
}
LOG.debug("bridgeNode {}", bridgeNode.get().getNodeId().getValue());
TerminationPoint tp = SouthboundUtils.getTerminationPointByExternalId(bridgeNode.get(),
port.getUuid().getValue());
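// Rate limit rules apply only at the trunk (parent) port level, not per subport; a null
// termination point for this port's external id indicates a subport, so bandwidth limit
// rules are skipped for it.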
if (tp == null) {
LOG.debug("Skipping setting of bandwidth limit rules for subport {}",
port.getUuid().getValue());
return;
}
LOG.debug("tp: {}", tp.getTpId().getValue());
OvsdbTerminationPointAugmentation ovsdbTp = tp.augmentation(OvsdbTerminationPointAugmentation.class);
OvsdbTerminationPointAugmentationBuilder tpAugmentationBuilder = new OvsdbTerminationPointAugmentationBuilder();
tpAugmentationBuilder.setName(ovsdbTp.getName());
tpAugmentationBuilder.setIngressPolicingRate(bwLimit.getMaxKbps().longValue());
tpAugmentationBuilder.setIngressPolicingBurst(bwLimit.getMaxBurstKbps().longValue());
TerminationPointBuilder tpBuilder = new TerminationPointBuilder();
tpBuilder.withKey(tp.key());
tpBuilder.addAugmentation(OvsdbTerminationPointAugmentation.class, tpAugmentationBuilder.build());
try {
if (writeConfigTxn != null) {
writeConfigTxn.merge(LogicalDatastoreType.CONFIGURATION, InstanceIdentifier
.create(NetworkTopology.class)
.child(Topology.class, new TopologyKey(SouthboundUtils.OVSDB_TOPOLOGY_ID))
.child(Node.class, bridgeNode.get().key())
.child(TerminationPoint.class, new TerminationPointKey(tp.key())), tpBuilder.build(), true);
} else {
MDSALUtil.syncUpdate(dataBroker, LogicalDatastoreType.CONFIGURATION, InstanceIdentifier
.create(NetworkTopology.class)
.child(Topology.class, new TopologyKey(SouthboundUtils.OVSDB_TOPOLOGY_ID))
.child(Node.class, bridgeNode.get().key())
.child(TerminationPoint.class, new TerminationPointKey(tp.key())), tpBuilder.build());
}
} catch (Exception e) {
if (LOG.isDebugEnabled()) {
LOG.debug("Failure while setting BwLimitRule {} to port {} exception ", bwLimit,
port.getUuid().getValue(), e);
} else {
LOG.error("Failure while setting BwLimitRule {} to port {}", bwLimit, port.getUuid().getValue());
}
}
}
public void setPortDscpMarking(Port port, DscpmarkingRules dscpMark) {
if (!qosEosHandler.isQosClusterOwner()) {
LOG.trace("Not Qos Cluster Owner. Ignoring setting DSCP marking");
return;
}
BigInteger dpnId = getDpnForInterface(port.getUuid().getValue());
String ifName = port.getUuid().getValue();
Interface ifState = getInterfaceStateFromOperDS(ifName);
Short dscpValue = dscpMark.getDscpMark();
if (dpnId.equals(BigInteger.ZERO)) {
LOG.info("DPN ID for interface {} not found. Cannot set dscp value {} on port {}",
port.getUuid().getValue(), dscpMark, port.getUuid().getValue());
return;
}
int ipVersions = getIpVersions(port);
//1. OF rules
if (hasIpv4Addr(ipVersions)) {
LOG.trace("setting ipv4 flow for port: {}, dscp: {}", ifName, dscpValue);
addFlow(dpnId, dscpValue, ifName, NwConstants.ETHTYPE_IPV4, ifState);
}
if (hasIpv6Addr(ipVersions)) {
LOG.trace("setting ipv6 flow for port: {}, dscp: {}", ifName, dscpValue);
addFlow(dpnId, dscpValue, ifName, NwConstants.ETHTYPE_IPV6, ifState);
}
if (qosServiceConfiguredPorts.add(port.getUuid())) {
// bind qos service to interface
bindservice(ifName);
}
}
public void unsetPortDscpMark(Port port) {
if (!qosEosHandler.isQosClusterOwner()) {
LOG.debug("Not Qos Cluster Owner. Ignoring unsetting DSCP marking");
return;
}
BigInteger dpnId = getDpnForInterface(port.getUuid().getValue());
String ifName = port.getUuid().getValue();
if (dpnId.equals(BigInteger.ZERO)) {
LOG.debug("DPN ID for port {} not found. Cannot unset dscp value", port.getUuid().getValue());
return;
}
LOG.trace("Removing dscp marking rule from Port {}", port.getUuid().getValue());
Interface intf = getInterfaceStateFromOperDS(ifName);
//unbind service from interface
unbindservice(ifName);
// 1. OF
int ipVersions = getIpVersions(port);
if (hasIpv4Addr(ipVersions)) {
removeFlow(dpnId, ifName, NwConstants.ETHTYPE_IPV4, intf);
}
if (hasIpv6Addr(ipVersions)) {
removeFlow(dpnId, ifName, NwConstants.ETHTYPE_IPV6, intf);
}
qosServiceConfiguredPorts.remove(port.getUuid());
}
public void unsetPortDscpMark(Port port, Interface intrf) {
if (!qosEosHandler.isQosClusterOwner()) {
return;
}
BigInteger dpnId = getDpIdFromInterface(intrf);
String ifName = port.getUuid().getValue();
if (dpnId.equals(BigInteger.ZERO)) {
LOG.error("Unable to retrieve DPN Id for interface {}. Cannot unset dscp value on port", ifName);
return;
}
LOG.trace("Removing dscp marking rule from Port {}", port.getUuid().getValue());
unbindservice(ifName);
int ipVersions = getIpVersions(port);
if (hasIpv4Addr(ipVersions)) {
removeFlow(dpnId, ifName, NwConstants.ETHTYPE_IPV4, intrf);
}
if (hasIpv6Addr(ipVersions)) {
removeFlow(dpnId, ifName, NwConstants.ETHTYPE_IPV6, intrf);
}
qosServiceConfiguredPorts.remove(port.getUuid());
}
private static BigInteger getDpIdFromInterface(org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf
.interfaces.rev140508.interfaces.state.Interface ifState) {
String lowerLayerIf = ifState.getLowerLayerIf().get(0);
NodeConnectorId nodeConnectorId = new NodeConnectorId(lowerLayerIf);
return BigInteger.valueOf(MDSALUtil.getDpnIdFromPortName(nodeConnectorId));
}
public BigInteger getDpnForInterface(String ifName) {
BigInteger nodeId = BigInteger.ZERO;
try {
GetDpidFromInterfaceInput
dpIdInput = new GetDpidFromInterfaceInputBuilder().setIntfName(ifName).build();
Future<RpcResult<GetDpidFromInterfaceOutput>>
dpIdOutput = odlInterfaceRpcService.getDpidFromInterface(dpIdInput);
RpcResult<GetDpidFromInterfaceOutput> dpIdResult = dpIdOutput.get();
if (dpIdResult.isSuccessful()) {
nodeId = dpIdResult.getResult().getDpid();
} else {
LOG.error("Could not retrieve DPN Id for interface {}", ifName);
}
} catch (NullPointerException | InterruptedException | ExecutionException e) {
if (LOG.isDebugEnabled()) {
LOG.debug("Exception when getting DPN for interface {} exception ", ifName, e);
} else {
LOG.error("Could not retrieve DPN for interface {}", ifName);
}
}
return nodeId;
}
@Nullable
private BridgeEntry getBridgeEntryFromConfigDS(BigInteger dpnId) {
BridgeEntryKey bridgeEntryKey = new BridgeEntryKey(dpnId);
InstanceIdentifier<BridgeEntry> bridgeEntryInstanceIdentifier = getBridgeEntryIdentifier(bridgeEntryKey);
LOG.debug("Trying to retrieve bridge entry from config for Id: {}", bridgeEntryInstanceIdentifier);
return getBridgeEntryFromConfigDS(bridgeEntryInstanceIdentifier);
}
@Nullable
private BridgeEntry getBridgeEntryFromConfigDS(InstanceIdentifier<BridgeEntry> bridgeEntryInstanceIdentifier) {
return MDSALUtil.read(LogicalDatastoreType.CONFIGURATION, bridgeEntryInstanceIdentifier, dataBroker).orNull();
}
@Nullable
private BridgeRefEntry getBridgeRefEntryFromOperDS(InstanceIdentifier<BridgeRefEntry> dpnBridgeEntryIid) {
return MDSALUtil.read(LogicalDatastoreType.OPERATIONAL, dpnBridgeEntryIid, dataBroker).orNull();
}
@Nullable
private OvsdbBridgeRef getBridgeRefEntryFromOperDS(BigInteger dpId) {
BridgeRefEntryKey bridgeRefEntryKey = new BridgeRefEntryKey(dpId);
InstanceIdentifier<BridgeRefEntry> bridgeRefEntryIid = getBridgeRefEntryIdentifier(bridgeRefEntryKey);
BridgeRefEntry bridgeRefEntry = getBridgeRefEntryFromOperDS(bridgeRefEntryIid);
if (bridgeRefEntry == null) {
// bridge ref entry will be null if the bridge is disconnected from controller.
// In that case, fetch bridge reference from bridge interface entry config DS
BridgeEntry bridgeEntry = getBridgeEntryFromConfigDS(dpId);
if (bridgeEntry == null) {
return null;
}
return bridgeEntry.getBridgeReference();
}
return bridgeRefEntry.getBridgeReference();
}
@Nonnull
private static InstanceIdentifier<BridgeRefEntry> getBridgeRefEntryIdentifier(BridgeRefEntryKey bridgeRefEntryKey) {
return InstanceIdentifier.builder(BridgeRefInfo.class).child(BridgeRefEntry.class, bridgeRefEntryKey).build();
}
@Nonnull
private static InstanceIdentifier<BridgeEntry> getBridgeEntryIdentifier(BridgeEntryKey bridgeEntryKey) {
return InstanceIdentifier.builder(BridgeInterfaceInfo.class).child(BridgeEntry.class, bridgeEntryKey).build();
}
public void removeStaleFlowEntry(Interface intrf, int ethType) {
List<MatchInfo> matches = new ArrayList<>();
BigInteger dpnId = getDpIdFromInterface(intrf);
Integer ifIndex = intrf.getIfIndex();
matches.add(new MatchMetadata(MetaDataUtil.getLportTagMetaData(ifIndex), MetaDataUtil.METADATA_MASK_LPORT_TAG));
FlowEntity flowEntity = MDSALUtil.buildFlowEntity(dpnId, NwConstants.QOS_DSCP_TABLE,
getQosFlowId(NwConstants.QOS_DSCP_TABLE, dpnId, ifIndex, ethType),
QosConstants.QOS_DEFAULT_FLOW_PRIORITY, "QoSRemoveFlow", 0, 0, NwConstants.COOKIE_QOS_TABLE,
matches, null);
mdsalUtils.removeFlow(flowEntity);
}
public void addFlow(BigInteger dpnId, Short dscpValue, String ifName, int ethType, Interface ifState) {
if (ifState == null) {
LOG.debug("Could not find the ifState for interface {}", ifName);
return;
}
Integer ifIndex = ifState.getIfIndex();
List<MatchInfo> matches = new ArrayList<>();
matches.add(new MatchEthernetType(ethType));
matches.add(new MatchMetadata(MetaDataUtil.getLportTagMetaData(ifIndex), MetaDataUtil.METADATA_MASK_LPORT_TAG));
List<ActionInfo> actionsInfos = new ArrayList<>();
actionsInfos.add(new ActionSetFieldDscp(dscpValue));
actionsInfos.add(new ActionNxResubmit(NwConstants.LPORT_DISPATCHER_TABLE));
List<InstructionInfo> instructions = Collections.singletonList(new InstructionApplyActions(actionsInfos));
FlowEntity flowEntity = MDSALUtil.buildFlowEntity(dpnId, NwConstants.QOS_DSCP_TABLE,
getQosFlowId(NwConstants.QOS_DSCP_TABLE, dpnId, ifIndex, ethType),
QosConstants.QOS_DEFAULT_FLOW_PRIORITY, "QoSConfigFlow", 0, 0, NwConstants.COOKIE_QOS_TABLE,
matches, instructions);
mdsalUtils.installFlow(flowEntity);
}
public void removeFlow(BigInteger dpnId, String ifName, int ethType, Interface ifState) {
if (ifState == null) {
LOG.debug("Could not find the ifState for interface {}", ifName);
return;
}
Integer ifIndex = ifState.getIfIndex();
mdsalUtils.removeFlow(dpnId, NwConstants.QOS_DSCP_TABLE,
new FlowId(getQosFlowId(NwConstants.QOS_DSCP_TABLE, dpnId, ifIndex, ethType)));
}
@Nullable
public org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang
.ietf.interfaces.rev140508.interfaces.state.Interface getInterfaceStateFromOperDS(
String interfaceName) {
return MDSALUtil.read(dataBroker, LogicalDatastoreType.OPERATIONAL,
createInterfaceStateInstanceIdentifier(interfaceName)).orNull();
}
@Nonnull
public static InstanceIdentifier<org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang
.ietf.interfaces.rev140508.interfaces.state.Interface> createInterfaceStateInstanceIdentifier(
String interfaceName) {
return InstanceIdentifier
.builder(InterfacesState.class)
.child(org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang
.ietf.interfaces.rev140508.interfaces.state.Interface.class,
new org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang
.ietf.interfaces.rev140508.interfaces.state.InterfaceKey(
interfaceName))
.build();
}
public void bindservice(String ifName) {
int priority = QosConstants.QOS_DEFAULT_FLOW_PRIORITY;
int instructionKey = 0;
List<Instruction> instructions = new ArrayList<>();
instructions.add(MDSALUtil.buildAndGetGotoTableInstruction(NwConstants.QOS_DSCP_TABLE, ++instructionKey));
short qosServiceIndex = ServiceIndex.getIndex(NwConstants.QOS_SERVICE_NAME, NwConstants.QOS_SERVICE_INDEX);
BoundServices serviceInfo = getBoundServices(
String.format("%s.%s", "qos", ifName), qosServiceIndex,
priority, NwConstants.COOKIE_QOS_TABLE, instructions);
MDSALUtil.syncWrite(dataBroker, LogicalDatastoreType.CONFIGURATION,
buildServiceId(ifName, qosServiceIndex),
serviceInfo);
}
public void unbindservice(String ifName) {
MDSALUtil.syncDelete(dataBroker, LogicalDatastoreType.CONFIGURATION, buildServiceId(ifName,
ServiceIndex.getIndex(NwConstants.QOS_SERVICE_NAME, NwConstants.QOS_SERVICE_INDEX)));
}
private static InstanceIdentifier<BoundServices> buildServiceId(String interfaceName, short qosServiceIndex) {
return InstanceIdentifier.builder(ServiceBindings.class)
.child(ServicesInfo.class, new ServicesInfoKey(interfaceName, ServiceModeIngress.class))
.child(BoundServices.class, new BoundServicesKey(qosServiceIndex)).build();
}
private static BoundServices getBoundServices(String serviceName, short qosServiceIndex, int priority,
BigInteger cookieQosTable, List<Instruction> instructions) {
StypeOpenflowBuilder augBuilder = new StypeOpenflowBuilder().setFlowCookie(cookieQosTable)
.setFlowPriority(priority).setInstruction(instructions);
return new BoundServicesBuilder().withKey(new BoundServicesKey(qosServiceIndex)).setServiceName(serviceName)
.setServicePriority(qosServiceIndex).setServiceType(ServiceTypeFlowBased.class)
.addAugmentation(StypeOpenflow.class, augBuilder.build()).build();
}
@Nonnull
public static String getQosFlowId(short tableId, BigInteger dpId, int lportTag, int ethType) {
return new StringBuilder().append(tableId).append(NwConstants.FLOWID_SEPARATOR).append(dpId)
.append(NwConstants.FLOWID_SEPARATOR).append(lportTag)
.append(NwConstants.FLOWID_SEPARATOR).append(ethType).toString();
}
public boolean portHasQosPolicy(Port port) {
LOG.trace("checking qos policy for port: {}", port.getUuid().getValue());
boolean isQosPolicy = port.augmentation(QosPortExtension.class) != null
&& port.augmentation(QosPortExtension.class).getQosPolicyId() != null;
LOG.trace("portHasQosPolicy for port: {} return value {}", port.getUuid().getValue(), isQosPolicy);
return isQosPolicy;
}
@Nullable
public QosPolicy getQosPolicy(Port port) {
Uuid qosUuid = null;
QosPolicy qosPolicy = null;
if (port.augmentation(QosPortExtension.class) != null) {
qosUuid = port.augmentation(QosPortExtension.class).getQosPolicyId();
} else {
Network network = neutronVpnManager.getNeutronNetwork(port.getNetworkId());
if (network.augmentation(QosNetworkExtension.class) != null) {
qosUuid = network.augmentation(QosNetworkExtension.class).getQosPolicyId();
}
}
if (qosUuid != null) {
qosPolicy = qosPolicyMap.get(qosUuid);
}
return qosPolicy;
}
public boolean hasDscpMarkingRule(QosPolicy qosPolicy) {
if (qosPolicy != null) {
return qosPolicy.getDscpmarkingRules() != null && !qosPolicy.getDscpmarkingRules().isEmpty();
}
return false;
}
public void addToPortCache(Port port) {
neutronPortMap.put(port.getUuid(), port);
}
public void removeFromPortCache(Port port) {
neutronPortMap.remove(port.getUuid());
}
public Port getNeutronPort(Uuid portUuid) {
return neutronPortMap.get(portUuid);
}
public Port getNeutronPort(String portName) {
return getNeutronPort(new Uuid(portName));
}
public void addToNetworkCache(Network network) {
neutronNetworkMap.put(network.getUuid(), network);
}
public void removeFromNetworkCache(Network network) {
neutronNetworkMap.remove(network.getUuid());
}
public Network getNeutronNetwork(Uuid networkUuid) {
return neutronNetworkMap.get(networkUuid);
}
public static BigInteger getDpnIdFromLowerLayerIf(String lowerLayerIf) {
try {
return new BigInteger(lowerLayerIf.substring(lowerLayerIf.indexOf(":") + 1, lowerLayerIf.lastIndexOf(":")));
} catch (NullPointerException e) {
return null;
}
}
public static String getPortNumberFromLowerLayerIf(String lowerLayerIf) {
try {
return (lowerLayerIf.substring(lowerLayerIf.lastIndexOf(":") + 1));
} catch (NullPointerException e) {
return null;
}
}
public int getIpVersions(Port port) {
int versions = 0;
for (FixedIps fixedIp: port.getFixedIps()) {
if (fixedIp.getIpAddress().getIpv4Address() != null) {
versions |= (1 << QosConstants.IPV4_ADDR_MASK_BIT);
} else if (fixedIp.getIpAddress().getIpv6Address() != null) {
versions |= (1 << QosConstants.IPV6_ADDR_MASK_BIT);
}
}
return versions;
}
public boolean hasIpv4Addr(int versions) {
if ((versions & (1 << QosConstants.IPV4_ADDR_MASK_BIT)) != 0) {
return true;
}
return false;
}
public boolean hasIpv6Addr(int versions) {
if ((versions & (1 << QosConstants.IPV6_ADDR_MASK_BIT)) != 0) {
return true;
}
return false;
}
}
| qosservice/impl/src/main/java/org/opendaylight/netvirt/qosservice/QosNeutronUtils.java | /*
* Copyright (c) 2017 Intel Corporation and others. All rights reserved.
*
* This program and the accompanying materials are made available under the
* terms of the Eclipse Public License v1.0 which accompanies this distribution,
* and is available at http://www.eclipse.org/legal/epl-v10.html
*/
package org.opendaylight.netvirt.qosservice;
import com.google.common.base.Optional;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArraySet;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.inject.Inject;
import javax.inject.Singleton;
import org.opendaylight.controller.md.sal.binding.api.DataBroker;
import org.opendaylight.controller.md.sal.binding.api.WriteTransaction;
import org.opendaylight.controller.md.sal.common.api.data.LogicalDatastoreType;
import org.opendaylight.genius.infra.ManagedNewTransactionRunner;
import org.opendaylight.genius.infra.ManagedNewTransactionRunnerImpl;
import org.opendaylight.genius.mdsalutil.ActionInfo;
import org.opendaylight.genius.mdsalutil.FlowEntity;
import org.opendaylight.genius.mdsalutil.InstructionInfo;
import org.opendaylight.genius.mdsalutil.MDSALUtil;
import org.opendaylight.genius.mdsalutil.MatchInfo;
import org.opendaylight.genius.mdsalutil.MetaDataUtil;
import org.opendaylight.genius.mdsalutil.NwConstants;
import org.opendaylight.genius.mdsalutil.actions.ActionNxResubmit;
import org.opendaylight.genius.mdsalutil.actions.ActionSetFieldDscp;
import org.opendaylight.genius.mdsalutil.instructions.InstructionApplyActions;
import org.opendaylight.genius.mdsalutil.interfaces.IMdsalApiManager;
import org.opendaylight.genius.mdsalutil.matches.MatchEthernetType;
import org.opendaylight.genius.mdsalutil.matches.MatchMetadata;
import org.opendaylight.genius.utils.ServiceIndex;
import org.opendaylight.infrautils.jobcoordinator.JobCoordinator;
import org.opendaylight.netvirt.neutronvpn.interfaces.INeutronVpnManager;
import org.opendaylight.ovsdb.utils.southbound.utils.SouthboundUtils;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.interfaces.rev140508.InterfacesState;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.interfaces.rev140508.interfaces.state.Interface;
import org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf.yang.types.rev130715.Uuid;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.inventory.rev130819.FlowId;
import org.opendaylight.yang.gen.v1.urn.opendaylight.flow.types.rev131026.instruction.list.Instruction;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.meta.rev160406.BridgeInterfaceInfo;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.meta.rev160406.BridgeRefInfo;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.meta.rev160406.bridge._interface.info.BridgeEntry;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.meta.rev160406.bridge._interface.info.BridgeEntryKey;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.meta.rev160406.bridge.ref.info.BridgeRefEntry;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.meta.rev160406.bridge.ref.info.BridgeRefEntryKey;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.rpcs.rev160406.GetDpidFromInterfaceInput;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.rpcs.rev160406.GetDpidFromInterfaceInputBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.rpcs.rev160406.GetDpidFromInterfaceOutput;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.rpcs.rev160406.OdlInterfaceRpcService;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.servicebinding.rev160406.ServiceBindings;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.servicebinding.rev160406.ServiceModeIngress;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.servicebinding.rev160406.ServiceTypeFlowBased;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.servicebinding.rev160406.StypeOpenflow;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.servicebinding.rev160406.StypeOpenflowBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.servicebinding.rev160406.service.bindings.ServicesInfo;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.servicebinding.rev160406.service.bindings.ServicesInfoKey;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.servicebinding.rev160406.service.bindings.services.info.BoundServices;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.servicebinding.rev160406.service.bindings.services.info.BoundServicesBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.genius.interfacemanager.servicebinding.rev160406.service.bindings.services.info.BoundServicesKey;
import org.opendaylight.yang.gen.v1.urn.opendaylight.inventory.rev130819.NodeConnectorId;
import org.opendaylight.yang.gen.v1.urn.opendaylight.netvirt.neutronvpn.rev150602.NetworkMaps;
import org.opendaylight.yang.gen.v1.urn.opendaylight.netvirt.neutronvpn.rev150602.Subnetmaps;
import org.opendaylight.yang.gen.v1.urn.opendaylight.netvirt.neutronvpn.rev150602.networkmaps.NetworkMap;
import org.opendaylight.yang.gen.v1.urn.opendaylight.netvirt.neutronvpn.rev150602.networkmaps.NetworkMapKey;
import org.opendaylight.yang.gen.v1.urn.opendaylight.netvirt.neutronvpn.rev150602.subnetmaps.Subnetmap;
import org.opendaylight.yang.gen.v1.urn.opendaylight.netvirt.neutronvpn.rev150602.subnetmaps.SubnetmapKey;
import org.opendaylight.yang.gen.v1.urn.opendaylight.neutron.networks.rev150712.networks.attributes.networks.Network;
import org.opendaylight.yang.gen.v1.urn.opendaylight.neutron.ports.rev150712.port.attributes.FixedIps;
import org.opendaylight.yang.gen.v1.urn.opendaylight.neutron.ports.rev150712.ports.attributes.ports.Port;
import org.opendaylight.yang.gen.v1.urn.opendaylight.neutron.qos.ext.rev160613.QosNetworkExtension;
import org.opendaylight.yang.gen.v1.urn.opendaylight.neutron.qos.ext.rev160613.QosPortExtension;
import org.opendaylight.yang.gen.v1.urn.opendaylight.neutron.qos.rev160613.qos.attributes.qos.policies.QosPolicy;
import org.opendaylight.yang.gen.v1.urn.opendaylight.neutron.qos.rev160613.qos.attributes.qos.policies.qos.policy.BandwidthLimitRules;
import org.opendaylight.yang.gen.v1.urn.opendaylight.neutron.qos.rev160613.qos.attributes.qos.policies.qos.policy.BandwidthLimitRulesBuilder;
import org.opendaylight.yang.gen.v1.urn.opendaylight.neutron.qos.rev160613.qos.attributes.qos.policies.qos.policy.DscpmarkingRules;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.ovsdb.rev150105.OvsdbBridgeRef;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.ovsdb.rev150105.OvsdbTerminationPointAugmentation;
import org.opendaylight.yang.gen.v1.urn.opendaylight.params.xml.ns.yang.ovsdb.rev150105.OvsdbTerminationPointAugmentationBuilder;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.NetworkTopology;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.Topology;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.TopologyKey;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.Node;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.node.TerminationPoint;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.node.TerminationPointBuilder;
import org.opendaylight.yang.gen.v1.urn.tbd.params.xml.ns.yang.network.topology.rev131021.network.topology.topology.node.TerminationPointKey;
import org.opendaylight.yangtools.yang.binding.InstanceIdentifier;
import org.opendaylight.yangtools.yang.common.RpcResult;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
public class QosNeutronUtils {
private static final Logger LOG = LoggerFactory.getLogger(QosNeutronUtils.class);
private final ConcurrentMap<Uuid, QosPolicy> qosPolicyMap = new ConcurrentHashMap<>();
private final ConcurrentMap<Uuid, ConcurrentMap<Uuid, Port>> qosPortsMap = new ConcurrentHashMap<>();
private final ConcurrentMap<Uuid, ConcurrentMap<Uuid, Network>> qosNetworksMap = new ConcurrentHashMap<>();
private final CopyOnWriteArraySet<Uuid> qosServiceConfiguredPorts = new CopyOnWriteArraySet<>();
private final ConcurrentHashMap<Uuid, Port> neutronPortMap = new ConcurrentHashMap<>();
private final ConcurrentHashMap<Uuid, Network> neutronNetworkMap = new ConcurrentHashMap<>();
private final QosEosHandler qosEosHandler;
private final INeutronVpnManager neutronVpnManager;
private final OdlInterfaceRpcService odlInterfaceRpcService;
private final DataBroker dataBroker;
private final ManagedNewTransactionRunner txRunner;
private final IMdsalApiManager mdsalUtils;
private final JobCoordinator jobCoordinator;
@Inject
public QosNeutronUtils(final QosEosHandler qosEosHandler, final INeutronVpnManager neutronVpnManager,
final OdlInterfaceRpcService odlInterfaceRpcService, final DataBroker dataBroker,
final IMdsalApiManager mdsalUtils, final JobCoordinator jobCoordinator) {
this.qosEosHandler = qosEosHandler;
this.neutronVpnManager = neutronVpnManager;
this.odlInterfaceRpcService = odlInterfaceRpcService;
this.dataBroker = dataBroker;
this.txRunner = new ManagedNewTransactionRunnerImpl(dataBroker);
this.mdsalUtils = mdsalUtils;
this.jobCoordinator = jobCoordinator;
}
public void addToQosPolicyCache(QosPolicy qosPolicy) {
qosPolicyMap.put(qosPolicy.getUuid(),qosPolicy);
}
public void removeFromQosPolicyCache(QosPolicy qosPolicy) {
qosPolicyMap.remove(qosPolicy.getUuid());
}
public Map<Uuid, QosPolicy> getQosPolicyMap() {
return qosPolicyMap;
}
public Collection<Port> getQosPorts(Uuid qosUuid) {
final ConcurrentMap<Uuid, Port> portMap = qosPortsMap.get(qosUuid);
return portMap != null ? portMap.values() : Collections.emptyList();
}
public void addToQosPortsCache(Uuid qosUuid, Port port) {
qosPortsMap.computeIfAbsent(qosUuid, key -> new ConcurrentHashMap<>()).putIfAbsent(port.getUuid(), port);
}
public void removeFromQosPortsCache(Uuid qosUuid, Port port) {
if (qosPortsMap.containsKey(qosUuid) && qosPortsMap.get(qosUuid).containsKey(port.getUuid())) {
qosPortsMap.get(qosUuid).remove(port.getUuid(), port);
}
}
public void addToQosNetworksCache(Uuid qosUuid, Network network) {
qosNetworksMap.computeIfAbsent(qosUuid, key -> new ConcurrentHashMap<>()).putIfAbsent(network.getUuid(),
network);
}
public void removeFromQosNetworksCache(Uuid qosUuid, Network network) {
if (qosNetworksMap.containsKey(qosUuid) && qosNetworksMap.get(qosUuid).containsKey(network.getUuid())) {
qosNetworksMap.get(qosUuid).remove(network.getUuid(), network);
}
}
@Nonnull
public Collection<Network> getQosNetworks(Uuid qosUuid) {
final ConcurrentMap<Uuid, Network> networkMap = qosNetworksMap.get(qosUuid);
return networkMap != null ? networkMap.values() : Collections.emptyList();
}
@Nonnull
public List<Uuid> getSubnetIdsFromNetworkId(Uuid networkId) {
InstanceIdentifier<NetworkMap> networkMapId = InstanceIdentifier.builder(NetworkMaps.class)
.child(NetworkMap.class, new NetworkMapKey(networkId)).build();
Optional<NetworkMap> optionalNetworkMap = MDSALUtil.read(LogicalDatastoreType.CONFIGURATION,
networkMapId, dataBroker);
return optionalNetworkMap.isPresent() ? optionalNetworkMap.get().getSubnetIdList() : Collections.emptyList();
}
@Nonnull
protected List<Uuid> getPortIdsFromSubnetId(Uuid subnetId) {
InstanceIdentifier<Subnetmap> subnetMapId = InstanceIdentifier
.builder(Subnetmaps.class)
.child(Subnetmap.class, new SubnetmapKey(subnetId)).build();
Optional<Subnetmap> optionalSubnetmap = MDSALUtil.read(LogicalDatastoreType.CONFIGURATION,
subnetMapId,dataBroker);
return optionalSubnetmap.isPresent() ? optionalSubnetmap.get().getPortList() : Collections.emptyList();
}
public void handleNeutronPortQosAdd(Port port, Uuid qosUuid) {
LOG.debug("Handling Port add and QoS associated: port: {} qos: {}", port.getUuid().getValue(),
qosUuid.getValue());
QosPolicy qosPolicy = qosPolicyMap.get(qosUuid);
jobCoordinator.enqueueJob("QosPort-" + port.getUuid().getValue(),
() -> Collections.singletonList(txRunner.callWithNewWriteOnlyTransactionAndSubmit(tx -> {
// handle Bandwidth Limit Rules update
if (qosPolicy != null && qosPolicy.getBandwidthLimitRules() != null
&& !qosPolicy.getBandwidthLimitRules().isEmpty()) {
setPortBandwidthLimits(port, qosPolicy.getBandwidthLimitRules().get(0), tx);
}
// handle DSCP Mark Rules update
if (qosPolicy != null && qosPolicy.getDscpmarkingRules() != null
&& !qosPolicy.getDscpmarkingRules().isEmpty()) {
setPortDscpMarking(port, qosPolicy.getDscpmarkingRules().get(0));
}
})));
}
public void handleQosInterfaceAdd(Port port, Uuid qosUuid) {
LOG.debug("Handling Port add and QoS associated: port: {} qos: {}", port.getUuid().getValue(),
qosUuid.getValue());
QosPolicy qosPolicy = qosPolicyMap.get(qosUuid);
jobCoordinator.enqueueJob("QosPort-" + port.getUuid().getValue(), () -> {
// handle DSCP Mark Rules update
if (qosPolicy != null && qosPolicy.getDscpmarkingRules() != null
&& !qosPolicy.getDscpmarkingRules().isEmpty()) {
setPortDscpMarking(port, qosPolicy.getDscpmarkingRules().get(0));
}
return Collections.emptyList();
});
}
public void handleNeutronPortQosUpdate(Port port, Uuid qosUuidNew, Uuid qosUuidOld) {
LOG.debug("Handling Port QoS update: port: {} qosservice: {}", port.getUuid().getValue(),
qosUuidNew.getValue());
QosPolicy qosPolicyNew = qosPolicyMap.get(qosUuidNew);
QosPolicy qosPolicyOld = qosPolicyMap.get(qosUuidOld);
jobCoordinator.enqueueJob("QosPort-" + port.getUuid().getValue(),
() -> Collections.singletonList(txRunner.callWithNewWriteOnlyTransactionAndSubmit(tx -> {
// handle Bandwidth Limit Rules update
if (qosPolicyNew != null && qosPolicyNew.getBandwidthLimitRules() != null
&& !qosPolicyNew.getBandwidthLimitRules().isEmpty()) {
setPortBandwidthLimits(port, qosPolicyNew.getBandwidthLimitRules().get(0), tx);
} else {
if (qosPolicyOld != null && qosPolicyOld.getBandwidthLimitRules() != null
&& !qosPolicyOld.getBandwidthLimitRules().isEmpty()) {
BandwidthLimitRulesBuilder bwLimitBuilder = new BandwidthLimitRulesBuilder();
setPortBandwidthLimits(port, bwLimitBuilder
.setMaxBurstKbps(BigInteger.ZERO)
.setMaxKbps(BigInteger.ZERO).build(), tx);
}
}
//handle DSCP Mark Rules update
if (qosPolicyNew != null && qosPolicyNew.getDscpmarkingRules() != null
&& !qosPolicyNew.getDscpmarkingRules().isEmpty()) {
setPortDscpMarking(port, qosPolicyNew.getDscpmarkingRules().get(0));
} else {
if (qosPolicyOld != null && qosPolicyOld.getDscpmarkingRules() != null
&& !qosPolicyOld.getDscpmarkingRules().isEmpty()) {
unsetPortDscpMark(port);
}
}
})));
}
public void handleNeutronPortQosRemove(Port port, Uuid qosUuid) {
LOG.debug("Handling Port QoS removal: port: {} qosservice: {}", port.getUuid().getValue(), qosUuid.getValue());
// check for network qosservice to apply
Network network = neutronVpnManager.getNeutronNetwork(port.getNetworkId());
if (network != null && network.augmentation(QosNetworkExtension.class) != null) {
Uuid networkQosUuid = network.augmentation(QosNetworkExtension.class).getQosPolicyId();
if (networkQosUuid != null) {
handleNeutronPortQosUpdate(port, networkQosUuid, qosUuid);
}
} else {
QosPolicy qosPolicy = qosPolicyMap.get(qosUuid);
jobCoordinator.enqueueJob("QosPort-" + port.getUuid().getValue(),
() -> Collections.singletonList(txRunner.callWithNewWriteOnlyTransactionAndSubmit(tx -> {
// handle Bandwidth Limit Rules removal
if (qosPolicy != null && qosPolicy.getBandwidthLimitRules() != null
&& !qosPolicy.getBandwidthLimitRules().isEmpty()) {
BandwidthLimitRulesBuilder bwLimitBuilder = new BandwidthLimitRulesBuilder();
setPortBandwidthLimits(port, bwLimitBuilder
.setMaxBurstKbps(BigInteger.ZERO)
.setMaxKbps(BigInteger.ZERO).build(), tx);
}
// handle DSCP Mark Rules removal
if (qosPolicy != null && qosPolicy.getDscpmarkingRules() != null
&& !qosPolicy.getDscpmarkingRules().isEmpty()) {
unsetPortDscpMark(port);
}
})));
}
}
public void handleNeutronPortRemove(Port port, Uuid qosUuid) {
LOG.debug("Handling Port removal and Qos associated: port: {} qos: {}", port.getUuid().getValue(),
qosUuid.getValue());
QosPolicy qosPolicy = qosPolicyMap.get(qosUuid);
jobCoordinator.enqueueJob("QosPort-" + port.getUuid().getValue(), () -> {
//check if any DSCP rule in the policy
if (qosPolicy != null && qosPolicy.getDscpmarkingRules() != null
&& !qosPolicy.getDscpmarkingRules().isEmpty()) {
unsetPortDscpMark(port);
}
return Collections.emptyList();
});
}
public void handleNeutronPortRemove(Port port, Uuid qosUuid, Interface intrf) {
LOG.debug("Handling Port removal and Qos associated: port: {} qos: {}", port.getUuid().getValue(),
qosUuid.getValue());
QosPolicy qosPolicy = qosPolicyMap.get(qosUuid);
jobCoordinator.enqueueJob("QosPort-" + port.getUuid().getValue(), () -> {
if (qosPolicy != null && qosPolicy.getDscpmarkingRules() != null
&& !qosPolicy.getDscpmarkingRules().isEmpty()) {
unsetPortDscpMark(port, intrf);
}
return Collections.emptyList();
});
}
public void handleNeutronNetworkQosUpdate(Network network, Uuid qosUuid) {
LOG.debug("Handling Network QoS update: net: {} qosservice: {}", network.getUuid().getValue(), qosUuid);
QosPolicy qosPolicy = qosPolicyMap.get(qosUuid);
if (qosPolicy == null || (qosPolicy.getBandwidthLimitRules() == null
|| qosPolicy.getBandwidthLimitRules().isEmpty())
&& (qosPolicy.getDscpmarkingRules() == null
|| qosPolicy.getDscpmarkingRules().isEmpty())) {
return;
}
List<Uuid> subnetIds = getSubnetIdsFromNetworkId(network.getUuid());
for (Uuid subnetId : subnetIds) {
List<Uuid> portIds = getPortIdsFromSubnetId(subnetId);
for (Uuid portId : portIds) {
Port port = getNeutronPort(portId);
if (port != null && (port.augmentation(QosPortExtension.class) == null
|| port.augmentation(QosPortExtension.class).getQosPolicyId() == null)) {
jobCoordinator.enqueueJob("QosPort-" + portId.getValue(),
() -> Collections.singletonList(txRunner.callWithNewWriteOnlyTransactionAndSubmit(tx -> {
if (qosPolicy.getBandwidthLimitRules() != null
&& !qosPolicy.getBandwidthLimitRules().isEmpty()) {
setPortBandwidthLimits(port, qosPolicy.getBandwidthLimitRules().get(0), tx);
}
if (qosPolicy.getDscpmarkingRules() != null
&& !qosPolicy.getDscpmarkingRules().isEmpty()) {
setPortDscpMarking(port, qosPolicy.getDscpmarkingRules().get(0));
}
})));
}
}
}
}
public void handleNeutronNetworkQosRemove(Network network, Uuid qosUuid) {
LOG.debug("Handling Network QoS removal: net: {} qosservice: {}", network.getUuid().getValue(),
qosUuid.getValue());
QosPolicy qosPolicy = qosPolicyMap.get(qosUuid);
List<Uuid> subnetIds = getSubnetIdsFromNetworkId(network.getUuid());
for (Uuid subnetId : subnetIds) {
List<Uuid> portIds = getPortIdsFromSubnetId(subnetId);
for (Uuid portId : portIds) {
Port port = getNeutronPort(portId);
if (port != null && (port.augmentation(QosPortExtension.class) == null
|| port.augmentation(QosPortExtension.class).getQosPolicyId() == null)) {
jobCoordinator.enqueueJob("QosPort-" + portId.getValue(),
() -> Collections.singletonList(txRunner.callWithNewWriteOnlyTransactionAndSubmit(tx -> {
if (qosPolicy != null && qosPolicy.getBandwidthLimitRules() != null
&& !qosPolicy.getBandwidthLimitRules().isEmpty()) {
BandwidthLimitRulesBuilder bwLimitBuilder = new BandwidthLimitRulesBuilder();
setPortBandwidthLimits(port, bwLimitBuilder
.setMaxBurstKbps(BigInteger.ZERO)
.setMaxKbps(BigInteger.ZERO).build(), tx);
}
if (qosPolicy != null && qosPolicy.getDscpmarkingRules() != null
&& !qosPolicy.getDscpmarkingRules().isEmpty()) {
unsetPortDscpMark(port);
}
})));
}
}
}
}
public void handleNeutronNetworkQosBwRuleRemove(Network network, BandwidthLimitRules zeroBwLimitRule) {
LOG.debug("Handling Qos Bandwidth Rule Remove, net: {}", network.getUuid().getValue());
List<Uuid> subnetIds = getSubnetIdsFromNetworkId(network.getUuid());
for (Uuid subnetId: subnetIds) {
List<Uuid> portIds = getPortIdsFromSubnetId(subnetId);
for (Uuid portId : portIds) {
Port port = getNeutronPort(portId);
if (port != null && (port.augmentation(QosPortExtension.class) == null
|| port.augmentation(QosPortExtension.class).getQosPolicyId() == null)) {
jobCoordinator.enqueueJob("QosPort-" + portId.getValue(), () -> Collections.singletonList(
txRunner.callWithNewWriteOnlyTransactionAndSubmit(
tx -> setPortBandwidthLimits(port, zeroBwLimitRule, tx))));
}
}
}
}
public void handleNeutronNetworkQosDscpRuleRemove(Network network) {
LOG.debug("Handling Qos Dscp Rule Remove, net: {}", network.getUuid().getValue());
List<Uuid> subnetIds = getSubnetIdsFromNetworkId(network.getUuid());
for (Uuid subnetId: subnetIds) {
List<Uuid> portIds = getPortIdsFromSubnetId(subnetId);
for (Uuid portId : portIds) {
Port port = getNeutronPort(portId);
if (port != null && (port.augmentation(QosPortExtension.class) == null
|| port.augmentation(QosPortExtension.class).getQosPolicyId() == null)) {
jobCoordinator.enqueueJob("QosPort-" + portId.getValue(), () -> {
unsetPortDscpMark(port);
return Collections.emptyList();
});
}
}
}
}
// TODO Clean up the exception handling
@SuppressWarnings("checkstyle:IllegalCatch")
public void setPortBandwidthLimits(Port port, BandwidthLimitRules bwLimit, WriteTransaction writeConfigTxn) {
if (!qosEosHandler.isQosClusterOwner()) {
LOG.debug("Not Qos Cluster Owner. Ignoring setting bandwidth limits");
return;
}
BigInteger dpId = getDpnForInterface(port.getUuid().getValue());
if (dpId.equals(BigInteger.ZERO)) {
LOG.info("DPN ID for interface {} not found", port.getUuid().getValue());
return;
}
LOG.trace("Setting bandwidth limits {} on Port {}", port.getUuid().getValue(), bwLimit);
OvsdbBridgeRef bridgeRefEntry = getBridgeRefEntryFromOperDS(dpId);
Optional<Node> bridgeNode = MDSALUtil.read(LogicalDatastoreType.OPERATIONAL,
bridgeRefEntry.getValue().firstIdentifierOf(Node.class), dataBroker);
TerminationPoint tp = SouthboundUtils.getTerminationPointByExternalId(bridgeNode.get(),
port.getUuid().getValue());
OvsdbTerminationPointAugmentation ovsdbTp = tp.augmentation(OvsdbTerminationPointAugmentation.class);
OvsdbTerminationPointAugmentationBuilder tpAugmentationBuilder = new OvsdbTerminationPointAugmentationBuilder();
tpAugmentationBuilder.setName(ovsdbTp.getName());
tpAugmentationBuilder.setIngressPolicingRate(bwLimit.getMaxKbps().longValue());
tpAugmentationBuilder.setIngressPolicingBurst(bwLimit.getMaxBurstKbps().longValue());
TerminationPointBuilder tpBuilder = new TerminationPointBuilder();
tpBuilder.withKey(tp.key());
tpBuilder.addAugmentation(OvsdbTerminationPointAugmentation.class, tpAugmentationBuilder.build());
try {
if (writeConfigTxn != null) {
writeConfigTxn.merge(LogicalDatastoreType.CONFIGURATION, InstanceIdentifier
.create(NetworkTopology.class)
.child(Topology.class, new TopologyKey(SouthboundUtils.OVSDB_TOPOLOGY_ID))
.child(Node.class, bridgeNode.get().key())
.child(TerminationPoint.class, new TerminationPointKey(tp.key())), tpBuilder.build(), true);
} else {
MDSALUtil.syncUpdate(dataBroker, LogicalDatastoreType.CONFIGURATION, InstanceIdentifier
.create(NetworkTopology.class)
.child(Topology.class, new TopologyKey(SouthboundUtils.OVSDB_TOPOLOGY_ID))
.child(Node.class, bridgeNode.get().key())
.child(TerminationPoint.class, new TerminationPointKey(tp.key())), tpBuilder.build());
}
} catch (Exception e) {
if (LOG.isDebugEnabled()) {
LOG.debug("Failure while setting BwLimitRule {} to port {} exception ", bwLimit,
port.getUuid().getValue(), e);
} else {
LOG.error("Failure while setting BwLimitRule {} to port {}", bwLimit, port.getUuid().getValue());
}
}
}
public void setPortDscpMarking(Port port, DscpmarkingRules dscpMark) {
if (!qosEosHandler.isQosClusterOwner()) {
LOG.trace("Not Qos Cluster Owner. Ignoring setting DSCP marking");
return;
}
BigInteger dpnId = getDpnForInterface(port.getUuid().getValue());
String ifName = port.getUuid().getValue();
Interface ifState = getInterfaceStateFromOperDS(ifName);
Short dscpValue = dscpMark.getDscpMark();
if (dpnId.equals(BigInteger.ZERO)) {
LOG.info("DPN ID for interface {} not found. Cannot set dscp value {} on port {}",
port.getUuid().getValue(), dscpMark, port.getUuid().getValue());
return;
}
int ipVersions = getIpVersions(port);
//1. OF rules
if (hasIpv4Addr(ipVersions)) {
LOG.trace("setting ipv4 flow for port: {}, dscp: {}", ifName, dscpValue);
addFlow(dpnId, dscpValue, ifName, NwConstants.ETHTYPE_IPV4, ifState);
}
if (hasIpv6Addr(ipVersions)) {
LOG.trace("setting ipv6 flow for port: {}, dscp: {}", ifName, dscpValue);
addFlow(dpnId, dscpValue, ifName, NwConstants.ETHTYPE_IPV6, ifState);
}
if (qosServiceConfiguredPorts.add(port.getUuid())) {
// bind qos service to interface
bindservice(ifName);
}
}
public void unsetPortDscpMark(Port port) {
if (!qosEosHandler.isQosClusterOwner()) {
LOG.debug("Not Qos Cluster Owner. Ignoring unsetting DSCP marking");
return;
}
BigInteger dpnId = getDpnForInterface(port.getUuid().getValue());
String ifName = port.getUuid().getValue();
if (dpnId.equals(BigInteger.ZERO)) {
LOG.debug("DPN ID for port {} not found. Cannot unset dscp value", port.getUuid().getValue());
return;
}
LOG.trace("Removing dscp marking rule from Port {}", port.getUuid().getValue());
Interface intf = getInterfaceStateFromOperDS(ifName);
//unbind service from interface
unbindservice(ifName);
// 1. OF
int ipVersions = getIpVersions(port);
if (hasIpv4Addr(ipVersions)) {
removeFlow(dpnId, ifName, NwConstants.ETHTYPE_IPV4, intf);
}
if (hasIpv6Addr(ipVersions)) {
removeFlow(dpnId, ifName, NwConstants.ETHTYPE_IPV6, intf);
}
qosServiceConfiguredPorts.remove(port.getUuid());
}
public void unsetPortDscpMark(Port port, Interface intrf) {
if (!qosEosHandler.isQosClusterOwner()) {
return;
}
BigInteger dpnId = getDpIdFromInterface(intrf);
String ifName = port.getUuid().getValue();
if (dpnId.equals(BigInteger.ZERO)) {
LOG.error("Unable to retrieve DPN Id for interface {}. Cannot unset dscp value on port", ifName);
return;
}
LOG.trace("Removing dscp marking rule from Port {}", port.getUuid().getValue());
unbindservice(ifName);
int ipVersions = getIpVersions(port);
if (hasIpv4Addr(ipVersions)) {
removeFlow(dpnId, ifName, NwConstants.ETHTYPE_IPV4, intrf);
}
if (hasIpv6Addr(ipVersions)) {
removeFlow(dpnId, ifName, NwConstants.ETHTYPE_IPV6, intrf);
}
qosServiceConfiguredPorts.remove(port.getUuid());
}
private static BigInteger getDpIdFromInterface(org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang.ietf
.interfaces.rev140508.interfaces.state.Interface ifState) {
String lowerLayerIf = ifState.getLowerLayerIf().get(0);
NodeConnectorId nodeConnectorId = new NodeConnectorId(lowerLayerIf);
return BigInteger.valueOf(MDSALUtil.getDpnIdFromPortName(nodeConnectorId));
}
public BigInteger getDpnForInterface(String ifName) {
BigInteger nodeId = BigInteger.ZERO;
try {
GetDpidFromInterfaceInput
dpIdInput = new GetDpidFromInterfaceInputBuilder().setIntfName(ifName).build();
Future<RpcResult<GetDpidFromInterfaceOutput>>
dpIdOutput = odlInterfaceRpcService.getDpidFromInterface(dpIdInput);
RpcResult<GetDpidFromInterfaceOutput> dpIdResult = dpIdOutput.get();
if (dpIdResult.isSuccessful()) {
nodeId = dpIdResult.getResult().getDpid();
} else {
LOG.error("Could not retrieve DPN Id for interface {}", ifName);
}
} catch (NullPointerException | InterruptedException | ExecutionException e) {
if (LOG.isDebugEnabled()) {
LOG.debug("Exception when getting DPN for interface {} exception ", ifName, e);
} else {
LOG.error("Could not retrieve DPN for interface {}", ifName);
}
}
return nodeId;
}
@Nullable
private BridgeEntry getBridgeEntryFromConfigDS(BigInteger dpnId) {
BridgeEntryKey bridgeEntryKey = new BridgeEntryKey(dpnId);
InstanceIdentifier<BridgeEntry> bridgeEntryInstanceIdentifier = getBridgeEntryIdentifier(bridgeEntryKey);
LOG.debug("Trying to retrieve bridge entry from config for Id: {}", bridgeEntryInstanceIdentifier);
return getBridgeEntryFromConfigDS(bridgeEntryInstanceIdentifier);
}
@Nullable
private BridgeEntry getBridgeEntryFromConfigDS(InstanceIdentifier<BridgeEntry> bridgeEntryInstanceIdentifier) {
return MDSALUtil.read(LogicalDatastoreType.CONFIGURATION, bridgeEntryInstanceIdentifier, dataBroker).orNull();
}
@Nullable
private BridgeRefEntry getBridgeRefEntryFromOperDS(InstanceIdentifier<BridgeRefEntry> dpnBridgeEntryIid) {
return MDSALUtil.read(LogicalDatastoreType.OPERATIONAL, dpnBridgeEntryIid, dataBroker).orNull();
}
@Nullable
private OvsdbBridgeRef getBridgeRefEntryFromOperDS(BigInteger dpId) {
BridgeRefEntryKey bridgeRefEntryKey = new BridgeRefEntryKey(dpId);
InstanceIdentifier<BridgeRefEntry> bridgeRefEntryIid = getBridgeRefEntryIdentifier(bridgeRefEntryKey);
BridgeRefEntry bridgeRefEntry = getBridgeRefEntryFromOperDS(bridgeRefEntryIid);
if (bridgeRefEntry == null) {
// bridge ref entry will be null if the bridge is disconnected from controller.
// In that case, fetch bridge reference from bridge interface entry config DS
BridgeEntry bridgeEntry = getBridgeEntryFromConfigDS(dpId);
if (bridgeEntry == null) {
return null;
}
return bridgeEntry.getBridgeReference();
}
return bridgeRefEntry.getBridgeReference();
}
@Nonnull
private static InstanceIdentifier<BridgeRefEntry> getBridgeRefEntryIdentifier(BridgeRefEntryKey bridgeRefEntryKey) {
return InstanceIdentifier.builder(BridgeRefInfo.class).child(BridgeRefEntry.class, bridgeRefEntryKey).build();
}
@Nonnull
private static InstanceIdentifier<BridgeEntry> getBridgeEntryIdentifier(BridgeEntryKey bridgeEntryKey) {
return InstanceIdentifier.builder(BridgeInterfaceInfo.class).child(BridgeEntry.class, bridgeEntryKey).build();
}
public void removeStaleFlowEntry(Interface intrf, int ethType) {
List<MatchInfo> matches = new ArrayList<>();
BigInteger dpnId = getDpIdFromInterface(intrf);
Integer ifIndex = intrf.getIfIndex();
matches.add(new MatchMetadata(MetaDataUtil.getLportTagMetaData(ifIndex), MetaDataUtil.METADATA_MASK_LPORT_TAG));
FlowEntity flowEntity = MDSALUtil.buildFlowEntity(dpnId, NwConstants.QOS_DSCP_TABLE,
getQosFlowId(NwConstants.QOS_DSCP_TABLE, dpnId, ifIndex, ethType),
QosConstants.QOS_DEFAULT_FLOW_PRIORITY, "QoSRemoveFlow", 0, 0, NwConstants.COOKIE_QOS_TABLE,
matches, null);
mdsalUtils.removeFlow(flowEntity);
}
public void addFlow(BigInteger dpnId, Short dscpValue, String ifName, int ethType, Interface ifState) {
if (ifState == null) {
LOG.debug("Could not find the ifState for interface {}", ifName);
return;
}
Integer ifIndex = ifState.getIfIndex();
List<MatchInfo> matches = new ArrayList<>();
matches.add(new MatchEthernetType(ethType));
matches.add(new MatchMetadata(MetaDataUtil.getLportTagMetaData(ifIndex), MetaDataUtil.METADATA_MASK_LPORT_TAG));
List<ActionInfo> actionsInfos = new ArrayList<>();
actionsInfos.add(new ActionSetFieldDscp(dscpValue));
actionsInfos.add(new ActionNxResubmit(NwConstants.LPORT_DISPATCHER_TABLE));
List<InstructionInfo> instructions = Collections.singletonList(new InstructionApplyActions(actionsInfos));
FlowEntity flowEntity = MDSALUtil.buildFlowEntity(dpnId, NwConstants.QOS_DSCP_TABLE,
getQosFlowId(NwConstants.QOS_DSCP_TABLE, dpnId, ifIndex, ethType),
QosConstants.QOS_DEFAULT_FLOW_PRIORITY, "QoSConfigFlow", 0, 0, NwConstants.COOKIE_QOS_TABLE,
matches, instructions);
mdsalUtils.installFlow(flowEntity);
}
public void removeFlow(BigInteger dpnId, String ifName, int ethType, Interface ifState) {
if (ifState == null) {
LOG.debug("Could not find the ifState for interface {}", ifName);
return;
}
Integer ifIndex = ifState.getIfIndex();
mdsalUtils.removeFlow(dpnId, NwConstants.QOS_DSCP_TABLE,
new FlowId(getQosFlowId(NwConstants.QOS_DSCP_TABLE, dpnId, ifIndex, ethType)));
}
@Nullable
public org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang
.ietf.interfaces.rev140508.interfaces.state.Interface getInterfaceStateFromOperDS(
String interfaceName) {
return MDSALUtil.read(dataBroker, LogicalDatastoreType.OPERATIONAL,
createInterfaceStateInstanceIdentifier(interfaceName)).orNull();
}
@Nonnull
public static InstanceIdentifier<org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang
.ietf.interfaces.rev140508.interfaces.state.Interface> createInterfaceStateInstanceIdentifier(
String interfaceName) {
return InstanceIdentifier
.builder(InterfacesState.class)
.child(org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang
.ietf.interfaces.rev140508.interfaces.state.Interface.class,
new org.opendaylight.yang.gen.v1.urn.ietf.params.xml.ns.yang
.ietf.interfaces.rev140508.interfaces.state.InterfaceKey(
interfaceName))
.build();
}
public void bindservice(String ifName) {
int priority = QosConstants.QOS_DEFAULT_FLOW_PRIORITY;
int instructionKey = 0;
List<Instruction> instructions = new ArrayList<>();
instructions.add(MDSALUtil.buildAndGetGotoTableInstruction(NwConstants.QOS_DSCP_TABLE, ++instructionKey));
short qosServiceIndex = ServiceIndex.getIndex(NwConstants.QOS_SERVICE_NAME, NwConstants.QOS_SERVICE_INDEX);
BoundServices serviceInfo = getBoundServices(
String.format("%s.%s", "qos", ifName), qosServiceIndex,
priority, NwConstants.COOKIE_QOS_TABLE, instructions);
MDSALUtil.syncWrite(dataBroker, LogicalDatastoreType.CONFIGURATION,
buildServiceId(ifName, qosServiceIndex),
serviceInfo);
}
public void unbindservice(String ifName) {
MDSALUtil.syncDelete(dataBroker, LogicalDatastoreType.CONFIGURATION, buildServiceId(ifName,
ServiceIndex.getIndex(NwConstants.QOS_SERVICE_NAME, NwConstants.QOS_SERVICE_INDEX)));
}
private static InstanceIdentifier<BoundServices> buildServiceId(String interfaceName, short qosServiceIndex) {
return InstanceIdentifier.builder(ServiceBindings.class)
.child(ServicesInfo.class, new ServicesInfoKey(interfaceName, ServiceModeIngress.class))
.child(BoundServices.class, new BoundServicesKey(qosServiceIndex)).build();
}
private static BoundServices getBoundServices(String serviceName, short qosServiceIndex, int priority,
BigInteger cookieQosTable, List<Instruction> instructions) {
StypeOpenflowBuilder augBuilder = new StypeOpenflowBuilder().setFlowCookie(cookieQosTable)
.setFlowPriority(priority).setInstruction(instructions);
return new BoundServicesBuilder().withKey(new BoundServicesKey(qosServiceIndex)).setServiceName(serviceName)
.setServicePriority(qosServiceIndex).setServiceType(ServiceTypeFlowBased.class)
.addAugmentation(StypeOpenflow.class, augBuilder.build()).build();
}
@Nonnull
public static String getQosFlowId(short tableId, BigInteger dpId, int lportTag, int ethType) {
return new StringBuilder().append(tableId).append(NwConstants.FLOWID_SEPARATOR).append(dpId)
.append(NwConstants.FLOWID_SEPARATOR).append(lportTag)
.append(NwConstants.FLOWID_SEPARATOR).append(ethType).toString();
}
public boolean portHasQosPolicy(Port port) {
LOG.trace("checking qos policy for port: {}", port.getUuid().getValue());
boolean isQosPolicy = port.augmentation(QosPortExtension.class) != null
&& port.augmentation(QosPortExtension.class).getQosPolicyId() != null;
LOG.trace("portHasQosPolicy for port: {} return value {}", port.getUuid().getValue(), isQosPolicy);
return isQosPolicy;
}
@Nullable
public QosPolicy getQosPolicy(Port port) {
Uuid qosUuid = null;
QosPolicy qosPolicy = null;
if (port.augmentation(QosPortExtension.class) != null) {
qosUuid = port.augmentation(QosPortExtension.class).getQosPolicyId();
} else {
Network network = neutronVpnManager.getNeutronNetwork(port.getNetworkId());
if (network.augmentation(QosNetworkExtension.class) != null) {
qosUuid = network.augmentation(QosNetworkExtension.class).getQosPolicyId();
}
}
if (qosUuid != null) {
qosPolicy = qosPolicyMap.get(qosUuid);
}
return qosPolicy;
}
public boolean hasDscpMarkingRule(QosPolicy qosPolicy) {
if (qosPolicy != null) {
return qosPolicy.getDscpmarkingRules() != null && !qosPolicy.getDscpmarkingRules().isEmpty();
}
return false;
}
public void addToPortCache(Port port) {
neutronPortMap.put(port.getUuid(), port);
}
public void removeFromPortCache(Port port) {
neutronPortMap.remove(port.getUuid());
}
public Port getNeutronPort(Uuid portUuid) {
return neutronPortMap.get(portUuid);
}
public Port getNeutronPort(String portName) {
return getNeutronPort(new Uuid(portName));
}
public void addToNetworkCache(Network network) {
neutronNetworkMap.put(network.getUuid(), network);
}
public void removeFromNetworkCache(Network network) {
neutronNetworkMap.remove(network.getUuid());
}
public Network getNeutronNetwork(Uuid networkUuid) {
return neutronNetworkMap.get(networkUuid);
}
public static BigInteger getDpnIdFromLowerLayerIf(String lowerLayerIf) {
try {
return new BigInteger(lowerLayerIf.substring(lowerLayerIf.indexOf(":") + 1, lowerLayerIf.lastIndexOf(":")));
} catch (NullPointerException e) {
return null;
}
}
public static String getPortNumberFromLowerLayerIf(String lowerLayerIf) {
try {
return (lowerLayerIf.substring(lowerLayerIf.lastIndexOf(":") + 1));
} catch (NullPointerException e) {
return null;
}
}
public int getIpVersions(Port port) {
int versions = 0;
for (FixedIps fixedIp: port.getFixedIps()) {
if (fixedIp.getIpAddress().getIpv4Address() != null) {
versions |= (1 << QosConstants.IPV4_ADDR_MASK_BIT);
} else if (fixedIp.getIpAddress().getIpv6Address() != null) {
versions |= (1 << QosConstants.IPV6_ADDR_MASK_BIT);
}
}
return versions;
}
public boolean hasIpv4Addr(int versions) {
if ((versions & (1 << QosConstants.IPV4_ADDR_MASK_BIT)) != 0) {
return true;
}
return false;
}
public boolean hasIpv6Addr(int versions) {
if ((versions & (1 << QosConstants.IPV6_ADDR_MASK_BIT)) != 0) {
return true;
}
return false;
}
}
| Do not apply rate limit rule on subport
Fix details: A QoS rate limit rule is applicable on a neutron
port (at trunk level) and cannot be applied at subport level.
If a port has multiple subports, the trunk-level limit is the
total limit across all subports.
Only a QoS DSCP policy can be applied on a subport.
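The guard added in setPortBandwidthLimits (excerpted from this change;
a null termination point for the port's external id indicates a subport):

    TerminationPoint tp = SouthboundUtils.getTerminationPointByExternalId(bridgeNode.get(),
            port.getUuid().getValue());
    if (tp == null) {
        LOG.debug("Skipping setting of bandwidth limit rules for subport {}",
                port.getUuid().getValue());
        return;
    }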
Change-Id: Ie75439aa0695412cb989f6076e8d783ed0fdcbb4
Signed-off-by: Arun Sharma <[email protected]>
| qosservice/impl/src/main/java/org/opendaylight/netvirt/qosservice/QosNeutronUtils.java | Do not apply rate limit rule on subport |
|
Java | lgpl-2.1 | e1a0d4c0563174d343f00d61829016f8d4980a41 | 0 | viktorbahr/jaer,SensorsINI/jaer,SensorsINI/jaer,SensorsINI/jaer,SensorsINI/jaer,viktorbahr/jaer,SensorsINI/jaer,SensorsINI/jaer,viktorbahr/jaer,viktorbahr/jaer,SensorsINI/jaer,viktorbahr/jaer,SensorsINI/jaer,viktorbahr/jaer,viktorbahr/jaer | /** EventPacket.java
*
* Created on October 29, 2005, 10:18 PM
*
* To change this template, choose Tools | Options and locate the template under
* the Source Creation and Management node. Right-click the template and choose
* Open. You can then make changes to the template in the Source Editor.
*/
package net.sf.jaer.event;
import net.sf.jaer.eventprocessing.*;
import java.lang.reflect.Array;
import java.lang.reflect.Constructor;
import java.util.Iterator;
import java.util.logging.Logger;
import net.sf.jaer.aemonitor.AEConstants;
/**
* A packet of events that is used for rendering and event processing.
For efficiency, these packets are designed to be re-used;
they should be allocated rarely and allowed to grow in size. If a packet capacity needs to be
increased, a substantial performance hit will occur, e.g. 1 ms per resizing or initial allocation.
<p>
The EventPacket is prefilled with Events that have default values. One constructor lets
you fill the EventPacket with a subclass of BasicEvent. This prefilling avoids
the overhead of object creation. It also allows easy access to all information contained in the
event and it allows storing arbitrary event information, including
extended type information, in EventPackets.
<p>
However, this reuse of existing objects means that you need to take particular precautions.
* The events that are stored
in a packet are references to objects. Therefore you can assign an event to a different packet
but this event will still be referenced in the original packet
and can change.
<p>
Generally in event processing, you will iterate over an input packet to produce an output packet.
You iterate over an existing EventPacket that has input data using the iterator() method.
This lets you access the events in the input packet.
<p>
When you want to write these events to an existing output packet,
then you need to use the target event's copyFrom(Event e) method
that copies all the fields in the
source packet to the target packet. This lets you copy data such as
timestamp, x,y location to a target event. You can then fill in the target event's extended
type information.
<p>
When you iterate over an input packet to write to a target packet,
you obtain the target event to write your results to by using the target packet's
output enumeration by using the outputIterator() method. This enumeration has a method nextOutput() that returns the
next output event to write to. This nextOutput() method also expands the packet if the current capacity needs to be enlarged.
The iterator is initialized by the call to outputIterator().
<p>
The amount of time iterating over events can also be limited by using the time limiter.
This static (class) method starts a timer when it is restarted and after timeout, no more events are returned
from input iteration. These methods are used in FilterChain to limit processing time.
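<p>
A minimal sketch of this copy pattern (the packets <code>in</code> and <code>out</code> here are hypothetical, not fields of this class):
<pre>
    OutputEventIterator outItr = out.outputIterator(); // resets the output packet
    for (BasicEvent ein : in) {                        // input iteration; honors the time limiter
        BasicEvent eout = outItr.nextOutput();         // next reusable target event, grows packet if needed
        eout.copyFrom(ein);                            // copy timestamp, x, y, then set any extended fields
    }
</pre>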
* @author tobi
*/
public class EventPacket<E extends BasicEvent> implements /*EventPacketInterface<E>,*/ Cloneable, Iterable<E> {
static Logger log=Logger.getLogger(EventPacket.class.getName());
    /** The time limiting Timer - this is common to the JVM and will be shared by all filters on all viewers. */
private static TimeLimiter timeLimitTimer=new TimeLimiter();
/** Default capacity in events for new EventPackets */
public final int DEFAULT_INITIAL_CAPACITY=4096;
private int capacity;
/** the number of events eventList actually contains (0 to size-1) */
private int size=0;
private Class eventClass=null;
/** Constructs new events for this packet. */
protected Constructor eventConstructor=null;
private E eventPrototype;
private transient E[] elementData;
/** Resets the time limiter for input iteration. After the timer times out
(time determined by timeLimitMs) input iterators will not return any more events.
*/
static public void restartTimeLimiter() {
timeLimitTimer.restart();
}
/** restart the time limiter with limit timeLimitMs
@param timeLimitMs time in ms
*/
public static void restartTimeLimiter(int timeLimitMs) {
setTimeLimitMs(timeLimitMs);
restartTimeLimiter();
}
/** Constructs a new EventPacket filled with BasicEvent.
@see net.sf.jaer.event.BasicEvent
*/
public EventPacket() {
this(BasicEvent.class);
}
/** Constructs a new EventPacket filled with the given event class.
@see net.sf.jaer.event.BasicEvent
*/
public EventPacket(Class<? extends BasicEvent> eventClass) {
if(!BasicEvent.class.isAssignableFrom(eventClass)) {
throw new Error("making EventPacket that holds "+eventClass+" but these are not assignable from BasicEvent");
}
setEventClass(eventClass);
}
/** Fills this with DEFAULT_INITIAL_CAPACITY of the event class */
protected void initializeEvents() {
// eventList=new ArrayList<E>(DEFAULT_INITIAL_CAPACITY);
// elementData = (E[])new BasicEvent[DEFAULT_INITIAL_CAPACITY];
elementData=(E[]) Array.newInstance(eventClass, DEFAULT_INITIAL_CAPACITY);
fillWithDefaultEvents(0, DEFAULT_INITIAL_CAPACITY);
size=0;
capacity=DEFAULT_INITIAL_CAPACITY;
}
private void fillWithDefaultEvents(int startIndex, int endIndex) {
try {
for(int i=startIndex; i<endIndex; i++) {
E e=(E) eventConstructor.newInstance();
// eventList.add(e);
elementData[i]=e;
eventPrototype=e;
}
} catch(Exception e) {
log.warning("while filling packet with default events caught "+e);
e.printStackTrace();
}
}
/** Returns duration of packet in microseconds.
*
* @return 0 if there are less than 2 events, otherwise last timestamp minus first timestamp.
*/
public int getDurationUs() {
if(size<2) {
return 0;
} else {
return getLastTimestamp()-getFirstTimestamp();
}
}
public String getDescription() {
return "";
}
/** Sets the size to zero. */
public void clear() {
size=0; // we don't clear list, because that nulls all the events
}
protected void setSize(int n) {
size=n;
// eventList.
// this.numEvents=n;
}
    /** Event rate for this packet in Hz, measured stupidly by
     * the size in events divided by the packet duration.
     * If packet duration is zero (there are no events or zero time interval between the events),
     * then the rate returned is zero.
    @return rate of events in Hz.
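     * For example, assuming the default timestamp tick of 1 us, 1000 events spanning 10 ms (10000 us) give 1000/0.01 s = 1e5 Hz.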
*/
public float getEventRateHz() {
if(getDurationUs()==0) {
return 0;
}
return (float) getSize()/((float)getDurationUs()*AEConstants.TICK_DEFAULT_US*1e-6f);
}
// public void copyTo(EventPacket packet) {
// }
/** Returns first event, or null if there are no events.
*
* @return the event or null if there are no events.
*/
public E getFirstEvent() {
if(size==0) {
return null;
}
return elementData[0];
// return eventList.get(0);
}
/** Returns last event, or null if there are no events.
*
* @return the event or null if there are no events.
*/
public E getLastEvent() {
if(size==0) {
return null;
}
return elementData[size-1];
// return eventList.get(size-1);
}
/** Returns first timestamp or 0 if there are no events.
*
* @return timestamp
*/
public int getFirstTimestamp() {
// if(events==null) return 0; else return events[0].timestamp;
return elementData[0].timestamp;
// return eventList.get(0).timestamp;
}
/** @return last timestamp in packet.
If packet is empty, returns zero - which could be important if this time is used for e.g. filtering operations!
*/
public int getLastTimestamp() {
// if(size==0) return 0;
//// if(events==null) return 0; else return events[numEvents-1].timestamp;
// return elementData[size-1].timestamp;
//// return eventList.get(size-1).timestamp;
int s=size;
if(s==0) {
return 0;
}
return elementData[s-1].timestamp;
}
/** Returns the k'th event.
* @throws ArrayIndexOutOfBoundsException if out of bounds of packet.
*/
final public E getEvent(int k) {
if(k>=size) {
throw new ArrayIndexOutOfBoundsException();
}
return elementData[k];
// return eventList.get(k);
}
private InItr inputIterator=null;
    /** Initializes and returns the iterator over input events.
@return an iterator that can iterate over the events.
*/
final public Iterator<E> inputIterator() {
if(inputIterator==null) {
inputIterator=new InItr();
} else {
inputIterator.reset();
}
return inputIterator;
}
private OutItr outputIterator=null;
/** Returns an iterator that iterates over the output events.
*
     * @return the iterator. Use it to obtain new output events which can then be copied from other events or modified.
*/
final public OutputEventIterator<E> outputIterator() {
if(outputIterator==null) {
outputIterator=new OutItr();
} else {
outputIterator.reset();
}
return outputIterator;
}
final private class OutItr implements OutputEventIterator {
OutItr() {
size=0; // reset size because we are starting off output packet
}
/** obtains the next output event. Increments the size of the packet */
final public E nextOutput() {
if(size>=capacity) {
enlargeCapacity();
// System.out.println("enlarged "+EventPacket.this);
}
return elementData[size++];
}
final public void reset() {
size=0;
}
public String toString() {
return "OutputEventIterator for packet with size="+size+" capacity="+capacity;
}
}
final private class InItr implements Iterator<E> {
int cursor;
boolean usingTimeout=timeLimitTimer.isEnabled();
public InItr() {
reset();
}
final public boolean hasNext() {
if(usingTimeout) {
return cursor<size&&!timeLimitTimer.isTimedOut();
} else {
return cursor<size;
}
}
final public E next() {
return elementData[cursor++];
}
public void reset() {
cursor=0;
            usingTimeout=timeLimitTimer.isEnabled(); // the time limiter is only used if timeLimitTimer is enabled, but the flag to check it is only set on packet reset
}
        public void remove() {
            // shift subsequent events left over the one last returned by next()
            for(int ctr=cursor; ctr<size; ctr++) {
                elementData[ctr-1]=elementData[ctr];
            }
            //go back as we removed an event
            cursor--;
            size--;
            //throw new UnsupportedOperationException();
        }
public String toString() {
return "InputEventIterator cursor="+cursor+" for packet with size="+size;
}
}
/** Enlarges capacity by some factor, then copies all event references to the new packet */
private void enlargeCapacity() {
log.info("enlarging capacity of "+this);
int ncapacity=capacity*2; // (capacity*3)/2+1;
Object oldData[]=elementData;
elementData=(E[]) new BasicEvent[ncapacity];
System.arraycopy(oldData, 0, elementData, 0, size);
// capacity still is old capacity and we have already filled it to there with new events, now fill
// in up to new capacity with new events
fillWithDefaultEvents(capacity, ncapacity);
capacity=ncapacity;
}
// public static void main(String[] args){
// EventPacket p=new EventPacket();
// p.test();
// }
//
/**
0.32913625s for 300 n allocations, 1097.1208 us/packet
0.3350817s for 300 n allocations, 1116.939 us/packet
0.3231394s for 300 n allocations, 1077.1313 us/packet
0.32404426s for 300 n allocations, 1080.1475 us/packet
0.3472975s for 300 n allocations, 1157.6583 us/packet
0.33720487s for 300 n allocations, 1124.0162 us/packet
*/
// void test(){
// int nreps=5;
// int size=30000;
// long stime, etime;
// EventPacket<BasicEvent> p,pout;
// OutputEventIterator outItr;
// Iterator<BasicEvent> inItr;
//
// System.out.println("make new packets");
// for(int k=0;k<nreps;k++){
// stime=System.nanoTime();
// for(int i=0;i<nreps;i++){
// p=new EventPacket();
// }
// etime=System.nanoTime();
//
// float timeSec=(etime-stime)/1e9f;
//
// System.out.println(timeSec+ "s"+" for "+nreps+" n allocations, "+1e6f*timeSec/nreps+" us/packet ");
// System.out.flush();
// try{
// Thread.currentThread().sleep(10);
// }catch(Exception e){}
// }
//
// System.out.println("make a new packet and fill with events");
// p=new EventPacket<BasicEvent>();
// for(int k=0;k<nreps;k++){
// stime=System.nanoTime();
// outItr=p.outputIterator();
// for(int i=0;i<size;i++){
// BasicEvent e=outItr.nextOutput();
// e.timestamp=i;
// e.x=((short)i);
// e.y=(e.x);
// }
// etime=System.nanoTime();
//
// float timeSec=(etime-stime)/1e9f;
//
// System.out.println(timeSec+ "s"+" for "+size+" fill, "+1e6f*timeSec/size+" us/event ");
// System.out.flush();
// try{
// Thread.currentThread().sleep(10);
// }catch(Exception e){}
// }
//
//
// System.out.println("iterate over packet, changing all values");
//// p=new EventPacket();
// pout=new EventPacket<BasicEvent>();
//
// for(int k=0;k<nreps;k++){
// stime=System.nanoTime();
// inItr=p.inputIterator();
// outItr=pout.outputIterator();
// for(BasicEvent ein:p){
//
//// while(inItr.hasNext()){
//// BasicEvent ein=inItr.next();
// BasicEvent eout=outItr.nextOutput();
// eout.copyFrom(ein);
// }
// etime=System.nanoTime();
//
// float timeSec=(etime-stime)/1e9f;
//
// System.out.println(timeSec+ "s"+" for iteration over packet with size="+p.getSize()+", "+timeSec/p.getSize()+" s per event");
// System.out.flush();
// try{
// Thread.currentThread().sleep(10);
// }catch(Exception e){}
// }
//
// System.out.println("\nmake packet with OrientationEvent and assign polarity and orientation");
// pout=new EventPacket(OrientationEvent.class);
// OrientationEvent ori=null;
// for(int k=0;k<nreps;k++){
// stime=System.nanoTime();
// outItr=pout.outputIterator();
// for(int i=0;i<size;i++){
// ori=(OrientationEvent)outItr.nextOutput();
// ((PolarityEvent)ori).type=10;
// ori.timestamp=i;
// ori.orientation=(byte)(100);
//// ori.polarity=(byte)(20);
// }
// etime=System.nanoTime();
// float timeSec=(etime-stime)/1e9f;
//
// System.out.println(timeSec+ "s"+" for iteration over packet with size="+p.getSize()+", "+timeSec/p.getSize()+" s per event");
// System.out.flush();
// try{
// Thread.currentThread().sleep(10);
// }catch(Exception e){}
// }
// System.out.println("ori event ="+pout.getEvent(0)+" with type="+ori.getType());
// System.out.println(pout.toString());
//
// }
//
/** Returns the number of events in the packet.
*
* @return size in events.
*/
final public int getSize() {
return size;
}
/** Reports if the packet is empty.
*
* @return true if empty.
*/
public boolean isEmpty() {
        return size==0;
}
@Override
public String toString() {
int sz=getSize();
String s="EventPacket holding "+getEventClass().getSimpleName()+" with size="+sz+" capacity="+capacity;
return s;
}
/** Returns the number of 'types' of events.
*
* @return the number of types, typically a small number like 1,2, or 4.
*/
final public int getNumCellTypes() {
return eventPrototype.getNumCellTypes();
}
/** Returns a prototype of the events in the packet.
*
* @return a single instance of the event.
*/
final public E getEventPrototype() {
return eventPrototype;
}
/** Initializes and returns the iterator */
final public Iterator<E> iterator() {
return inputIterator();
}
/** Returns the class of event in this packet.
@return the event class.
*/
final public Class getEventClass() {
return eventClass;
}
/** Sets the constructor for new (empty) events and initializes the packet.
*
* @param constructor - a zero argument constructor for the new events.
*/
public final void setEventClass(Constructor constructor){
this.eventConstructor=constructor;
this.eventClass=eventConstructor.getDeclaringClass();
initializeEvents();
}
/** Sets the event class for this packet and fills the packet with these events.
*
     * @param eventClass which must extend BasicEvent
*/
public final void setEventClass(Class<? extends BasicEvent> eventClass) {
this.eventClass=eventClass;
try {
eventConstructor=eventClass.getConstructor();
} catch(NoSuchMethodException e) {
log.warning("cannot get constructor for constructing Events for building EventPacket: exception="+e.toString()+", cause="+e.getCause());
e.printStackTrace();
}
initializeEvents();
}
/** Gets the class time limit for iteration in ms
*/
final public static int getTimeLimitMs() {
return timeLimitTimer.getTimeLimitMs();
}
/** Sets the class time limit for filtering a packet through the filter chain in ms.
@param timeLimitMs the time limit in ms
@see #restartTimeLimiter
*/
final public static void setTimeLimitMs(int timeLimitMs) {
timeLimitTimer.setTimeLimitMs(timeLimitMs);
}
final public static void setTimeLimitEnabled(boolean yes) {
timeLimitTimer.setEnabled(yes);
}
/** Returns status of time limiting
@return true if timelimiting is enabled
*/
final public static boolean isTimeLimitEnabled() {
return timeLimitTimer.isEnabled();
}
/** Returns true if timeLimitTimer is timed out and timeLimitEnabled */
final public static boolean isTimedOut() {
return timeLimitTimer.isTimedOut();
}
}
| src/net/sf/jaer/event/EventPacket.java | /** EventPacket.java
*
* Created on October 29, 2005, 10:18 PM
*
* To change this template, choose Tools | Options and locate the template under
* the Source Creation and Management node. Right-click the template and choose
* Open. You can then make changes to the template in the Source Editor.
*/
package net.sf.jaer.event;
import net.sf.jaer.chip.AEChip;
import net.sf.jaer.eventprocessing.*;
import java.lang.reflect.Array;
import java.lang.reflect.Constructor;
import java.util.Iterator;
import java.util.logging.Logger;
import net.sf.jaer.aemonitor.AEConstants;
/**
* A packet of events that is used for rendering and event processing.
For efficiency, these packets are designed to be re-used;
they should be allocated rarely and allowed to grow in size. If a packet capacity needs to be
increased, a substantial performance hit will occur, e.g. 1 ms per resizing or initial allocation.
<p>
The EventPacket is prefilled with Events that have default values. One constructor lets
you fill the EventPacket with a subclass of BasicEvent. This prefilling avoids
the overhead of object creation. It also allows easy access to all information contained in the
event and it allows storing arbitrary event information, including
extended type information, in EventPackets.
<p>
However, this reuse of existing objects means that you need to take particular precautions.
* The events that are stored
in a packet are references to objects. Therefore you can assign an event to a different packet
but this event will still be referenced in the original packet
and can change.
<p>
Generally in event processing, you will iterate over an input packet to produce an output packet.
You iterate over an existing EventPacket that has input data using the iterator().
This lets you access the events in the input packet.
<p>
When you want to write these events to an existing output packet,
then you need to use the target event's copyFrom(Event e) method
that copies all the fields in the
source packet to the target packet. This lets you copy data such as
timestamp, x,y location to a target event. You can then fill in the target event's extended
type information.
<p>
When you iterate over an input packet to write to a target packet,
you obtain the target event to write your results to by using the target packet's
output enumeration by using the outputIterator() method. This enumeration has a method nextOutput() that returns the
next output event to write to. This nextOutput() method also expands the packet if the current capacity needs to be enlarged.
The iterator is initialized by the call to outputIterator().
<p>
The amount of time iterating over events can also be limited by using the time limiter.
This static (class) method starts a timer when it is restarted and after timeout, no more events are returned
from input iteration. These methods are used in FilterChain to limit processing time.
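<p>
A minimal sketch of this copy pattern (the packets <code>in</code> and <code>out</code> here are hypothetical, not fields of this class):
<pre>
    OutputEventIterator outItr = out.outputIterator(); // resets the output packet
    for (BasicEvent ein : in) {                        // input iteration; honors the time limiter
        BasicEvent eout = outItr.nextOutput();         // next reusable target event, grows packet if needed
        eout.copyFrom(ein);                            // copy timestamp, x, y, then set any extended fields
    }
</pre>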
* @author tobi
*/
public class EventPacket<E extends BasicEvent> implements /*EventPacketInterface<E>,*/ Cloneable, Iterable<E> {
static Logger log=Logger.getLogger(EventPacket.class.getName());
/** The time limiting Timer */
private static TimeLimiter timeLimitTimer=new TimeLimiter();
/** Resets the time limiter for input iteration. After the timer times out
(time determined by timeLimitMs) input iterators will not return any more events.
*/
static public void restartTimeLimiter() {
timeLimitTimer.restart();
}
/** restart the time limiter with limit timeLimitMs
@param timeLimitMs time in ms
*/
public static void restartTimeLimiter(int timeLimitMs) {
setTimeLimitMs(timeLimitMs);
restartTimeLimiter();
}
/** Default capacity in events for new EventPackets */
public final int DEFAULT_INITIAL_CAPACITY=4096;
private int capacity;
/** the number of events eventList actually contains (0 to size-1) */
private int size=0;
private Class eventClass=null;
private Constructor eventConstructor=null;
private E eventPrototype;
private transient E[] elementData;
/** Constructs a new EventPacket filled with BasicEvent.
@see net.sf.jaer.event.BasicEvent
*/
public EventPacket() {
this(BasicEvent.class);
}
/** Constructs a new EventPacket filled with the given event class.
@see net.sf.jaer.event.BasicEvent
*/
public EventPacket(Class<? extends BasicEvent> eventClass) {
if(!BasicEvent.class.isAssignableFrom(eventClass)) {
throw new Error("making EventPacket that holds "+eventClass+" but these are not assignable from BasicEvent");
}
setEventClass(eventClass);
}
/** Fills this with DEFAULT_INITIAL_CAPACITY of the event class */
protected void initializeEvents() {
// eventList=new ArrayList<E>(DEFAULT_INITIAL_CAPACITY);
// elementData = (E[])new BasicEvent[DEFAULT_INITIAL_CAPACITY];
elementData=(E[]) Array.newInstance(eventClass, DEFAULT_INITIAL_CAPACITY);
fillWithDefaultEvents(0, DEFAULT_INITIAL_CAPACITY);
size=0;
capacity=DEFAULT_INITIAL_CAPACITY;
}
private void fillWithDefaultEvents(int startIndex, int endIndex) {
try {
for(int i=startIndex; i<endIndex; i++) {
E e=(E) eventConstructor.newInstance();
// eventList.add(e);
elementData[i]=e;
eventPrototype=e;
}
} catch(Exception e) {
log.warning("while filling packet with default events caught "+e);
e.printStackTrace();
}
}
/** Returns duration of packet in microseconds.
*
* @return 0 if there are less than 2 events, otherwise last timestamp minus first timestamp.
*/
public int getDurationUs() {
if(size<2) {
return 0;
} else {
return getLastTimestamp()-getFirstTimestamp();
}
}
public String getDescription() {
return "";
}
/** Sets the size to zero. */
public void clear() {
size=0; // we don't clear list, because that nulls all the events
}
protected void setSize(int n) {
size=n;
// eventList.
// this.numEvents=n;
}
    /** Event rate for this packet in Hz, measured stupidly by
     * the size in events divided by the packet duration.
     * If packet duration is zero (there are no events or zero time interval between the events),
     * then the rate returned is zero.
    @return rate of events in Hz.
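     * For example, assuming the default timestamp tick of 1 us, 1000 events spanning 10 ms (10000 us) give 1000/0.01 s = 1e5 Hz.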
*/
public float getEventRateHz() {
if(getDurationUs()==0) {
return 0;
}
return (float) getSize()/((float)getDurationUs()*AEConstants.TICK_DEFAULT_US*1e-6f);
}
// public void copyTo(EventPacket packet) {
// }
/** Returns first event, or null if there are no events.
*
* @return the event or null if there are no events.
*/
public E getFirstEvent() {
if(size==0) {
return null;
}
return elementData[0];
// return eventList.get(0);
}
/** Returns last event, or null if there are no events.
*
* @return the event or null if there are no events.
*/
public E getLastEvent() {
if(size==0) {
return null;
}
return elementData[size-1];
// return eventList.get(size-1);
}
/** Returns first timestamp or 0 if there are no events.
*
* @return timestamp
*/
public int getFirstTimestamp() {
// if(events==null) return 0; else return events[0].timestamp;
return elementData[0].timestamp;
// return eventList.get(0).timestamp;
}
/** @return last timestamp in packet.
If packet is empty, returns zero - which could be important if this time is used for e.g. filtering operations!
*/
public int getLastTimestamp() {
// if(size==0) return 0;
//// if(events==null) return 0; else return events[numEvents-1].timestamp;
// return elementData[size-1].timestamp;
//// return eventList.get(size-1).timestamp;
int s=size;
if(s==0) {
return 0;
}
return elementData[s-1].timestamp;
}
/** Returns the k'th event.
* @throws ArrayIndexOutOfBoundsException if out of bounds of packet.
*/
final public E getEvent(int k) {
if(k>=size) {
throw new ArrayIndexOutOfBoundsException();
}
return elementData[k];
// return eventList.get(k);
}
private InItr inputIterator=null;
    /** Initializes and returns the iterator over input events.
@return an iterator that can iterate over the events.
*/
final public Iterator<E> inputIterator() {
if(inputIterator==null) {
inputIterator=new InItr();
} else {
inputIterator.reset();
}
return inputIterator;
}
private OutItr outputIterator=null;
/** Returns an iterator that iterates over the output events.
*
     * @return the iterator. Use it to obtain new output events which can then be copied from other events or modified.
*/
final public OutputEventIterator<E> outputIterator() {
if(outputIterator==null) {
outputIterator=new OutItr();
} else {
outputIterator.reset();
}
return outputIterator;
}
final private class OutItr implements OutputEventIterator {
OutItr() {
size=0; // reset size because we are starting off output packet
}
/** obtains the next output event. Increments the size of the packet */
final public E nextOutput() {
if(size>=capacity) {
enlargeCapacity();
// System.out.println("enlarged "+EventPacket.this);
}
return elementData[size++];
}
final public void reset() {
size=0;
}
public String toString() {
return "OutputEventIterator for packet with size="+size+" capacity="+capacity;
}
}
final private class InItr implements Iterator<E> {
int cursor;
boolean usingTimeout=timeLimitTimer.isEnabled();
public InItr() {
reset();
}
final public boolean hasNext() {
if(usingTimeout) {
return cursor<size&&!timeLimitTimer.isTimedOut();
} else {
return cursor<size;
}
}
final public E next() {
return elementData[cursor++];
}
public void reset() {
cursor=0;
            usingTimeout=timeLimitTimer.isEnabled(); // the time limiter is only used if timeLimitTimer is enabled, but the flag to check it is only set on packet reset
}
        public void remove() {
            // shift subsequent events left over the one last returned by next()
            for(int ctr=cursor; ctr<size; ctr++) {
                elementData[ctr-1]=elementData[ctr];
            }
            //go back as we removed an event
            cursor--;
            size--;
            //throw new UnsupportedOperationException();
        }
public String toString() {
return "InputEventIterator cursor="+cursor+" for packet with size="+size;
}
}
/** Enlarges capacity by some factor, then copies all event references to the new packet */
private void enlargeCapacity() {
log.info("enlarging capacity of "+this);
int ncapacity=capacity*2; // (capacity*3)/2+1;
Object oldData[]=elementData;
elementData=(E[]) new BasicEvent[ncapacity];
System.arraycopy(oldData, 0, elementData, 0, size);
// capacity still is old capacity and we have already filled it to there with new events, now fill
// in up to new capacity with new events
fillWithDefaultEvents(capacity, ncapacity);
capacity=ncapacity;
}
// public static void main(String[] args){
// EventPacket p=new EventPacket();
// p.test();
// }
//
/**
0.32913625s for 300 n allocations, 1097.1208 us/packet
0.3350817s for 300 n allocations, 1116.939 us/packet
0.3231394s for 300 n allocations, 1077.1313 us/packet
0.32404426s for 300 n allocations, 1080.1475 us/packet
0.3472975s for 300 n allocations, 1157.6583 us/packet
0.33720487s for 300 n allocations, 1124.0162 us/packet
*/
// void test(){
// int nreps=5;
// int size=30000;
// long stime, etime;
// EventPacket<BasicEvent> p,pout;
// OutputEventIterator outItr;
// Iterator<BasicEvent> inItr;
//
// System.out.println("make new packets");
// for(int k=0;k<nreps;k++){
// stime=System.nanoTime();
// for(int i=0;i<nreps;i++){
// p=new EventPacket();
// }
// etime=System.nanoTime();
//
// float timeSec=(etime-stime)/1e9f;
//
// System.out.println(timeSec+ "s"+" for "+nreps+" n allocations, "+1e6f*timeSec/nreps+" us/packet ");
// System.out.flush();
// try{
// Thread.currentThread().sleep(10);
// }catch(Exception e){}
// }
//
// System.out.println("make a new packet and fill with events");
// p=new EventPacket<BasicEvent>();
// for(int k=0;k<nreps;k++){
// stime=System.nanoTime();
// outItr=p.outputIterator();
// for(int i=0;i<size;i++){
// BasicEvent e=outItr.nextOutput();
// e.timestamp=i;
// e.x=((short)i);
// e.y=(e.x);
// }
// etime=System.nanoTime();
//
// float timeSec=(etime-stime)/1e9f;
//
// System.out.println(timeSec+ "s"+" for "+size+" fill, "+1e6f*timeSec/size+" us/event ");
// System.out.flush();
// try{
// Thread.currentThread().sleep(10);
// }catch(Exception e){}
// }
//
//
// System.out.println("iterate over packet, changing all values");
//// p=new EventPacket();
// pout=new EventPacket<BasicEvent>();
//
// for(int k=0;k<nreps;k++){
// stime=System.nanoTime();
// inItr=p.inputIterator();
// outItr=pout.outputIterator();
// for(BasicEvent ein:p){
//
//// while(inItr.hasNext()){
//// BasicEvent ein=inItr.next();
// BasicEvent eout=outItr.nextOutput();
// eout.copyFrom(ein);
// }
// etime=System.nanoTime();
//
// float timeSec=(etime-stime)/1e9f;
//
// System.out.println(timeSec+ "s"+" for iteration over packet with size="+p.getSize()+", "+timeSec/p.getSize()+" s per event");
// System.out.flush();
// try{
// Thread.currentThread().sleep(10);
// }catch(Exception e){}
// }
//
// System.out.println("\nmake packet with OrientationEvent and assign polarity and orientation");
// pout=new EventPacket(OrientationEvent.class);
// OrientationEvent ori=null;
// for(int k=0;k<nreps;k++){
// stime=System.nanoTime();
// outItr=pout.outputIterator();
// for(int i=0;i<size;i++){
// ori=(OrientationEvent)outItr.nextOutput();
// ((PolarityEvent)ori).type=10;
// ori.timestamp=i;
// ori.orientation=(byte)(100);
//// ori.polarity=(byte)(20);
// }
// etime=System.nanoTime();
// float timeSec=(etime-stime)/1e9f;
//
// System.out.println(timeSec+ "s"+" for iteration over packet with size="+p.getSize()+", "+timeSec/p.getSize()+" s per event");
// System.out.flush();
// try{
// Thread.currentThread().sleep(10);
// }catch(Exception e){}
// }
// System.out.println("ori event ="+pout.getEvent(0)+" with type="+ori.getType());
// System.out.println(pout.toString());
//
// }
//
/** Returns the number of events in the packet.
*
* @return size in events.
*/
final public int getSize() {
return size;
}
/** Reports if the packet is empty.
*
* @return true if empty.
*/
public boolean isEmpty() {
        return size==0;
}
public String toString() {
int size=getSize();
String s="EventPacket holding "+getEventClass().getSimpleName()+" with size="+size+" capacity="+capacity;
return s;
}
/** Returns the number of 'types' of events.
*
* @return the number of types, typically a small number like 1,2, or 4.
*/
final public int getNumCellTypes() {
return eventPrototype.getNumCellTypes();
}
/** Returns a prototype of the events in the packet.
*
* @return a single instance of the event.
*/
final public E getEventPrototype() {
return eventPrototype;
}
/** Initializes and returns the iterator */
final public Iterator<E> iterator() {
return inputIterator();
}
/** Returns the class of event in this packet.
@return the event class.
*/
final public Class getEventClass() {
return eventClass;
}
/** Sets the event class for this packet and fills the packet with these events.
*
     * @param eventClass which must extend BasicEvent
*/
public final void setEventClass(Class<? extends BasicEvent> eventClass) {
this.eventClass=eventClass;
try {
eventConstructor=eventClass.getConstructor();
} catch(NoSuchMethodException e) {
log.warning("cannot get constructor for constructing Events for building EventPacket: exception="+e.toString()+", cause="+e.getCause());
e.printStackTrace();
}
initializeEvents();
}
/** Gets the class time limit for iteration in ms
*/
final public static int getTimeLimitMs() {
return timeLimitTimer.getTimeLimitMs();
}
/** Sets the class time limit for filtering a packet through the filter chain in ms.
@param timeLimitMs the time limit in ms
@see #restartTimeLimiter
*/
final public static void setTimeLimitMs(int timeLimitMs) {
timeLimitTimer.setTimeLimitMs(timeLimitMs);
}
final public static void setTimeLimitEnabled(boolean yes) {
timeLimitTimer.setEnabled(yes);
}
/** Returns status of time limiting
@return true if timelimiting is enabled
*/
final public static boolean isTimeLimitEnabled() {
return timeLimitTimer.isEnabled();
}
/** Returns true if timeLimitTimer is timed out and timeLimitEnabled */
final public static boolean isTimedOut() {
return timeLimitTimer.isTimedOut();
}
}
| formatting, javadoc, overrides
git-svn-id: e3d3b427d532171a6bd7557d8a4952a393b554a2@1705 b7f4320f-462c-0410-a916-d9f35bb82d52
| src/net/sf/jaer/event/EventPacket.java | formatting, javadoc, overrides |
|
Java | lgpl-2.1 | 78602aa73c50da29ac9c92b86b002f7e41106305 | 0 | evolvedmicrobe/beast-mcmc,danieljue/beast-mcmc
* BeastMain.java
*
* Copyright (C) 2002-2006 Alexei Drummond and Andrew Rambaut
*
* This file is part of BEAST.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership and licensing.
*
* BEAST is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* BEAST is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with BEAST; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301 USA
*/
package dr.app.beast;
import dr.app.util.Arguments;
import dr.app.util.Utils;
import dr.inference.mcmc.MCMC;
import dr.math.MathUtils;
import dr.util.ErrorLogHandler;
import dr.util.MessageLogHandler;
import dr.util.Version;
import dr.xml.XMLParser;
import org.virion.jam.util.IconUtils;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.Iterator;
import java.util.logging.*;
public class BeastMain {
private final static Version version = new BeastVersion();
static class BeastConsoleApp extends org.virion.jam.console.ConsoleApplication {
XMLParser parser = null;
public BeastConsoleApp(String nameString, String aboutString, javax.swing.Icon icon) throws IOException {
super(nameString, aboutString, icon, false);
}
public void doStop() {
Iterator iter = parser.getThreads();
while (iter.hasNext()) {
Thread thread = (Thread) iter.next();
thread.stop();
}
}
}
public BeastMain(File inputFile, BeastConsoleApp consoleApp, int maxErrorCount, boolean verbose, boolean strictXML) {
if (inputFile == null) {
System.err.println();
System.err.println("Error: no input file specified");
return;
}
String fileName = inputFile.getName();
final Logger infoLogger = Logger.getLogger("dr.apps.beast");
try {
FileReader fileReader = new FileReader(inputFile);
XMLParser parser = new BeastParser(new String[]{fileName}, verbose, strictXML);
if (consoleApp != null) {
consoleApp.parser = parser;
}
// Add a handler to handle warnings and errors. This is a ConsoleHandler
// so the messages will go to StdOut..
Logger logger = Logger.getLogger("dr");
Handler handler = new MessageLogHandler();
handler.setFilter(new Filter() {
public boolean isLoggable(LogRecord record) {
return record.getLevel().intValue() < Level.WARNING.intValue();
}
});
logger.addHandler(handler);
// Add a handler to handle warnings and errors. This is a ConsoleHandler
// so the messages will go to StdErr..
handler = new ConsoleHandler();
handler.setFilter(new Filter() {
public boolean isLoggable(LogRecord record) {
return record.getLevel().intValue() >= Level.WARNING.intValue();
}
});
// logger.addHandler(handler);
logger.setUseParentHandlers(false);
infoLogger.info("Parsing XML file: " + fileName);
infoLogger.info(" File encoding: " + fileReader.getEncoding());
// This is a special logger that is for logging numerical and statistical errors
// during the MCMC run. It will tolerate up to maxErrorCount before throwing a
// RuntimeException to shut down the run.
//Logger errorLogger = Logger.getLogger("error");
handler = new ErrorLogHandler(maxErrorCount);
handler.setLevel(Level.WARNING);
logger.addHandler(handler);
parser.parse(fileReader, true);
} catch (java.io.IOException ioe) {
infoLogger.severe("File error: " + ioe.getMessage());
} catch (org.xml.sax.SAXParseException spe) {
if (spe.getMessage() != null && spe.getMessage().equals("Content is not allowed in prolog")) {
infoLogger.severe("Parsing error - the input file, " + fileName + ", is not a valid XML file.");
} else {
infoLogger.severe("Error running file: " + fileName);
infoLogger.severe("Parsing error - poorly formed XML (possibly not an XML file):\n" +
spe.getMessage());
}
} catch (org.w3c.dom.DOMException dome) {
infoLogger.severe("Error running file: " + fileName);
infoLogger.severe("Parsing error - poorly formed XML:\n" +
dome.getMessage());
} catch (dr.xml.XMLParseException pxe) {
if (pxe.getMessage() != null && pxe.getMessage().equals("Unknown root document element, beauti")) {
infoLogger.severe("Error running file: " + fileName);
infoLogger.severe(
"The file you just tried to run in BEAST is actually a BEAUti document.\n" +
"Although this uses XML, it is not a format that BEAST understands.\n" +
"These files are used by BEAUti to save and load your settings so that\n" +
"you can go back and alter them. To generate a BEAST file you must\n" +
"select the 'Generate BEAST File' option, either from the File menu or\n" +
"the button at the bottom right of the window.");
} else {
infoLogger.severe("Parsing error - poorly formed BEAST file, " + fileName + ":\n" +
pxe.getMessage());
}
} catch (RuntimeException rex) {
if (rex.getMessage() != null && rex.getMessage().startsWith("The initial posterior is zero")) {
infoLogger.warning("Error running file: " + fileName);
infoLogger.severe(
"The initial model is invalid because state has a zero probability.\n\n" +
"If the log likelihood of the tree is -Inf, his may be because the\n" +
"initial, random tree is so large that it has an extremely bad\n" +
"likelihood which is being rounded to zero.\n\n" +
"Alternatively, it may be that the product of starting mutation rate\n" +
"and tree height is extremely small or extremely large. \n\n" +
"Finally, it may be that the initial state is incompatible with\n" +
"one or more 'hard' constraints (on monophyly or bounds on parameter\n" +
"values. This will result in Priors with zero probability.\n\n" +
"The individual components of the posterior are as follows:\n" +
rex.getMessage() + "\n" +
"For more information go to <http://beast.bio.ed.ac.uk/>.");
} else {
// This call never returns as another RuntimeException exception is raised by
// the error log handler???
infoLogger.warning("Error running file: " + fileName);
System.err.println("Fatal exception: " + rex.getMessage());
rex.printStackTrace(System.err);
}
} catch (Exception ex) {
infoLogger.warning("Error running file: " + fileName);
infoLogger.severe("Fatal exception: " + ex.getMessage());
System.err.println("Fatal exception: " + ex.getMessage());
ex.printStackTrace(System.err);
}
}
public static void centreLine(String line, int pageWidth) {
int n = pageWidth - line.length();
int n1 = n / 2;
for (int i = 0; i < n1; i++) {
System.out.print(" ");
}
System.out.println(line);
}
public static void printTitle() {
System.out.println();
centreLine("BEAST " + version.getVersionString() + ", 2002-2008", 60);
centreLine("Bayesian Evolutionary Analysis Sampling Trees", 60);
centreLine("by", 60);
centreLine("Alexei J. Drummond and Andrew Rambaut", 60);
System.out.println();
centreLine("Department of Computer Science", 60);
centreLine("University of Auckland", 60);
centreLine("[email protected]", 60);
System.out.println();
centreLine("Institute of Evolutionary Biology", 60);
centreLine("University of Edinburgh", 60);
centreLine("[email protected]", 60);
System.out.println();
}
public static void printHeader() {
System.out.println("Downloads, Help & Resources:\n" +
"\thttp://beast.bio.ed.ac.uk/\n" +
"\n" +
"Source code distributed under the GNU Lesser General Public License:\n" +
"\thttp://code.google.com/p/beast-mcmc/\n" +
"\n" +
"Additional programming & components created by:\n" +
"\tRoald Forsberg\n" +
"\tGerton Lunter\n" +
"\tSidney Markowitz\n" +
"\tOliver Pybus\n" +
"\n" +
"Thanks to (for use of their code):\n" +
"\tKorbinian Strimmer");
}
public static void printUsage(Arguments arguments) {
arguments.printUsage("beast", "[<input-file-name>]");
System.out.println();
System.out.println(" Example: beast test.xml");
System.out.println(" Example: beast -window test.xml");
System.out.println();
}
//Main method
public static void main(String[] args) throws java.io.IOException {
printTitle();
Arguments arguments = new Arguments(
new Arguments.Option[]{
new Arguments.Option("verbose", "verbose XML parsing messages"),
new Arguments.Option("strict", "Fail on non conforming BEAST XML file"),
new Arguments.Option("window", "provide a console window"),
new Arguments.Option("working", "change working directory to input file's directory"),
new Arguments.LongOption("seed", "specify a random number generator seed"),
new Arguments.IntegerOption("errors", "maximum number of numerical errors before stopping"),
// new Arguments.Option("logops", "hack: log ops to stderr"),
new Arguments.IntegerOption("otfops", "experimental: on the fly op weigths. recompute frequency" +
"in number of states."),
new Arguments.Option("help", "option to print this message"),
});
try {
arguments.parseArguments(args);
} catch (Arguments.ArgumentException ae) {
System.out.println();
System.out.println(ae.getMessage());
System.out.println();
printTitle();
printUsage(arguments);
System.exit(1);
}
if (arguments.hasOption("help")) {
printTitle();
printUsage(arguments);
System.exit(0);
}
final boolean verbose = arguments.hasOption("verbose");
final boolean strictXML = arguments.hasOption("strict");
final boolean window = arguments.hasOption("window");
final boolean working = arguments.hasOption("working");
// (HACK)
//MCMC.logOps = arguments.hasOption("logops");
MCMC.ontheflyFreq = arguments.hasOption("otfops") ? arguments.getIntegerOption("otfops") : 0;
long seed = MathUtils.getSeed();
if (arguments.hasOption("seed")) {
seed = arguments.getLongOption("seed");
if (seed <= 0) {
printTitle();
System.err.println("The random number seed should be > 0");
System.exit(1);
}
MathUtils.setSeed(seed);
}
int maxErrorCount = 0;
if (arguments.hasOption("errors")) {
maxErrorCount = arguments.getIntegerOption("errors");
if (maxErrorCount < 0) {
maxErrorCount = 0;
}
}
// if (System.getProperty("dr.app.beast.main.window", "false").toLowerCase().equals("true")) {
// window = true;
// }
BeastConsoleApp consoleApp = null;
if (window) {
System.setProperty("com.apple.macos.useScreenMenuBar", "true");
System.setProperty("apple.laf.useScreenMenuBar", "true");
System.setProperty("apple.awt.showGrowBox", "true");
javax.swing.Icon icon = IconUtils.getIcon(BeastMain.class, "images/beast.png");
String nameString = "BEAST " + version.getVersionString();
String aboutString = "<html><center><p>Bayesian Evolutionary Analysis Sampling Trees<br>" +
"Version " + version.getVersionString() + ", 2002-2008</p>" +
"<p>by<br>" +
"Alexei J. Drummond and Andrew Rambaut</p>" +
"<p>Department of Computer Science, University of Auckland<br>" +
"<a href=\"mailto:[email protected]\">[email protected]</a></p>" +
"<p>Institute of Evolutionary Biology, University of Edinburgh<br>" +
"<a href=\"mailto:[email protected]\">[email protected]</a></p>" +
"<p><a href=\"http://beast.bio.ed.ac.uk/\">http://beast.bio.ed.ac.uk/</a></p>" +
"<p>Source code distributed under the GNU LGPL:<br>" +
"<a href=\"http://code.google.com/p/beast-mcmc/\">http://code.google.com/p/beast-mcmc/</a></p>" +
"<p>Model contributions from:<br>" +
"Erik Bloomquist, Roald Forsberg, Joseph Heled, Gerton Lunter, Sidney Markowitz, " +
"Vladimir Minin, Oliver Pybus, Marc Suchard, Jen Tom</p>" +
"<p>Thanks to Korbinian Strimmer for use of his code</p>" +
"</center></html>";
consoleApp = new BeastConsoleApp(nameString, aboutString, icon);
}
String inputFileName = null;
String[] args2 = arguments.getLeftoverArguments();
if (args2.length > 1) {
System.err.println("Unknown option: " + args2[1]);
System.err.println();
printTitle();
printUsage(arguments);
System.exit(1);
}
File inputFile = null;
if (args2.length > 0) {
inputFileName = args2[0];
inputFile = new File(inputFileName);
}
if (inputFileName == null) {
// No input file name was given so throw up a dialog box...
inputFile = Utils.getLoadFile("BEAST " + version.getVersionString() + " - Select XML input file");
}
if (inputFile != null && working) {
System.setProperty("user.dir", inputFile.getParent());
}
printTitle();
printHeader();
System.out.println();
System.out.println("Random number seed: " + seed);
System.out.println();
new BeastMain(inputFile, consoleApp, maxErrorCount, verbose, strictXML);
}
}
| src/dr/app/beast/BeastMain.java | /*
* BeastMain.java
*
* Copyright (C) 2002-2006 Alexei Drummond and Andrew Rambaut
*
* This file is part of BEAST.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership and licensing.
*
* BEAST is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* BEAST is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with BEAST; if not, write to the
* Free Software Foundation, Inc., 51 Franklin St, Fifth Floor,
* Boston, MA 02110-1301 USA
*/
package dr.app.beast;
import dr.app.util.Arguments;
import dr.app.util.Utils;
import dr.inference.mcmc.MCMC;
import dr.math.MathUtils;
import dr.util.ErrorLogHandler;
import dr.util.MessageLogHandler;
import dr.util.Version;
import dr.xml.XMLParser;
import org.virion.jam.util.IconUtils;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.Iterator;
import java.util.logging.*;
public class BeastMain {
private final static Version version = new BeastVersion();
static class BeastConsoleApp extends org.virion.jam.console.ConsoleApplication {
XMLParser parser = null;
public BeastConsoleApp(String nameString, String aboutString, javax.swing.Icon icon) throws IOException {
super(nameString, aboutString, icon, false);
}
public void doStop() {
Iterator iter = parser.getThreads();
while (iter.hasNext()) {
Thread thread = (Thread)iter.next();
thread.stop();
}
}
}
public BeastMain(File inputFile, BeastConsoleApp consoleApp, int maxErrorCount, boolean verbose, boolean strictXML) {
if (inputFile == null) {
System.err.println();
System.err.println("Error: no input file specified");
return;
}
String fileName = inputFile.getName();
final Logger infoLogger = Logger.getLogger("dr.apps.beast");
try {
FileReader fileReader = new FileReader(inputFile);
XMLParser parser = new BeastParser(new String[] {fileName}, verbose, strictXML);
if (consoleApp != null) {
consoleApp.parser = parser;
}
// Add a handler to handle warnings and errors. This is a ConsoleHandler
// so the messages will go to StdOut..
Logger logger = Logger.getLogger("dr");
Handler handler = new MessageLogHandler();
handler.setFilter(new Filter() {
public boolean isLoggable(LogRecord record) {
return record.getLevel().intValue() < Level.WARNING.intValue();
}
});
logger.addHandler(handler);
// Add a handler to handle warnings and errors. This is a ConsoleHandler
// so the messages will go to StdErr..
handler = new ConsoleHandler();
handler.setFilter(new Filter() {
public boolean isLoggable(LogRecord record) {
return record.getLevel().intValue() >= Level.WARNING.intValue();
}
});
// logger.addHandler(handler);
logger.setUseParentHandlers(false);
infoLogger.info("Parsing XML file: " + fileName);
infoLogger.info(" File encoding: " + fileReader.getEncoding());
// This is a special logger that is for logging numerical and statistical errors
// during the MCMC run. It will tolerate up to maxErrorCount before throwing a
// RuntimeException to shut down the run.
//Logger errorLogger = Logger.getLogger("error");
handler = new ErrorLogHandler(maxErrorCount);
logger.addHandler(handler);
parser.parse(fileReader, true);
} catch (java.io.IOException ioe) {
infoLogger.severe("File error: " + ioe.getMessage());
} catch (org.xml.sax.SAXParseException spe) {
if (spe.getMessage() != null && spe.getMessage().equals("Content is not allowed in prolog")) {
infoLogger.severe("Parsing error - the input file, " + fileName + ", is not a valid XML file.");
} else {
infoLogger.severe("Error running file: " + fileName);
infoLogger.severe("Parsing error - poorly formed XML (possibly not an XML file):\n" +
spe.getMessage());
}
} catch (org.w3c.dom.DOMException dome) {
infoLogger.severe("Error running file: " + fileName);
infoLogger.severe("Parsing error - poorly formed XML:\n" +
dome.getMessage());
} catch (dr.xml.XMLParseException pxe) {
if (pxe.getMessage() != null && pxe.getMessage().equals("Unknown root document element, beauti")) {
infoLogger.severe("Error running file: " + fileName);
infoLogger.severe(
"The file you just tried to run in BEAST is actually a BEAUti document.\n" +
"Although this uses XML, it is not a format that BEAST understands.\n" +
"These files are used by BEAUti to save and load your settings so that\n" +
"you can go back and alter them. To generate a BEAST file you must\n" +
"select the 'Generate BEAST File' option, either from the File menu or\n" +
"the button at the bottom right of the window.");
} else {
infoLogger.severe("Parsing error - poorly formed BEAST file, " + fileName + ":\n" +
pxe.getMessage());
}
} catch (RuntimeException rex) {
if (rex.getMessage() != null && rex.getMessage().startsWith("The initial posterior is zero")) {
infoLogger.warning("Error running file: " + fileName);
infoLogger.severe(
"The initial model is invalid because state has a zero probability.\n\n" +
"If the log likelihood of the tree is -Inf, his may be because the\n" +
"initial, random tree is so large that it has an extremely bad\n" +
"likelihood which is being rounded to zero.\n\n" +
"Alternatively, it may be that the product of starting mutation rate\n" +
"and tree height is extremely small or extremely large. \n\n" +
"Finally, it may be that the initial state is incompatible with\n" +
"one or more 'hard' constraints (on monophyly or bounds on parameter\n" +
"values. This will result in Priors with zero probability.\n\n" +
"The individual components of the posterior are as follows:\n" +
rex.getMessage() + "\n" +
"For more information go to <http://beast.bio.ed.ac.uk/>.");
} else {
// This call never returns as another RuntimeException exception is raised by
// the error log handler???
infoLogger.warning("Error running file: " + fileName);
System.err.println("Fatal exception: " + rex.getMessage());
rex.printStackTrace(System.err);
}
} catch (Exception ex) {
infoLogger.warning("Error running file: " + fileName);
infoLogger.severe("Fatal exception: " + ex.getMessage());
System.err.println("Fatal exception: " + ex.getMessage());
ex.printStackTrace(System.err);
}
}
public static void centreLine(String line, int pageWidth) {
int n = pageWidth - line.length();
int n1 = n / 2;
for (int i = 0; i < n1; i++) { System.out.print(" "); }
System.out.println(line);
}
public static void printTitle() {
System.out.println();
centreLine("BEAST " + version.getVersionString() + ", 2002-2008", 60);
centreLine("Bayesian Evolutionary Analysis Sampling Trees", 60);
centreLine("by", 60);
centreLine("Alexei J. Drummond and Andrew Rambaut", 60);
System.out.println();
centreLine("Department of Computer Science", 60);
centreLine("University of Auckland", 60);
centreLine("[email protected]", 60);
System.out.println();
centreLine("Institute of Evolutionary Biology", 60);
centreLine("University of Edinburgh", 60);
centreLine("[email protected]", 60);
System.out.println();
}
public static void printHeader() {
System.out.println("Downloads, Help & Resources:\n" +
"\thttp://beast.bio.ed.ac.uk/\n" +
"\n" +
"Source code distributed under the GNU Lesser General Public License:\n" +
"\thttp://code.google.com/p/beast-mcmc/\n" +
"\n" +
"Additional programming & components created by:\n" +
"\tRoald Forsberg\n" +
"\tGerton Lunter\n" +
"\tSidney Markowitz\n" +
"\tOliver Pybus\n" +
"\n" +
"Thanks to (for use of their code):\n" +
"\tKorbinian Strimmer");
}
public static void printUsage(Arguments arguments) {
arguments.printUsage("beast", "[<input-file-name>]");
System.out.println();
System.out.println(" Example: beast test.xml");
System.out.println(" Example: beast -window test.xml");
System.out.println();
}
//Main method
public static void main(String[] args) throws java.io.IOException {
printTitle();
Arguments arguments = new Arguments(
new Arguments.Option[] {
new Arguments.Option("verbose", "verbose XML parsing messages"),
new Arguments.Option("strict", "Fail on non conforming BEAST XML file"),
new Arguments.Option("window", "provide a console window"),
new Arguments.Option("working", "change working directory to input file's directory"),
new Arguments.LongOption("seed", "specify a random number generator seed"),
new Arguments.IntegerOption("errors", "maximum number of numerical errors before stopping"),
// new Arguments.Option("logops", "hack: log ops to stderr"),
new Arguments.IntegerOption("otfops", "experimental: on the fly op weigths. recompute frequency" +
"in number of states."),
new Arguments.Option("help", "option to print this message"),
});
try {
arguments.parseArguments(args);
} catch (Arguments.ArgumentException ae) {
System.out.println();
System.out.println(ae.getMessage());
System.out.println();
printTitle();
printUsage(arguments);
System.exit(1);
}
if (arguments.hasOption("help")) {
printTitle();
printUsage(arguments);
System.exit(0);
}
final boolean verbose = arguments.hasOption("verbose");
final boolean strictXML = arguments.hasOption("strict");
final boolean window = arguments.hasOption("window");
final boolean working = arguments.hasOption("working");
// (HACK)
//MCMC.logOps = arguments.hasOption("logops");
MCMC.ontheflyFreq = arguments.hasOption("otfops") ? arguments.getIntegerOption("otfops") : 0;
long seed = MathUtils.getSeed();
if (arguments.hasOption("seed")) {
seed = arguments.getLongOption("seed");
if (seed <= 0) {
printTitle();
System.err.println("The random number seed should be > 0");
System.exit(1);
}
MathUtils.setSeed(seed);
}
int maxErrorCount = 0;
if (arguments.hasOption("errors")) {
maxErrorCount = arguments.getIntegerOption("errors");
if (maxErrorCount < 0) {
maxErrorCount = 0;
}
}
// if (System.getProperty("dr.app.beast.main.window", "false").toLowerCase().equals("true")) {
// window = true;
// }
BeastConsoleApp consoleApp = null;
if (window) {
System.setProperty("com.apple.macos.useScreenMenuBar","true");
System.setProperty("apple.laf.useScreenMenuBar","true");
System.setProperty("apple.awt.showGrowBox","true");
javax.swing.Icon icon = IconUtils.getIcon(BeastMain.class, "images/beast.png");
String nameString = "BEAST " + version.getVersionString();
String aboutString = "<html><center><p>Bayesian Evolutionary Analysis Sampling Trees<br>" +
"Version " + version.getVersionString() + ", 2002-2008</p>" +
"<p>by<br>" +
"Alexei J. Drummond and Andrew Rambaut</p>" +
"<p>Department of Computer Science, University of Auckland<br>" +
"<a href=\"mailto:[email protected]\">[email protected]</a></p>" +
"<p>Institute of Evolutionary Biology, University of Edinburgh<br>" +
"<a href=\"mailto:[email protected]\">[email protected]</a></p>" +
"<p><a href=\"http://beast.bio.ed.ac.uk/\">http://beast.bio.ed.ac.uk/</a></p>" +
"<p>Source code distributed under the GNU LGPL:<br>" +
"<a href=\"http://code.google.com/p/beast-mcmc/\">http://code.google.com/p/beast-mcmc/</a></p>" +
"<p>Model contributions from:<br>" +
"Erik Bloomquist, Roald Forsberg, Joseph Heled, Gerton Lunter, Sidney Markowitz, " +
"Vladimir Minin, Oliver Pybus, Marc Suchard, Jen Tom</p>" +
"<p>Thanks to Korbinian Strimmer for use of his code</p>" +
"</center></html>";
consoleApp = new BeastConsoleApp(nameString, aboutString, icon);
}
String inputFileName = null;
String[] args2 = arguments.getLeftoverArguments();
if (args2.length > 1) {
System.err.println("Unknown option: " + args2[1]);
System.err.println();
printTitle();
printUsage(arguments);
System.exit(1);
}
File inputFile = null;
if (args2.length > 0) {
inputFileName = args2[0];
inputFile = new File(inputFileName);
}
if (inputFileName == null) {
// No input file name was given so throw up a dialog box...
inputFile = Utils.getLoadFile("BEAST "+version.getVersionString()+" - Select XML input file");
}
if (inputFile != null && working) {
System.setProperty("user.dir", inputFile.getParent());
}
printTitle();
printHeader();
System.out.println();
System.out.println("Random number seed: " + seed);
System.out.println();
new BeastMain(inputFile, consoleApp, maxErrorCount, verbose, strictXML);
}
}
| removed annoying duplication of log messages during xml parsing | src/dr/app/beast/BeastMain.java | removed annoying duplication of log messages during xml parsing |
|
Java | lgpl-2.1 | b6142e2bae7fc401105a7cd2cedd43faa4e29923 | 0 | bjalon/nuxeo-features,nuxeo-archives/nuxeo-features,deadcyclo/nuxeo-features
* (C) Copyright 2006-2007 Nuxeo SAS (http://nuxeo.com/) and contributors.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser General Public License
* (LGPL) version 2.1 which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/lgpl.html
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* Contributors:
* <a href="mailto:[email protected]">Anahide Tchertchian</a>
*
* $Id: FakeDocument.java 26383 2007-10-23 16:21:34Z bstefanescu $
*/
package org.nuxeo.ecm.platform.relations.io.test;
import java.io.Serializable;
import java.util.Calendar;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.nuxeo.ecm.core.api.Blob;
import org.nuxeo.ecm.core.api.DocumentException;
import org.nuxeo.ecm.core.api.model.DocumentPart;
import org.nuxeo.ecm.core.lifecycle.LifeCycleException;
import org.nuxeo.ecm.core.model.Document;
import org.nuxeo.ecm.core.model.DocumentIterator;
import org.nuxeo.ecm.core.model.Property;
import org.nuxeo.ecm.core.model.Repository;
import org.nuxeo.ecm.core.model.Session;
import org.nuxeo.ecm.core.schema.DocumentType;
import org.nuxeo.ecm.core.schema.DocumentTypeImpl;
import org.nuxeo.ecm.core.versioning.DocumentVersion;
import org.nuxeo.ecm.core.versioning.DocumentVersionIterator;
/**
* @author <a href="mailto:[email protected]">Anahide Tchertchian</a>
*
*/
public class FakeDocument implements Document {
final String uuid;
public FakeDocument(String uuid) {
this.uuid = uuid;
}
public String getName() throws DocumentException {
return uuid;
}
public String getUUID() throws DocumentException {
return uuid;
}
public Session getSession() {
return new FakeSession();
}
public DocumentType getType() {
return new DocumentTypeImpl((DocumentType) null, "FakeDocument");
}
public String getPath() throws DocumentException {
return "/path/" + uuid;
}
public Repository getRepository() {
return new FakeRepository();
}
// not implemented (useless)
public boolean followTransition(String transition)
throws LifeCycleException {
return false;
}
public Collection<String> getAllowedStateTransitions()
throws LifeCycleException {
return null;
}
public String getCurrentLifeCycleState() throws LifeCycleException {
return null;
}
public Calendar getLastModified() throws DocumentException {
return null;
}
public String getLifeCyclePolicy() throws LifeCycleException {
return null;
}
public Document getParent() throws DocumentException {
return null;
}
public <T extends Serializable> T getSystemProp(String name, Class<T> type)
throws DocumentException {
return null;
}
public boolean isDirty() throws DocumentException {
return false;
}
public boolean isFolder() {
return false;
}
public boolean isProxy() {
return false;
}
public void remove() throws DocumentException {
}
public void save() throws DocumentException {
}
public void setDirty(boolean value) throws DocumentException {
}
public <T extends Serializable> void setSystemProp(String name, T value)
throws DocumentException {
}
public void checkIn(String label, String description)
throws DocumentException {
}
public void checkIn(String label) throws DocumentException {
}
public void checkOut() throws DocumentException {
}
public DocumentVersion getLastVersion() throws DocumentException {
return null;
}
public Document getSourceDocument() throws DocumentException {
return null;
}
public Document getVersion(String label) throws DocumentException {
return null;
}
public List<String> getVersionsIds() throws DocumentException {
return null;
}
public DocumentVersionIterator getVersions() throws DocumentException {
return null;
}
public boolean hasVersions() throws DocumentException {
return false;
}
public boolean isCheckedOut() throws DocumentException {
return false;
}
public boolean isVersion() {
return false;
}
public void restore(String label) throws DocumentException {
}
public Document addChild(String name, String typeName)
throws DocumentException {
return null;
}
public Document getChild(String name) throws DocumentException {
return null;
}
public Iterator<Document> getChildren() throws DocumentException {
return null;
}
public DocumentIterator getChildren(int start) throws DocumentException {
return null;
}
public List<String> getChildrenIds() throws DocumentException {
return null;
}
public boolean hasChild(String name) throws DocumentException {
return false;
}
public boolean hasChildren() throws DocumentException {
return false;
}
public void removeChild(String name) throws DocumentException {
}
public Document resolvePath(String relPath) throws DocumentException {
return null;
}
public Map<String, Object> exportFlatMap(String[] schemas)
throws DocumentException {
return null;
}
public Map<String, Object> exportMap(String schemaName)
throws DocumentException {
return null;
}
public Map<String, Map<String, Object>> exportMap(String[] schemas)
throws DocumentException {
return null;
}
public boolean getBoolean(String name) throws DocumentException {
return false;
}
public Blob getContent(String name) throws DocumentException {
return null;
}
public Calendar getDate(String name) throws DocumentException {
return null;
}
public List<String> getDirtyFields() {
return null;
}
public double getDouble(String name) throws DocumentException {
return 0;
}
public long getLong(String name) throws DocumentException {
return 0;
}
public Collection<Property> getProperties() throws DocumentException {
return null;
}
public Property getProperty(String name) throws DocumentException {
return null;
}
public Iterator<Property> getPropertyIterator() throws DocumentException {
return null;
}
public Object getPropertyValue(String name) throws DocumentException {
return null;
}
public String getString(String name) throws DocumentException {
return null;
}
public void importFlatMap(Map<String, Object> map) throws DocumentException {
}
public void importMap(Map<String, Map<String, Object>> map)
throws DocumentException {
}
public boolean isPropertySet(String path) throws DocumentException {
return false;
}
public void removeProperty(String name) throws DocumentException {
}
public void setBoolean(String name, boolean value) throws DocumentException {
}
public void setContent(String name, Blob value) throws DocumentException {
}
public void setDate(String name, Calendar value) throws DocumentException {
}
public void setDouble(String name, double value) throws DocumentException {
}
public void setLong(String name, long value) throws DocumentException {
}
public void setPropertyValue(String name, Object value)
throws DocumentException {
}
public void setString(String name, String value) throws DocumentException {
}
public String getLock() throws DocumentException {
return null;
}
public boolean isLocked() throws DocumentException {
return false;
}
public void setLock(String key) throws DocumentException {
}
public String unlock() throws DocumentException {
return null;
}
public void readDocumentPart(DocumentPart dp) throws Exception {
}
public void writeDocumentPart(DocumentPart dp) throws Exception {
}
public void orderBefore(String src, String dest) throws DocumentException {
}
}
| nuxeo-platform-relations-io/src/test/java/org/nuxeo/ecm/platform/relations/io/test/FakeDocument.java | /*
* (C) Copyright 2006-2007 Nuxeo SAS (http://nuxeo.com/) and contributors.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the GNU Lesser General Public License
* (LGPL) version 2.1 which accompanies this distribution, and is available at
* http://www.gnu.org/licenses/lgpl.html
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* Contributors:
* <a href="mailto:[email protected]">Anahide Tchertchian</a>
*
* $Id: FakeDocument.java 26383 2007-10-23 16:21:34Z bstefanescu $
*/
package org.nuxeo.ecm.platform.relations.io.test;
import java.io.Serializable;
import java.util.Calendar;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.nuxeo.ecm.core.api.Blob;
import org.nuxeo.ecm.core.api.DocumentException;
import org.nuxeo.ecm.core.api.model.DocumentPart;
import org.nuxeo.ecm.core.lifecycle.LifeCycleException;
import org.nuxeo.ecm.core.model.Document;
import org.nuxeo.ecm.core.model.DocumentIterator;
import org.nuxeo.ecm.core.model.Property;
import org.nuxeo.ecm.core.model.Repository;
import org.nuxeo.ecm.core.model.Session;
import org.nuxeo.ecm.core.schema.DocumentType;
import org.nuxeo.ecm.core.schema.DocumentTypeImpl;
import org.nuxeo.ecm.core.versioning.DocumentVersion;
import org.nuxeo.ecm.core.versioning.DocumentVersionIterator;
/**
* @author <a href="mailto:[email protected]">Anahide Tchertchian</a>
*
*/
public class FakeDocument implements Document {
final String uuid;
public FakeDocument(String uuid) {
this.uuid = uuid;
}
public String getName() throws DocumentException {
return uuid;
}
public String getUUID() throws DocumentException {
return uuid;
}
public Session getSession() {
return new FakeSession();
}
public DocumentType getType() {
return new DocumentTypeImpl((DocumentType) null, "FakeDocument");
}
public String getPath() throws DocumentException {
return "/path/" + uuid;
}
public Repository getRepository() {
return new FakeRepository();
}
// not implemented (useless)
public boolean followTransition(String transition)
throws LifeCycleException {
return false;
}
public Collection<String> getAllowedStateTransitions()
throws LifeCycleException {
return null;
}
public String getCurrentLifeCycleState() throws LifeCycleException {
return null;
}
public Calendar getLastModified() throws DocumentException {
return null;
}
public String getLifeCyclePolicy() throws LifeCycleException {
return null;
}
public Document getParent() throws DocumentException {
return null;
}
public <T extends Serializable> T getSystemProp(String name, Class<T> type)
throws DocumentException {
return null;
}
public boolean isDirty() throws DocumentException {
return false;
}
public boolean isFolder() {
return false;
}
public boolean isProxy() {
return false;
}
public void remove() throws DocumentException {
}
public void save() throws DocumentException {
}
public void setDirty(boolean value) throws DocumentException {
}
public <T extends Serializable> void setSystemProp(String name, T value)
throws DocumentException {
}
public void checkIn(String label, String description)
throws DocumentException {
}
public void checkIn(String label) throws DocumentException {
}
public void checkOut() throws DocumentException {
}
public DocumentVersion getLastVersion() throws DocumentException {
return null;
}
public Document getSourceDocument() throws DocumentException {
return null;
}
public Document getVersion(String label) throws DocumentException {
return null;
}
public DocumentVersionIterator getVersions() throws DocumentException {
return null;
}
public boolean hasVersions() throws DocumentException {
return false;
}
public boolean isCheckedOut() throws DocumentException {
return false;
}
public boolean isVersion() {
return false;
}
public void restore(String label) throws DocumentException {
}
public Document addChild(String name, String typeName)
throws DocumentException {
return null;
}
public Document getChild(String name) throws DocumentException {
return null;
}
public Iterator<Document> getChildren() throws DocumentException {
return null;
}
public DocumentIterator getChildren(int start) throws DocumentException {
return null;
}
public List<String> getChildrenIds() throws DocumentException {
return null;
}
public boolean hasChild(String name) throws DocumentException {
return false;
}
public boolean hasChildren() throws DocumentException {
return false;
}
public void removeChild(String name) throws DocumentException {
}
public Document resolvePath(String relPath) throws DocumentException {
return null;
}
public Map<String, Object> exportFlatMap(String[] schemas)
throws DocumentException {
return null;
}
public Map<String, Object> exportMap(String schemaName)
throws DocumentException {
return null;
}
public Map<String, Map<String, Object>> exportMap(String[] schemas)
throws DocumentException {
return null;
}
public boolean getBoolean(String name) throws DocumentException {
return false;
}
public Blob getContent(String name) throws DocumentException {
return null;
}
public Calendar getDate(String name) throws DocumentException {
return null;
}
public List<String> getDirtyFields() {
return null;
}
public double getDouble(String name) throws DocumentException {
return 0;
}
public long getLong(String name) throws DocumentException {
return 0;
}
public Collection<Property> getProperties() throws DocumentException {
return null;
}
public Property getProperty(String name) throws DocumentException {
return null;
}
public Iterator<Property> getPropertyIterator() throws DocumentException {
return null;
}
public Object getPropertyValue(String name) throws DocumentException {
return null;
}
public String getString(String name) throws DocumentException {
return null;
}
public void importFlatMap(Map<String, Object> map) throws DocumentException {
}
public void importMap(Map<String, Map<String, Object>> map)
throws DocumentException {
}
public boolean isPropertySet(String path) throws DocumentException {
return false;
}
public void removeProperty(String name) throws DocumentException {
}
public void setBoolean(String name, boolean value) throws DocumentException {
}
public void setContent(String name, Blob value) throws DocumentException {
}
public void setDate(String name, Calendar value) throws DocumentException {
}
public void setDouble(String name, double value) throws DocumentException {
}
public void setLong(String name, long value) throws DocumentException {
}
public void setPropertyValue(String name, Object value)
throws DocumentException {
}
public void setString(String name, String value) throws DocumentException {
}
public String getLock() throws DocumentException {
return null;
}
public boolean isLocked() throws DocumentException {
return false;
}
public void setLock(String key) throws DocumentException {
}
public String unlock() throws DocumentException {
return null;
}
public void readDocumentPart(DocumentPart dp) throws Exception {
}
public void writeDocumentPart(DocumentPart dp) throws Exception {
}
public void orderBefore(String src, String dest) throws DocumentException {
}
}
| NXP-2265: Method to efficiently get version ids
| nuxeo-platform-relations-io/src/test/java/org/nuxeo/ecm/platform/relations/io/test/FakeDocument.java | NXP-2265: Method to efficiently get version ids |
|
Java | apache-2.0 | 03ec1f21a3592f16c0d8c7c76bc79b541c360cb0 | 0 | hurricup/intellij-community,ibinti/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,allotria/intellij-community,asedunov/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,fitermay/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,asedunov/intellij-community,FHannes/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,fitermay/intellij-community,vvv1559/intellij-community,ThiagoGarciaAlves/intellij-community,michaelgallacher/intellij-community,da1z/intellij-community,youdonghai/intellij-community,hurricup/intellij-community,michaelgallacher/intellij-community,signed/intellij-community,ibinti/intellij-community,signed/intellij-community,semonte/intellij-community,allotria/intellij-community,apixandru/intellij-community,da1z/intellij-community,ibinti/intellij-community,fitermay/intellij-community,da1z/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,apixandru/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,ThiagoGarciaAlves/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,apixandru/intellij-community,fitermay/intellij-community,semonte/intellij-community,ibinti/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intellij-community,hurricup/intellij-community,da1z/intellij-community,youdonghai/intellij-community,suncycheng/intellij-community,da1z/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,allotria/intellij-community,FHannes/intellij-community,hurricup/intellij-community,xfournet/intellij-community,mglukhikh/intellij-community,ibinti/intellij-community,ibinti/intellij-community,allotria/intellij-community,apixandru/intellij-community,asedunov/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,ibinti/intellij-community,asedunov/intellij-community,da1z/intellij-community,asedunov/intellij-community,ibinti/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,xfournet/intellij-community,ibinti/intellij-community,semonte/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,asedunov/intellij-community,da1z/intellij-community,FHannes/intellij-community,vvv1559/intellij-community,asedunov/intellij-community,semonte/intellij-community,youdonghai/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,ibinti/intellij-community,FHannes/intellij-community,mglukhikh/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,fitermay/intellij-community,fitermay/intellij-community,idea4bsd/idea4bsd,signed/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,idea4bsd/idea4bsd,hurricup/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,xfournet/intellij-community,signed/intellij-community,fitermay/intellij-community,you
donghai/intellij-community,vvv1559/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,fitermay/intellij-community,ibinti/intellij-community,allotria/intellij-community,apixandru/intellij-community,signed/intellij-community,suncycheng/intellij-community,semonte/intellij-community,ThiagoGarciaAlves/intellij-community,ThiagoGarciaAlves/intellij-community,da1z/intellij-community,hurricup/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,michaelgallacher/intellij-community,xfournet/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,FHannes/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,allotria/intellij-community,semonte/intellij-community,asedunov/intellij-community,youdonghai/intellij-community,ibinti/intellij-community,hurricup/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,semonte/intellij-community,semonte/intellij-community,hurricup/intellij-community,ThiagoGarciaAlves/intellij-community,FHannes/intellij-community,semonte/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,allotria/intellij-community,youdonghai/intellij-community,apixandru/intellij-community,FHannes/intellij-community,hurricup/intellij-community,idea4bsd/idea4bsd,da1z/intellij-community,hurricup/intellij-community,signed/intellij-community,youdonghai/intellij-community,youdonghai/intellij-community,vvv1559/intellij-community,xfournet/intellij-community,allotria/intellij-community,suncycheng/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,mglukhikh/intellij-community,FHannes/intellij-community,michaelgallacher/intellij-community,xfournet/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,xfournet/intellij-community,allotria/intellij-community,semonte/intellij-community,apixandru/intellij-community,allotria/intellij-community,mglukhikh/intellij-community,mglukhikh/intellij-community,semonte/intellij-community,fitermay/intellij-community,da1z/intellij-community,suncycheng/intellij-community,asedunov/intellij-community,vvv1559/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,signed/intellij-community,signed/intellij-community,mglukhikh/intellij-community,suncycheng/intellij-community,signed/intellij-community,da1z/intellij-community,youdonghai/intellij-community,signed/intellij-community,signed/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,suncycheng/intellij-community,hurricup/intellij-community,apixandru/intellij-community,fitermay/intellij-community,xfournet/intellij-community,signed/intellij-community,allotria/intellij-community,youdonghai/intellij-community,semonte/intellij-community,da1z/intellij-community,xfournet/intellij-community,ibinti/intellij-community,apixandru/intellij-community,michaelgallacher/intellij-community,michaelgallacher/intellij-community | /*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.testFramework;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.log4j.xml.DOMConfigurator;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.io.StringReader;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@SuppressWarnings({"CallToPrintStackTrace", "UseOfSystemOutOrSystemErr"})
public class TestLoggerFactory implements Logger.Factory {
private static final String SYSTEM_MACRO = "$SYSTEM_DIR$";
private static final String APPLICATION_MACRO = "$APPLICATION_DIR$";
private static final String LOG_DIR_MACRO = "$LOG_DIR$";
private static final String LOG_DIR = "testlog";
private static final long LOG_SIZE_LIMIT = 100 * 1024 * 1024;
private static final long LOG_SEEK_WINDOW = 100 * 1024;
private boolean myInitialized;
private TestLoggerFactory() { }
@NotNull
@Override
public synchronized Logger getLoggerInstance(@NotNull final String name) {
if (!myInitialized) {
init();
}
return new TestLogger(org.apache.log4j.Logger.getLogger(name));
}
private void init() {
try {
File logXmlFile = new File(PathManager.getHomePath(), "test-log.xml");
if (!logXmlFile.exists()) {
logXmlFile = new File(PathManager.getBinPath(), "log.xml");
}
if (!logXmlFile.exists()) {
return;
}
final String logDir = getTestLogDir();
String text = FileUtil.loadFile(logXmlFile);
text = StringUtil.replace(text, SYSTEM_MACRO, StringUtil.replace(PathManager.getSystemPath(), "\\", "\\\\"));
text = StringUtil.replace(text, APPLICATION_MACRO, StringUtil.replace(PathManager.getHomePath(), "\\", "\\\\"));
text = StringUtil.replace(text, LOG_DIR_MACRO, StringUtil.replace(logDir, "\\", "\\\\"));
final File logDirFile = new File(logDir);
if (!logDirFile.mkdirs() && !logDirFile.exists()) {
throw new IOException("Unable to create log dir: " + logDirFile);
}
System.setProperty("log4j.defaultInitOverride", "true");
try {
final DOMConfigurator domConfigurator = new DOMConfigurator();
domConfigurator.doConfigure(new StringReader(text), LogManager.getLoggerRepository());
}
catch (ClassCastException e) {
// shit :-E
System.err.println("log.xml content:\n" + text);
throw e;
}
File ideaLog = new File(getTestLogDir(), "idea.log");
if (ideaLog.exists() && ideaLog.length() >= LOG_SIZE_LIMIT) {
FileUtil.writeToFile(ideaLog, "");
}
myInitialized = true;
}
catch (Exception e) {
e.printStackTrace();
}
}
public static String getTestLogDir() {
return PathManager.getSystemPath() + "/" + LOG_DIR;
}
public static void dumpLogToStdout(@NotNull String testStartMarker) {
File ideaLog = new File(getTestLogDir(), "idea.log");
if (ideaLog.exists()) {
try {
long length = ideaLog.length();
String logText;
if (length > LOG_SEEK_WINDOW) {
try (RandomAccessFile file = new RandomAccessFile(ideaLog, "r")) {
file.seek(length - LOG_SEEK_WINDOW);
byte[] bytes = new byte[(int)LOG_SEEK_WINDOW];
int read = file.read(bytes);
logText = new String(bytes, 0, read);
}
}
else {
logText = FileUtil.loadFile(ideaLog);
}
System.out.println("\n\nIdea Log:");
Pattern logStart = Pattern.compile("[0-9\\-, :\\[\\]]+(DEBUG|INFO|ERROR) - ");
for (String line : StringUtil.splitByLines(logText.substring(Math.max(0, logText.lastIndexOf(testStartMarker))))) {
Matcher matcher = logStart.matcher(line);
int lineStart = matcher.lookingAt() ? matcher.end() : 0;
System.out.println(line.substring(lineStart));
}
}
catch (IOException e) {
throw new RuntimeException(e);
}
}
}
public static void enableDebugLogging(@NotNull Disposable parentDisposable, @NotNull String... categories) {
for (String category : categories) {
final Logger logger = Logger.getInstance(category);
logger.setLevel(Level.DEBUG);
Disposer.register(parentDisposable, () -> logger.setLevel(Level.INFO));
}
}
} | platform/testFramework/src/com/intellij/testFramework/TestLoggerFactory.java | /*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.testFramework;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.log4j.xml.DOMConfigurator;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.io.StringReader;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@SuppressWarnings({"CallToPrintStackTrace", "UseOfSystemOutOrSystemErr"})
public class TestLoggerFactory implements Logger.Factory {
private static final String SYSTEM_MACRO = "$SYSTEM_DIR$";
private static final String APPLICATION_MACRO = "$APPLICATION_DIR$";
private static final String LOG_DIR_MACRO = "$LOG_DIR$";
private static final String LOG_DIR = "testlog";
private static final long LOG_SIZE_LIMIT = 100 * 1024 * 1024;
private static final long LOG_SEEK_WINDOW = 100 * 1024;
private boolean myInitialized = false;
private TestLoggerFactory() { }
@NotNull
@Override
public synchronized Logger getLoggerInstance(@NotNull final String name) {
if (!myInitialized) {
init();
}
return new TestLogger(org.apache.log4j.Logger.getLogger(name));
}
private void init() {
try {
File logXmlFile = new File(PathManager.getHomePath(), "test-log.xml");
if (!logXmlFile.exists()) {
logXmlFile = new File(PathManager.getBinPath(), "log.xml");
}
if (!logXmlFile.exists()) {
return;
}
final String logDir = getTestLogDir();
String text = FileUtil.loadFile(logXmlFile);
text = StringUtil.replace(text, SYSTEM_MACRO, StringUtil.replace(PathManager.getSystemPath(), "\\", "\\\\"));
text = StringUtil.replace(text, APPLICATION_MACRO, StringUtil.replace(PathManager.getHomePath(), "\\", "\\\\"));
text = StringUtil.replace(text, LOG_DIR_MACRO, StringUtil.replace(logDir, "\\", "\\\\"));
final File logDirFile = new File(logDir);
if (!logDirFile.mkdirs() && !logDirFile.exists()) {
throw new IOException("Unable to create log dir: " + logDirFile);
}
System.setProperty("log4j.defaultInitOverride", "true");
final DOMConfigurator domConfigurator = new DOMConfigurator();
try {
domConfigurator.doConfigure(new StringReader(text), LogManager.getLoggerRepository());
}
catch (ClassCastException e) {
// shit :-E
System.err.println("log.xml content:\n" + text);
throw e;
}
File ideaLog = new File(getTestLogDir(), "idea.log");
if (ideaLog.exists() && ideaLog.length() >= LOG_SIZE_LIMIT) {
FileUtil.writeToFile(ideaLog, "");
}
myInitialized = true;
}
catch (Exception e) {
e.printStackTrace();
}
}
public static String getTestLogDir() {
return PathManager.getSystemPath() + "/" + LOG_DIR;
}
public static void dumpLogToStdout(@NotNull String testStartMarker) {
File ideaLog = new File(getTestLogDir(), "idea.log");
if (ideaLog.exists()) {
try {
long length = ideaLog.length();
String logText;
if (length > LOG_SEEK_WINDOW) {
RandomAccessFile file = new RandomAccessFile(ideaLog, "r");
try {
file.seek(length - LOG_SEEK_WINDOW);
byte[] bytes = new byte[(int)LOG_SEEK_WINDOW];
int read = file.read(bytes);
logText = new String(bytes, 0, read);
}
finally {
file.close();
}
}
else {
logText = FileUtil.loadFile(ideaLog);
}
Pattern logStart = Pattern.compile("[0-9\\-, :\\[\\]]+(DEBUG|INFO|ERROR) - ");
System.out.println("\n\nIdea Log:");
for (String line : StringUtil.splitByLines(logText.substring(Math.max(0, logText.lastIndexOf(testStartMarker))))) {
Matcher matcher = logStart.matcher(line);
int lineStart = matcher.lookingAt() ? matcher.end() : 0;
System.out.println(line.substring(lineStart));
}
}
catch (IOException e) {
throw new RuntimeException(e);
}
}
}
public static void enableDebugLogging(@NotNull Disposable parentDisposable, String... categories) {
for (String category : categories) {
final Logger logger = Logger.getInstance(category);
logger.setLevel(Level.DEBUG);
Disposer.register(parentDisposable, new Disposable() {
@Override
public void dispose() {
logger.setLevel(Level.INFO);
}
});
}
}
} | cleanup
| platform/testFramework/src/com/intellij/testFramework/TestLoggerFactory.java | cleanup |
|
Java | apache-2.0 | ace963312fad0064bbbd6ee3572e2c59c396f966 | 0 | crate/crate,crate/crate,crate/crate | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.engine;
import static java.util.Collections.shuffle;
import static org.elasticsearch.index.engine.Engine.Operation.Origin.LOCAL_RESET;
import static org.elasticsearch.index.engine.Engine.Operation.Origin.LOCAL_TRANSLOG_RECOVERY;
import static org.elasticsearch.index.engine.Engine.Operation.Origin.PEER_RECOVERY;
import static org.elasticsearch.index.engine.Engine.Operation.Origin.PRIMARY;
import static org.elasticsearch.index.engine.Engine.Operation.Origin.REPLICA;
import static org.elasticsearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED;
import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM;
import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
import static org.elasticsearch.index.translog.TranslogDeletionPolicies.createTranslogDeletionPolicy;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.isIn;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.sameInstance;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
import java.io.Closeable;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.Phaser;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.IntSupplier;
import java.util.function.LongSupplier;
import java.util.function.Supplier;
import java.util.function.ToLongBiFunction;
import java.util.stream.Collectors;
import java.util.stream.LongStream;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.appender.AbstractAppender;
import org.apache.logging.log4j.core.filter.RegexFilter;
import org.apache.lucene.codecs.lucene87.Lucene87StoredFieldsFormat;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.LiveIndexWriterConfig;
import org.apache.lucene.index.LogByteSizeMergePolicy;
import org.apache.lucene.index.LogDocMergePolicy;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.index.SoftDeletesRetentionMergePolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.index.TieredMergePolicy;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.ReferenceManager;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TotalHitCountCollector;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.TransportActions;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.TestShardRouting;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.CheckedRunnable;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.TriFunction;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver;
import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver.DocIdAndSeqNo;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.codec.CodecService;
import org.elasticsearch.index.engine.Engine.Searcher;
import org.elasticsearch.index.fieldvisitor.FieldsVisitor;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.VersionFieldMapper;
import org.elasticsearch.index.seqno.LocalCheckpointTracker;
import org.elasticsearch.index.seqno.ReplicationTracker;
import org.elasticsearch.index.seqno.RetentionLease;
import org.elasticsearch.index.seqno.RetentionLeases;
import org.elasticsearch.index.seqno.SeqNoStats;
import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardUtils;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.translog.SnapshotMatchers;
import org.elasticsearch.index.translog.TestTranslog;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.index.translog.TranslogConfig;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.threadpool.ThreadPool;
import org.hamcrest.Matcher;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Test;
import io.crate.common.collections.Tuple;
import io.crate.common.io.IOUtils;
import io.crate.common.unit.TimeValue;
public class InternalEngineTests extends EngineTestCase {
static final long UNSET_AUTO_GENERATED_TIMESTAMP = -1L;
@Test
public void testVersionMapAfterAutoIDDocument() throws IOException {
engine.refresh("warm_up");
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField("test"),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
Engine.Index operation = randomBoolean() ?
appendOnlyPrimary(doc, false, 1)
: appendOnlyReplica(doc, false, 1, randomIntBetween(0, 5));
engine.index(operation);
assertFalse(engine.isSafeAccessRequired());
doc = testParsedDocument("1", null, testDocumentWithTextField("updated"),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
Engine.Index update = indexForDoc(doc);
engine.index(update);
assertTrue(engine.isSafeAccessRequired());
assertThat(engine.getVersionMap().values(), hasSize(1));
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
assertEquals(0, searcher.getIndexReader().numDocs());
}
try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) {
assertEquals(1, searcher.getIndexReader().numDocs());
TopDocs search = searcher.search(new MatchAllDocsQuery(), 1);
org.apache.lucene.document.Document luceneDoc = searcher.doc(search.scoreDocs[0].doc);
assertEquals("test", luceneDoc.get("value"));
}
// now lets make this document visible
engine.refresh("test");
if (randomBoolean()) { // random empty refresh
engine.refresh("test");
}
assertTrue("safe access should be required we carried it over", engine.isSafeAccessRequired());
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
assertEquals(1, searcher.getIndexReader().numDocs());
TopDocs search = searcher.search(new MatchAllDocsQuery(), 1);
org.apache.lucene.document.Document luceneDoc = searcher.doc(search.scoreDocs[0].doc);
assertEquals("updated", luceneDoc.get("value"));
}
doc = testParsedDocument("2", null, testDocumentWithTextField("test"),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
operation = randomBoolean() ?
appendOnlyPrimary(doc, false, 1)
: appendOnlyReplica(doc, false, 1, generateNewSeqNo(engine));
engine.index(operation);
assertTrue("safe access should be required", engine.isSafeAccessRequired());
assertThat(engine.getVersionMap().values(), hasSize(1)); // now we add this to the map
engine.refresh("test");
if (randomBoolean()) { // randomly refresh here again
engine.refresh("test");
}
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
assertEquals(2, searcher.getIndexReader().numDocs());
}
if (operation.origin() == PRIMARY) {
assertFalse("safe access should NOT be required last indexing round was only append only", engine.isSafeAccessRequired());
}
engine.delete(new Engine.Delete(
operation.id(),
operation.uid(),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
));
assertTrue("safe access should be required", engine.isSafeAccessRequired());
engine.refresh("test");
assertTrue("safe access should be required", engine.isSafeAccessRequired());
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
assertEquals(1, searcher.getIndexReader().numDocs());
}
}
@Test
public void testSegmentsWithoutSoftDeletes() throws Exception {
Settings settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false).build();
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build());
try (Store store = createStore();
InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null))) {
List<Segment> segments = engine.segments(false);
assertThat(segments.isEmpty(), equalTo(true));
// create two docs and refresh
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null);
Engine.Index first = indexForDoc(doc);
Engine.IndexResult firstResult = engine.index(first);
ParsedDocument doc2 = testParsedDocument("2", null, testDocumentWithTextField(), B_2, null);
Engine.Index second = indexForDoc(doc2);
Engine.IndexResult secondResult = engine.index(second);
assertThat(secondResult.getTranslogLocation(), greaterThan(firstResult.getTranslogLocation()));
engine.refresh("test");
segments = engine.segments(false);
assertThat(segments.size(), equalTo(1));
assertThat(segments.get(0).isCommitted(), equalTo(false));
assertThat(segments.get(0).isSearch(), equalTo(true));
assertThat(segments.get(0).getNumDocs(), equalTo(2));
assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
assertThat(segments.get(0).isCompound(), equalTo(true));
assertThat(segments.get(0).ramTree, nullValue());
assertThat(segments.get(0).getAttributes().keySet(), Matchers.contains(Lucene87StoredFieldsFormat.MODE_KEY));
engine.flush();
segments = engine.segments(false);
assertThat(segments.size(), equalTo(1));
assertThat(segments.get(0).isCommitted(), equalTo(true));
assertThat(segments.get(0).isSearch(), equalTo(true));
assertThat(segments.get(0).getNumDocs(), equalTo(2));
assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
assertThat(segments.get(0).isCompound(), equalTo(true));
ParsedDocument doc3 = testParsedDocument("3", null, testDocumentWithTextField(), B_3, null);
engine.index(indexForDoc(doc3));
engine.refresh("test");
segments = engine.segments(false);
assertThat(segments.size(), equalTo(2));
assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true));
assertThat(segments.get(0).isCommitted(), equalTo(true));
assertThat(segments.get(0).isSearch(), equalTo(true));
assertThat(segments.get(0).getNumDocs(), equalTo(2));
assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
assertThat(segments.get(0).isCompound(), equalTo(true));
assertThat(segments.get(1).isCommitted(), equalTo(false));
assertThat(segments.get(1).isSearch(), equalTo(true));
assertThat(segments.get(1).getNumDocs(), equalTo(1));
assertThat(segments.get(1).getDeletedDocs(), equalTo(0));
assertThat(segments.get(1).isCompound(), equalTo(true));
engine.delete(new Engine.Delete(
"1",
newUid(doc),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
));
engine.refresh("test");
segments = engine.segments(false);
assertThat(segments.size(), equalTo(2));
assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true));
assertThat(segments.get(0).isCommitted(), equalTo(true));
assertThat(segments.get(0).isSearch(), equalTo(true));
assertThat(segments.get(0).getNumDocs(), equalTo(1));
assertThat(segments.get(0).getDeletedDocs(), equalTo(1));
assertThat(segments.get(0).isCompound(), equalTo(true));
assertThat(segments.get(1).isCommitted(), equalTo(false));
assertThat(segments.get(1).isSearch(), equalTo(true));
assertThat(segments.get(1).getNumDocs(), equalTo(1));
assertThat(segments.get(1).getDeletedDocs(), equalTo(0));
assertThat(segments.get(1).isCompound(), equalTo(true));
engine.onSettingsChanged(indexSettings.getTranslogRetentionAge(), indexSettings.getTranslogRetentionSize(),
indexSettings.getSoftDeleteRetentionOperations());
ParsedDocument doc4 = testParsedDocument("4", null, testDocumentWithTextField(), B_3, null);
engine.index(indexForDoc(doc4));
engine.refresh("test");
segments = engine.segments(false);
assertThat(segments.size(), equalTo(3));
assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true));
assertThat(segments.get(0).isCommitted(), equalTo(true));
assertThat(segments.get(0).isSearch(), equalTo(true));
assertThat(segments.get(0).getNumDocs(), equalTo(1));
assertThat(segments.get(0).getDeletedDocs(), equalTo(1));
assertThat(segments.get(0).isCompound(), equalTo(true));
assertThat(segments.get(1).isCommitted(), equalTo(false));
assertThat(segments.get(1).isSearch(), equalTo(true));
assertThat(segments.get(1).getNumDocs(), equalTo(1));
assertThat(segments.get(1).getDeletedDocs(), equalTo(0));
assertThat(segments.get(1).isCompound(), equalTo(true));
assertThat(segments.get(2).isCommitted(), equalTo(false));
assertThat(segments.get(2).isSearch(), equalTo(true));
assertThat(segments.get(2).getNumDocs(), equalTo(1));
assertThat(segments.get(2).getDeletedDocs(), equalTo(0));
assertThat(segments.get(2).isCompound(), equalTo(true));
// internal refresh - lets make sure we see those segments in the stats
ParsedDocument doc5 = testParsedDocument("5", null, testDocumentWithTextField(), B_3, null);
engine.index(indexForDoc(doc5));
engine.refresh("test", Engine.SearcherScope.INTERNAL, true);
segments = engine.segments(false);
assertThat(segments.size(), equalTo(4));
assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true));
assertThat(segments.get(0).isCommitted(), equalTo(true));
assertThat(segments.get(0).isSearch(), equalTo(true));
assertThat(segments.get(0).getNumDocs(), equalTo(1));
assertThat(segments.get(0).getDeletedDocs(), equalTo(1));
assertThat(segments.get(0).isCompound(), equalTo(true));
assertThat(segments.get(1).isCommitted(), equalTo(false));
assertThat(segments.get(1).isSearch(), equalTo(true));
assertThat(segments.get(1).getNumDocs(), equalTo(1));
assertThat(segments.get(1).getDeletedDocs(), equalTo(0));
assertThat(segments.get(1).isCompound(), equalTo(true));
assertThat(segments.get(2).isCommitted(), equalTo(false));
assertThat(segments.get(2).isSearch(), equalTo(true));
assertThat(segments.get(2).getNumDocs(), equalTo(1));
assertThat(segments.get(2).getDeletedDocs(), equalTo(0));
assertThat(segments.get(2).isCompound(), equalTo(true));
assertThat(segments.get(3).isCommitted(), equalTo(false));
assertThat(segments.get(3).isSearch(), equalTo(false));
assertThat(segments.get(3).getNumDocs(), equalTo(1));
assertThat(segments.get(3).getDeletedDocs(), equalTo(0));
assertThat(segments.get(3).isCompound(), equalTo(true));
// now refresh the external searcher and make sure it has the new segment
engine.refresh("test");
segments = engine.segments(false);
assertThat(segments.size(), equalTo(4));
assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true));
assertThat(segments.get(0).isCommitted(), equalTo(true));
assertThat(segments.get(0).isSearch(), equalTo(true));
assertThat(segments.get(0).getNumDocs(), equalTo(1));
assertThat(segments.get(0).getDeletedDocs(), equalTo(1));
assertThat(segments.get(0).isCompound(), equalTo(true));
assertThat(segments.get(1).isCommitted(), equalTo(false));
assertThat(segments.get(1).isSearch(), equalTo(true));
assertThat(segments.get(1).getNumDocs(), equalTo(1));
assertThat(segments.get(1).getDeletedDocs(), equalTo(0));
assertThat(segments.get(1).isCompound(), equalTo(true));
assertThat(segments.get(2).isCommitted(), equalTo(false));
assertThat(segments.get(2).isSearch(), equalTo(true));
assertThat(segments.get(2).getNumDocs(), equalTo(1));
assertThat(segments.get(2).getDeletedDocs(), equalTo(0));
assertThat(segments.get(2).isCompound(), equalTo(true));
assertThat(segments.get(3).isCommitted(), equalTo(false));
assertThat(segments.get(3).isSearch(), equalTo(true));
assertThat(segments.get(3).getNumDocs(), equalTo(1));
assertThat(segments.get(3).getDeletedDocs(), equalTo(0));
assertThat(segments.get(3).isCompound(), equalTo(true));
}
}
@Test
public void testVerboseSegments() throws Exception {
try (Store store = createStore();
Engine engine = createEngine(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE)) {
List<Segment> segments = engine.segments(true);
assertThat(segments.isEmpty(), equalTo(true));
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null);
engine.index(indexForDoc(doc));
engine.refresh("test");
segments = engine.segments(true);
assertThat(segments.size(), equalTo(1));
assertThat(segments.get(0).ramTree, notNullValue());
ParsedDocument doc2 = testParsedDocument("2", null, testDocumentWithTextField(), B_2, null);
engine.index(indexForDoc(doc2));
engine.refresh("test");
ParsedDocument doc3 = testParsedDocument("3", null, testDocumentWithTextField(), B_3, null);
engine.index(indexForDoc(doc3));
engine.refresh("test");
segments = engine.segments(true);
assertThat(segments.size(), equalTo(3));
assertThat(segments.get(0).ramTree, notNullValue());
assertThat(segments.get(1).ramTree, notNullValue());
assertThat(segments.get(2).ramTree, notNullValue());
}
}
@Test
public void testSegmentsWithMergeFlag() throws Exception {
try (Store store = createStore();
Engine engine = createEngine(defaultSettings, store, createTempDir(), new TieredMergePolicy())) {
ParsedDocument doc = testParsedDocument("1", null, testDocument(), B_1, null);
Engine.Index index = indexForDoc(doc);
engine.index(index);
engine.flush();
assertThat(engine.segments(false).size(), equalTo(1));
index = indexForDoc(testParsedDocument("2", null, testDocument(), B_1, null));
engine.index(index);
engine.flush();
List<Segment> segments = engine.segments(false);
assertThat(segments.size(), equalTo(2));
for (Segment segment : segments) {
assertThat(segment.getMergeId(), nullValue());
}
index = indexForDoc(testParsedDocument("3", null, testDocument(), B_1, null));
engine.index(index);
engine.flush();
segments = engine.segments(false);
assertThat(segments.size(), equalTo(3));
for (Segment segment : segments) {
assertThat(segment.getMergeId(), nullValue());
}
index = indexForDoc(doc);
engine.index(index);
engine.flush();
final long gen1 = store.readLastCommittedSegmentsInfo().getGeneration();
// now, optimize and wait for merges, see that we have no merge flag
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
for (Segment segment : engine.segments(false)) {
assertThat(segment.getMergeId(), nullValue());
}
// we could have multiple underlying merges, so the generation may increase more than once
assertTrue(store.readLastCommittedSegmentsInfo().getGeneration() > gen1);
final boolean flush = randomBoolean();
final long gen2 = store.readLastCommittedSegmentsInfo().getGeneration();
engine.forceMerge(flush, 1, false, false, false, UUIDs.randomBase64UUID());
for (Segment segment : engine.segments(false)) {
assertThat(segment.getMergeId(), nullValue());
}
if (flush) {
// we should have had just 1 merge, so last generation should be exact
assertEquals(gen2, store.readLastCommittedSegmentsInfo().getLastGeneration());
}
}
}
@Test
public void testSegmentsWithSoftDeletes() throws Exception {
Settings.Builder settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true);
final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
try (Store store = createStore();
InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null,
null, globalCheckpoint::get))) {
assertThat(engine.segments(false), empty());
int numDocsFirstSegment = randomIntBetween(5, 50);
Set<String> liveDocsFirstSegment = new HashSet<>();
for (int i = 0; i < numDocsFirstSegment; i++) {
String id = Integer.toString(i);
ParsedDocument doc = testParsedDocument(id, null, testDocument(), B_1, null);
engine.index(indexForDoc(doc));
liveDocsFirstSegment.add(id);
}
engine.refresh("test");
List<Segment> segments = engine.segments(randomBoolean());
assertThat(segments, hasSize(1));
assertThat(segments.get(0).getNumDocs(), equalTo(liveDocsFirstSegment.size()));
assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
assertFalse(segments.get(0).committed);
int deletes = 0;
int updates = 0;
int appends = 0;
int iterations = scaledRandomIntBetween(1, 50);
for (int i = 0; i < iterations && liveDocsFirstSegment.isEmpty() == false; i++) {
String idToUpdate = randomFrom(liveDocsFirstSegment);
liveDocsFirstSegment.remove(idToUpdate);
ParsedDocument doc = testParsedDocument(idToUpdate, null, testDocument(), B_1, null);
if (randomBoolean()) {
engine.delete(new Engine.Delete(doc.id(), newUid(doc), primaryTerm.get()));
deletes++;
} else {
engine.index(indexForDoc(doc));
updates++;
}
if (randomBoolean()) {
engine.index(indexForDoc(testParsedDocument(UUIDs.randomBase64UUID(), null, testDocument(), B_1, null)));
appends++;
}
}
boolean committed = randomBoolean();
if (committed) {
engine.flush();
}
engine.refresh("test");
segments = engine.segments(randomBoolean());
assertThat(segments, hasSize(2));
assertThat(segments.get(0).getNumDocs(), equalTo(liveDocsFirstSegment.size()));
assertThat(segments.get(0).getDeletedDocs(), equalTo(updates + deletes));
assertThat(segments.get(0).committed, equalTo(committed));
assertThat(segments.get(1).getNumDocs(), equalTo(updates + appends));
assertThat(segments.get(1).getDeletedDocs(), equalTo(deletes)); // delete tombstones
assertThat(segments.get(1).committed, equalTo(committed));
}
}
@Test
public void testCommitStats() throws IOException {
final AtomicLong maxSeqNo = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
final AtomicLong localCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
final AtomicLong globalCheckpoint = new AtomicLong(UNASSIGNED_SEQ_NO);
try (
Store store = createStore();
InternalEngine engine = createEngine(store, createTempDir(), (maxSeq, localCP) -> new LocalCheckpointTracker(
maxSeq,
localCP) {
@Override
public long getMaxSeqNo() {
return maxSeqNo.get();
}
@Override
public long getProcessedCheckpoint() {
return localCheckpoint.get();
}
}
)) {
CommitStats stats1 = engine.commitStats();
assertThat(stats1.getGeneration(), greaterThan(0L));
assertThat(stats1.getId(), notNullValue());
assertThat(stats1.getUserData(), hasKey(SequenceNumbers.LOCAL_CHECKPOINT_KEY));
assertThat(
Long.parseLong(stats1.getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)),
equalTo(SequenceNumbers.NO_OPS_PERFORMED));
assertThat(stats1.getUserData(), hasKey(SequenceNumbers.MAX_SEQ_NO));
assertThat(
Long.parseLong(stats1.getUserData().get(SequenceNumbers.MAX_SEQ_NO)),
equalTo(SequenceNumbers.NO_OPS_PERFORMED));
maxSeqNo.set(rarely() ? SequenceNumbers.NO_OPS_PERFORMED : randomIntBetween(0, 1024));
localCheckpoint.set(
rarely() || maxSeqNo.get() == SequenceNumbers.NO_OPS_PERFORMED ?
SequenceNumbers.NO_OPS_PERFORMED : randomIntBetween(0, 1024));
globalCheckpoint.set(rarely() || localCheckpoint.get() == SequenceNumbers.NO_OPS_PERFORMED ?
UNASSIGNED_SEQ_NO : randomIntBetween(0, (int) localCheckpoint.get()));
final Engine.CommitId commitId = engine.flush(true, true);
CommitStats stats2 = engine.commitStats();
assertThat(stats2.getRawCommitId(), equalTo(commitId));
assertThat(stats2.getGeneration(), greaterThan(stats1.getGeneration()));
assertThat(stats2.getId(), notNullValue());
assertThat(stats2.getId(), not(equalTo(stats1.getId())));
assertThat(stats2.getUserData(), hasKey(Translog.TRANSLOG_UUID_KEY));
assertThat(stats2.getUserData().get(Translog.TRANSLOG_UUID_KEY),
equalTo(stats1.getUserData().get(Translog.TRANSLOG_UUID_KEY)));
assertThat(Long.parseLong(stats2.getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)), equalTo(localCheckpoint.get()));
assertThat(stats2.getUserData(), hasKey(SequenceNumbers.MAX_SEQ_NO));
assertThat(Long.parseLong(stats2.getUserData().get(SequenceNumbers.MAX_SEQ_NO)), equalTo(maxSeqNo.get()));
}
}
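/**
* Flushing must be rejected while the engine is still recovering from the translog and
* become legal again once recovery has completed or was explicitly skipped.
*/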
@Test
public void testFlushIsDisabledDuringTranslogRecovery() throws IOException {
engine.ensureCanFlush(); // recovered already
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null);
engine.index(indexForDoc(doc));
engine.close();
engine = new InternalEngine(engine.config());
expectThrows(IllegalStateException.class, engine::ensureCanFlush);
expectThrows(IllegalStateException.class, () -> engine.flush(true, true));
if (randomBoolean()) {
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
} else {
engine.skipTranslogRecovery();
}
engine.ensureCanFlush(); // ready
doc = testParsedDocument("2", null, testDocumentWithTextField(), SOURCE, null);
engine.index(indexForDoc(doc));
engine.flush();
}
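/**
* Applies a random sequence of index/delete operations to a single document and checks
* that translog recovery on a fresh engine reproduces the final state (live or deleted).
*/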
@Test
public void testTranslogMultipleOperationsSameDocument() throws IOException {
final int ops = randomIntBetween(1, 32);
Engine initialEngine;
final List<Engine.Operation> operations = new ArrayList<>();
try {
initialEngine = engine;
for (int i = 0; i < ops; i++) {
final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null);
if (randomBoolean()) {
final Engine.Index operation = new Engine.Index(
newUid(doc),
doc,
UNASSIGNED_SEQ_NO,
0,
i,
VersionType.EXTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
-1,
false,
UNASSIGNED_SEQ_NO,
0
);
operations.add(operation);
initialEngine.index(operation);
} else {
final Engine.Delete operation = new Engine.Delete(
"1",
newUid(doc),
UNASSIGNED_SEQ_NO,
0,
i,
VersionType.EXTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
);
operations.add(operation);
initialEngine.delete(operation);
}
}
} finally {
IOUtils.close(engine);
}
try (Engine recoveringEngine = new InternalEngine(engine.config())) {
recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
recoveringEngine.refresh("test");
try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) {
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(new MatchAllDocsQuery(), collector);
assertThat(collector.getTotalHits(), equalTo(operations.get(operations.size() - 1) instanceof Engine.Delete ? 0 : 1));
}
}
}
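/**
* Translog recovery must end in a commit of the index writer rather than replaying the
* recovered operations back into the translog.
*/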
@Test
public void testTranslogRecoveryDoesNotReplayIntoTranslog() throws IOException {
final int docs = randomIntBetween(1, 32);
Engine initialEngine = null;
try {
initialEngine = engine;
for (int i = 0; i < docs; i++) {
final String id = Integer.toString(i);
final ParsedDocument doc = testParsedDocument(id, null, testDocumentWithTextField(), SOURCE, null);
initialEngine.index(indexForDoc(doc));
}
} finally {
IOUtils.close(initialEngine);
}
Engine recoveringEngine = null;
try {
final AtomicBoolean committed = new AtomicBoolean();
recoveringEngine = new InternalEngine(initialEngine.config()) {
@Override
protected void commitIndexWriter(IndexWriter writer, Translog translog, String syncId) throws IOException {
committed.set(true);
super.commitIndexWriter(writer, translog, syncId);
}
};
recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
assertTrue(committed.get());
} finally {
IOUtils.close(recoveringEngine);
}
}
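/**
* Spreads operations over multiple translog generations via random rolls and flushes and
* verifies that recovery replays all of them.
*/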
@Test
public void testTranslogRecoveryWithMultipleGenerations() throws IOException {
final int docs = randomIntBetween(1, 4096);
final List<Long> seqNos = LongStream.range(0, docs).boxed().collect(Collectors.toList());
Randomness.shuffle(seqNos);
Engine initialEngine = null;
Engine recoveringEngine = null;
Store store = createStore();
final AtomicInteger counter = new AtomicInteger();
try {
initialEngine = createEngine(
store,
createTempDir(),
LocalCheckpointTracker::new,
(engine, operation) -> seqNos.get(counter.getAndIncrement()));
for (int i = 0; i < docs; i++) {
final String id = Integer.toString(i);
final ParsedDocument doc = testParsedDocument(id, null, testDocumentWithTextField(), SOURCE, null);
initialEngine.index(indexForDoc(doc));
if (rarely()) {
getTranslog(initialEngine).rollGeneration();
} else if (rarely()) {
initialEngine.flush();
}
}
initialEngine.close();
recoveringEngine = new InternalEngine(initialEngine.config());
recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
recoveringEngine.refresh("test");
try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), docs);
assertEquals(docs, topDocs.totalHits.value);
}
} finally {
IOUtils.close(initialEngine, recoveringEngine, store);
}
}
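/**
* Recovery can be capped at a sequence number: replaying the translog up to upToSeqNo must
* leave both the processed local checkpoint and the max seq no exactly at that bound.
*/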
@Test
public void testRecoveryFromTranslogUpToSeqNo() throws IOException {
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
try (Store store = createStore()) {
EngineConfig config = config(defaultSettings, store, createTempDir(), newMergePolicy(),
null, null, globalCheckpoint::get);
final long maxSeqNo;
try (InternalEngine engine = createEngine(config)) {
final int docs = randomIntBetween(1, 100);
for (int i = 0; i < docs; i++) {
final String id = Integer.toString(i);
final ParsedDocument doc = testParsedDocument(id, null, testDocumentWithTextField(),
SOURCE, null);
engine.index(indexForDoc(doc));
if (rarely()) {
engine.rollTranslogGeneration();
} else if (rarely()) {
engine.flush(randomBoolean(), true);
}
}
maxSeqNo = engine.getLocalCheckpointTracker().getMaxSeqNo();
globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getProcessedLocalCheckpoint()));
engine.syncTranslog();
}
try (InternalEngine engine = new InternalEngine(config)) {
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
assertThat(engine.getProcessedLocalCheckpoint(), equalTo(maxSeqNo));
assertThat(engine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(maxSeqNo));
}
try (InternalEngine engine = new InternalEngine(config)) {
long upToSeqNo = randomLongBetween(globalCheckpoint.get(), maxSeqNo);
engine.recoverFromTranslog(translogHandler, upToSeqNo);
assertThat(engine.getProcessedLocalCheckpoint(), equalTo(upToSeqNo));
assertThat(engine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(upToSeqNo));
}
}
}
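/**
* A real-time get running concurrently with a flush must keep resolving the document.
*/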
@Test
public void testConcurrentGetAndFlush() throws Exception {
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null);
engine.index(indexForDoc(doc));
final AtomicReference<Engine.GetResult> latestGetResult = new AtomicReference<>();
final BiFunction<String, Engine.SearcherScope, Searcher> searcherFactory = engine::acquireSearcher;
latestGetResult.set(engine.get(newGet(doc), searcherFactory));
final AtomicBoolean flushFinished = new AtomicBoolean(false);
final CyclicBarrier barrier = new CyclicBarrier(2);
Thread getThread = new Thread(() -> {
try {
barrier.await();
} catch (InterruptedException | BrokenBarrierException e) {
throw new RuntimeException(e);
}
while (flushFinished.get() == false) {
Engine.GetResult previousGetResult = latestGetResult.get();
if (previousGetResult != null) {
previousGetResult.close();
}
latestGetResult.set(engine.get(newGet(doc), searcherFactory));
if (latestGetResult.get().docIdAndVersion() == null) {
break;
}
}
});
getThread.start();
barrier.await();
engine.flush();
flushFinished.set(true);
getThread.join();
assertThat(latestGetResult.get().docIdAndVersion(), is(notNullValue()));
latestGetResult.get().close();
}
@Test
public void testSimpleOperations() throws Exception {
engine.refresh("warm_up");
Engine.Searcher searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
searchResult.close();
final BiFunction<String, Engine.SearcherScope, Searcher> searcherFactory = engine::acquireSearcher;
// create a document
Document document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE));
ParsedDocument doc = testParsedDocument("1", null, document, B_1, null);
engine.index(indexForDoc(doc));
// it's not there...
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 0));
searchResult.close();
// we can get it in realtime
try (Engine.GetResult getResult = engine.get(newGet(doc), searcherFactory)) {
assertThat(getResult.docIdAndVersion(), is(notNullValue()));
}
// refresh and it should be there
engine.refresh("test");
// now it's there...
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 1));
searchResult.close();
// now do an update
document = testDocument();
document.add(new TextField("value", "test1", Field.Store.YES));
document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_2), SourceFieldMapper.Defaults.FIELD_TYPE));
doc = testParsedDocument("1", null, document, B_2, null);
engine.index(indexForDoc(doc));
// it's not updated yet...
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 0));
searchResult.close();
// but, we can still get it (in realtime)
try (Engine.GetResult getResult = engine.get(newGet(doc), searcherFactory)) {
assertThat(getResult.docIdAndVersion(), is(notNullValue()));
}
// refresh and it should be updated
engine.refresh("test");
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 0));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 1));
searchResult.close();
// now delete
engine.delete(new Engine.Delete(
"1",
newUid(doc),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
));
// it's not deleted yet
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 0));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 1));
searchResult.close();
// but, get should not see it (in realtime)
try (Engine.GetResult getResult = engine.get(newGet(doc), searcherFactory)) {
assertThat(getResult.docIdAndVersion(), is(nullValue()));
}
// refresh and it should be deleted
engine.refresh("test");
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 0));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 0));
searchResult.close();
// add it back
document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE));
doc = testParsedDocument("1", null, document, B_1, null);
engine.index(new Engine.Index(
newUid(doc), doc, UNASSIGNED_SEQ_NO, primaryTerm.get(),
Versions.MATCH_DELETED, VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0));
// it's not there...
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 0));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 0));
searchResult.close();
// refresh and it should be there
engine.refresh("test");
// now it's there...
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 0));
searchResult.close();
// now flush
engine.flush();
// and, verify get (in real time)
try (Engine.GetResult getResult = engine.get(newGet(doc), searcherFactory)) {
assertThat(getResult.docIdAndVersion(), is(notNullValue()));
}
// make sure we can still work with the engine
// now do an update
document = testDocument();
document.add(new TextField("value", "test1", Field.Store.YES));
doc = testParsedDocument("1", null, document, B_1, null);
engine.index(indexForDoc(doc));
// it's not updated yet...
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 0));
searchResult.close();
// refresh and it should be updated
engine.refresh("test");
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 0));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 1));
searchResult.close();
}
@Test
public void testSearchResultRelease() throws Exception {
engine.refresh("warm_up");
Engine.Searcher searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
searchResult.close();
// create a document
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null);
engine.index(indexForDoc(doc));
// it's not there...
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 0));
searchResult.close();
// refresh and it should be there
engine.refresh("test");
// now it's there...
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 1));
// don't release the search result yet...
// delete, refresh and do a new search, it should not be there
engine.delete(new Engine.Delete(
"1",
newUid(doc),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
));
engine.refresh("test");
Engine.Searcher updateSearchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(updateSearchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
updateSearchResult.close();
// the non-released search result should not see the delete yet...
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 1));
searchResult.close();
}
@Test
public void testCommitAdvancesMinTranslogForRecovery() throws IOException {
IOUtils.close(engine, store);
final Path translogPath = createTempDir();
store = createStore();
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
final LongSupplier globalCheckpointSupplier = globalCheckpoint::get;
engine = createEngine(config(defaultSettings, store, translogPath, newMergePolicy(), null, null,
globalCheckpointSupplier));
engine.onSettingsChanged(TimeValue.MINUS_ONE, ByteSizeValue.ZERO, randomNonNegativeLong());
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null);
engine.index(indexForDoc(doc));
boolean inSync = randomBoolean();
if (inSync) {
engine.syncTranslog(); // to advance persisted local checkpoint
globalCheckpoint.set(engine.getPersistedLocalCheckpoint());
}
engine.flush();
assertThat(engine.getTranslog().currentFileGeneration(), equalTo(3L));
assertThat(engine.getTranslog().getMinFileGeneration(), equalTo(inSync ? 3L : 2L));
engine.flush();
assertThat(engine.getTranslog().currentFileGeneration(), equalTo(3L));
assertThat(engine.getTranslog().getMinFileGeneration(), equalTo(inSync ? 3L : 2L));
engine.flush(true, true);
assertThat(engine.getTranslog().currentFileGeneration(), equalTo(3L));
assertThat(engine.getTranslog().getMinFileGeneration(), equalTo(inSync ? 3L : 2L));
globalCheckpoint.set(engine.getPersistedLocalCheckpoint());
engine.flush(true, true);
assertThat(engine.getTranslog().currentFileGeneration(), equalTo(3L));
assertThat(engine.getTranslog().getMinFileGeneration(), equalTo(3L));
}
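/**
* A synced flush must fail on a commit-id mismatch or on pending operations, and on
* success must record the sync id in the commit user data.
*/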
@Test
public void testSyncedFlush() throws IOException {
try (Store store = createStore();
Engine engine = createEngine(defaultSettings, store, createTempDir(), new LogByteSizeMergePolicy(), null)) {
final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20);
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null);
engine.index(indexForDoc(doc));
Engine.CommitId commitID = engine.flush();
assertThat(commitID, equalTo(new Engine.CommitId(store.readLastCommittedSegmentsInfo().getId())));
byte[] wrongBytes = Base64.getDecoder().decode(commitID.toString());
wrongBytes[0] = (byte) ~wrongBytes[0];
Engine.CommitId wrongId = new Engine.CommitId(wrongBytes);
assertEquals("should fail to sync flush with wrong id (but no docs)", engine.syncFlush(syncId + "1", wrongId),
Engine.SyncedFlushResult.COMMIT_MISMATCH);
engine.index(indexForDoc(doc));
assertEquals("should fail to sync flush with right id but pending doc",
engine.syncFlush(syncId + "2", commitID), Engine.SyncedFlushResult.PENDING_OPERATIONS);
commitID = engine.flush();
assertEquals("should succeed to flush commit with right id and no pending doc", engine.syncFlush(syncId, commitID),
Engine.SyncedFlushResult.SUCCESS);
assertEquals(store.readLastCommittedSegmentsInfo().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
}
}
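/**
* A sync id survives force-merges via tryRenewSyncCommit as long as no new operations
* arrived; a subsequent index or delete invalidates it and the next flush clears it.
*/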
@Test
public void testRenewSyncFlush() throws Exception {
final int iters = randomIntBetween(2, 5); // run this a couple of times to get some coverage
for (int i = 0; i < iters; i++) {
try (Store store = createStore();
InternalEngine engine =
createEngine(config(defaultSettings, store, createTempDir(), new LogDocMergePolicy(), null))) {
final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20);
Engine.Index doc1 =
indexForDoc(testParsedDocument("1", null, testDocumentWithTextField(), B_1, null));
engine.index(doc1);
assertEquals(engine.getLastWriteNanos(), doc1.startTime());
engine.flush();
Engine.Index doc2 =
indexForDoc(testParsedDocument("2", null, testDocumentWithTextField(), B_1, null));
engine.index(doc2);
assertEquals(engine.getLastWriteNanos(), doc2.startTime());
engine.flush();
final boolean forceMergeFlushes = randomBoolean();
final ParsedDocument parsedDoc3 =
testParsedDocument("3", null, testDocumentWithTextField(), B_1, null);
if (forceMergeFlushes) {
engine.index(new Engine.Index(newUid(parsedDoc3), parsedDoc3, UNASSIGNED_SEQ_NO, 0,
Versions.MATCH_ANY, VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY,
System.nanoTime() - engine.engineConfig.getFlushMergesAfter().nanos(),
-1, false, UNASSIGNED_SEQ_NO, 0));
} else {
engine.index(indexForDoc(parsedDoc3));
}
Engine.CommitId commitID = engine.flush();
assertEquals("should succeed to flush commit with right id and no pending doc", engine.syncFlush(syncId, commitID),
Engine.SyncedFlushResult.SUCCESS);
assertEquals(3, engine.segments(false).size());
engine.forceMerge(forceMergeFlushes, 1, false, false, false, UUIDs.randomBase64UUID());
if (forceMergeFlushes == false) {
engine.refresh("make all segments visible");
assertEquals(4, engine.segments(false).size());
assertEquals(store.readLastCommittedSegmentsInfo().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
assertTrue(engine.tryRenewSyncCommit());
assertEquals(1, engine.segments(false).size());
} else {
engine.refresh("test");
assertBusy(() -> assertEquals(1, engine.segments(false).size()));
}
assertEquals(store.readLastCommittedSegmentsInfo().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
if (randomBoolean()) {
Engine.Index doc4 =
indexForDoc(testParsedDocument("4", null, testDocumentWithTextField(), B_1, null));
engine.index(doc4);
assertEquals(engine.getLastWriteNanos(), doc4.startTime());
} else {
Engine.Delete delete = new Engine.Delete(
doc1.id(),
doc1.uid(),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
);
engine.delete(delete);
assertEquals(engine.getLastWriteNanos(), delete.startTime());
}
assertFalse(engine.tryRenewSyncCommit());
// we might hit a concurrent flush from a finishing merge here - just wait if ongoing...
engine.flush(false, true);
assertNull(store.readLastCommittedSegmentsInfo().getUserData().get(Engine.SYNC_COMMIT_ID));
assertNull(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID));
}
}
}
@Test
public void testSyncedFlushSurvivesEngineRestart() throws IOException {
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
IOUtils.close(store, engine);
store = createStore();
engine = createEngine(store, primaryTranslogDir, globalCheckpoint::get);
final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20);
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(),
new BytesArray("{}"), null);
engine.index(indexForDoc(doc));
globalCheckpoint.set(0L);
final Engine.CommitId commitID = engine.flush();
assertEquals("should succeed to flush commit with right id and no pending doc", engine.syncFlush(syncId, commitID),
Engine.SyncedFlushResult.SUCCESS);
assertEquals(store.readLastCommittedSegmentsInfo().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
EngineConfig config = engine.config();
if (randomBoolean()) {
engine.close();
} else {
engine.flushAndClose();
}
if (randomBoolean()) {
final String translogUUID = Translog.createEmptyTranslog(config.getTranslogConfig().getTranslogPath(),
UNASSIGNED_SEQ_NO, shardId, primaryTerm.get());
store.associateIndexWithNewTranslog(translogUUID);
}
engine = new InternalEngine(config);
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
}
@Test
public void testSyncedFlushVanishesOnReplay() throws IOException {
final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20);
ParsedDocument doc = testParsedDocument("1", null,
testDocumentWithTextField(), new BytesArray("{}"), null);
engine.index(indexForDoc(doc));
final Engine.CommitId commitID = engine.flush();
assertEquals("should succeed to flush commit with right id and no pending doc", engine.syncFlush(syncId, commitID),
Engine.SyncedFlushResult.SUCCESS);
assertEquals(store.readLastCommittedSegmentsInfo().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
doc = testParsedDocument("2", null, testDocumentWithTextField(), new BytesArray("{}"), null);
engine.index(indexForDoc(doc));
EngineConfig config = engine.config();
engine.close();
engine = new InternalEngine(config);
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
assertNull("Sync ID must be gone since we have a document to replay",
engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID));
}
@Test
public void testVersioningNewCreate() throws IOException {
ParsedDocument doc = testParsedDocument("1", null, testDocument(), B_1, null);
Engine.Index create = new Engine.Index(
newUid(doc), doc, UNASSIGNED_SEQ_NO, primaryTerm.get(),
Versions.MATCH_DELETED, VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult indexResult = engine.index(create);
assertThat(indexResult.getVersion(), equalTo(1L));
create = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), create.primaryTerm(), indexResult.getVersion(),
null, REPLICA, 0, -1, false, UNASSIGNED_SEQ_NO, 0);
indexResult = replicaEngine.index(create);
assertThat(indexResult.getVersion(), equalTo(1L));
}
@Test
public void testReplicatedVersioningWithFlush() throws IOException {
ParsedDocument doc = testParsedDocument("1", null, testDocument(), B_1, null);
Engine.Index create = new Engine.Index(
newUid(doc), doc, UNASSIGNED_SEQ_NO, primaryTerm.get(),
Versions.MATCH_DELETED, VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult indexResult = engine.index(create);
assertThat(indexResult.getVersion(), equalTo(1L));
assertTrue(indexResult.isCreated());
create = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), create.primaryTerm(), indexResult.getVersion(),
null, REPLICA, 0, -1, false, UNASSIGNED_SEQ_NO, 0);
indexResult = replicaEngine.index(create);
assertThat(indexResult.getVersion(), equalTo(1L));
assertTrue(indexResult.isCreated());
if (randomBoolean()) {
engine.flush();
}
if (randomBoolean()) {
replicaEngine.flush();
}
Engine.Index update = new Engine.Index(
newUid(doc), doc, UNASSIGNED_SEQ_NO, primaryTerm.get(),
1, VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult updateResult = engine.index(update);
assertThat(updateResult.getVersion(), equalTo(2L));
assertFalse(updateResult.isCreated());
update = new Engine.Index(newUid(doc), doc, updateResult.getSeqNo(), update.primaryTerm(), updateResult.getVersion(),
null, REPLICA, 0, -1, false, UNASSIGNED_SEQ_NO, 0);
updateResult = replicaEngine.index(update);
assertThat(updateResult.getVersion(), equalTo(2L));
assertFalse(updateResult.isCreated());
replicaEngine.refresh("test");
try (Searcher searcher = replicaEngine.acquireSearcher("test")) {
assertEquals(1, searcher.getDirectoryReader().numDocs());
}
engine.refresh("test");
try (Searcher searcher = engine.acquireSearcher("test")) {
assertEquals(1, searcher.getDirectoryReader().numDocs());
}
}
/**
* simulates what an upsert / update API does
*/
@Test
public void testVersionedUpdate() throws IOException {
final BiFunction<String, Engine.SearcherScope, Searcher> searcherFactory = engine::acquireSearcher;
ParsedDocument doc = testParsedDocument("1", null, testDocument(), B_1, null);
Engine.Index create = new Engine.Index(
newUid(doc), doc, UNASSIGNED_SEQ_NO, primaryTerm.get(),
Versions.MATCH_DELETED, VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult indexResult = engine.index(create);
assertThat(indexResult.getVersion(), equalTo(1L));
try (Engine.GetResult get = engine.get(new Engine.Get(doc.id(), create.uid()), searcherFactory)) {
assertEquals(1, get.docIdAndVersion().version);
}
Engine.Index update_1 = new Engine.Index(
newUid(doc), doc, UNASSIGNED_SEQ_NO, primaryTerm.get(),
1, VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult update_1_result = engine.index(update_1);
assertThat(update_1_result.getVersion(), equalTo(2L));
try (Engine.GetResult get = engine.get(new Engine.Get(doc.id(), create.uid()), searcherFactory)) {
assertEquals(2, get.docIdAndVersion().version);
}
Engine.Index update_2 = new Engine.Index(
newUid(doc), doc, UNASSIGNED_SEQ_NO, primaryTerm.get(),
2, VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult update_2_result = engine.index(update_2);
assertThat(update_2_result.getVersion(), equalTo(3L));
try (Engine.GetResult get = engine.get(new Engine.Get(doc.id(), create.uid()), searcherFactory)) {
assertEquals(3, get.docIdAndVersion().version);
}
}
@Test
public void testVersioningNewIndex() throws IOException {
ParsedDocument doc = testParsedDocument("1", null, testDocument(), B_1, null);
Engine.Index index = indexForDoc(doc);
Engine.IndexResult indexResult = engine.index(index);
assertThat(indexResult.getVersion(), equalTo(1L));
index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(),
null, REPLICA, 0, -1, false, UNASSIGNED_SEQ_NO, 0);
indexResult = replicaEngine.index(index);
assertThat(indexResult.getVersion(), equalTo(1L));
}
/*
* we are testing an edge case here where we have a fully deleted segment that is retained but has all its IDs pruned away.
*/
@Test
public void testLookupVersionWithPrunedAwayIds() throws IOException {
try (Directory dir = newDirectory()) {
IndexWriterConfig indexWriterConfig = new IndexWriterConfig(Lucene.STANDARD_ANALYZER);
indexWriterConfig.setSoftDeletesField(Lucene.SOFT_DELETES_FIELD);
try (IndexWriter writer = new IndexWriter(dir,
indexWriterConfig.setMergePolicy(new SoftDeletesRetentionMergePolicy(Lucene.SOFT_DELETES_FIELD,
MatchAllDocsQuery::new, new PrunePostingsMergePolicy(indexWriterConfig.getMergePolicy(), "_id"))))) {
org.apache.lucene.document.Document doc = new org.apache.lucene.document.Document();
doc.add(new Field(IdFieldMapper.NAME, "1", IdFieldMapper.Defaults.FIELD_TYPE));
doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, -1));
doc.add(new NumericDocValuesField(SeqNoFieldMapper.NAME, 1));
doc.add(new NumericDocValuesField(SeqNoFieldMapper.PRIMARY_TERM_NAME, 1));
writer.addDocument(doc);
writer.flush();
writer.softUpdateDocument(new Term(IdFieldMapper.NAME, "1"), doc, new NumericDocValuesField(Lucene.SOFT_DELETES_FIELD, 1));
writer.updateNumericDocValue(new Term(IdFieldMapper.NAME, "1"), Lucene.SOFT_DELETES_FIELD, 1);
writer.forceMerge(1);
try (DirectoryReader reader = DirectoryReader.open(writer)) {
assertEquals(1, reader.leaves().size());
assertNull(VersionsAndSeqNoResolver.loadDocIdAndVersion(reader, new Term(IdFieldMapper.NAME, "1"), false));
}
}
}
}
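/**
* Re-indexes every document a second time so the first segment ends up fully deleted;
* the engine must keep handling version lookups against such segments without error.
*/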
@Test
public void testUpdateWithFullyDeletedSegments() throws IOException {
Settings.Builder settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
.put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), Integer.MAX_VALUE);
final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
final Set<String> liveDocs = new HashSet<>();
try (Store store = createStore();
InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), newMergePolicy(), null,
null, globalCheckpoint::get))) {
int numDocs = scaledRandomIntBetween(10, 100);
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null);
engine.index(indexForDoc(doc));
liveDocs.add(doc.id());
}
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null);
engine.index(indexForDoc(doc));
liveDocs.add(doc.id());
}
}
}
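/**
* With a soft-delete retention of retainedExtraOps operations, a force-merge may prune
* history below min(globalCheckpoint + 1 - retainedExtraOps, safeCommitCheckpoint + 1)
* but must retain every operation at or above that bound.
*/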
@Test
public void testForceMergeWithSoftDeletesRetention() throws Exception {
final long retainedExtraOps = randomLongBetween(0, 10);
Settings.Builder settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
.put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), retainedExtraOps);
final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
final MapperService mapperService = createMapperService("test");
final Set<String> liveDocs = new HashSet<>();
try (Store store = createStore();
InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), newMergePolicy(), null,
null, globalCheckpoint::get))) {
int numDocs = scaledRandomIntBetween(10, 100);
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null);
engine.index(indexForDoc(doc));
liveDocs.add(doc.id());
}
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null);
if (randomBoolean()) {
engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get()));
liveDocs.remove(doc.id());
}
if (randomBoolean()) {
engine.index(indexForDoc(doc));
liveDocs.add(doc.id());
}
if (randomBoolean()) {
engine.flush(randomBoolean(), true);
}
}
engine.flush();
long localCheckpoint = engine.getProcessedLocalCheckpoint();
globalCheckpoint.set(randomLongBetween(0, localCheckpoint));
engine.syncTranslog();
final long safeCommitCheckpoint;
try (Engine.IndexCommitRef safeCommit = engine.acquireSafeIndexCommit()) {
safeCommitCheckpoint = Long.parseLong(safeCommit.getIndexCommit().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY));
}
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService);
Map<Long, Translog.Operation> ops = readAllOperationsInLucene(engine, mapperService)
.stream().collect(Collectors.toMap(Translog.Operation::seqNo, Function.identity()));
for (long seqno = 0; seqno <= localCheckpoint; seqno++) {
long minSeqNoToRetain = Math.min(globalCheckpoint.get() + 1 - retainedExtraOps,
safeCommitCheckpoint + 1);
String msg = "seq# [" + seqno + "], global checkpoint [" + globalCheckpoint + "], retained-ops [" +
retainedExtraOps + "]";
if (seqno < minSeqNoToRetain) {
Translog.Operation op = ops.get(seqno);
if (op != null) {
assertThat(op, instanceOf(Translog.Index.class));
assertThat(msg, ((Translog.Index) op).id(), isIn(liveDocs));
assertEquals(msg, ((Translog.Index) op).source(), B_1);
}
} else {
assertThat(msg, ops.get(seqno), notNullValue());
}
}
settings.put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), 0);
indexSettings.updateIndexMetadata(IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(
settings).build());
engine.onSettingsChanged(indexSettings.getTranslogRetentionAge(), indexSettings.getTranslogRetentionSize(),
indexSettings.getSoftDeleteRetentionOperations());
globalCheckpoint.set(localCheckpoint);
engine.syncTranslog();
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService);
assertThat(readAllOperationsInLucene(engine, mapperService), hasSize(liveDocs.size()));
}
}
@Test
public void testForceMergeWithSoftDeletesRetentionAndRecoverySource() throws Exception {
final long retainedExtraOps = randomLongBetween(0, 10);
Settings.Builder settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
.put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), retainedExtraOps);
final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
final MapperService mapperService = createMapperService("test");
final boolean omitSourceAllTheTime = randomBoolean();
final Set<String> liveDocs = new HashSet<>();
final Set<String> liveDocsWithSource = new HashSet<>();
try (Store store = createStore();
InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), newMergePolicy(), null,
null,
globalCheckpoint::get))) {
int numDocs = scaledRandomIntBetween(10, 100);
for (int i = 0; i < numDocs; i++) {
boolean useRecoverySource = randomBoolean() || omitSourceAllTheTime;
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null,
useRecoverySource);
engine.index(indexForDoc(doc));
liveDocs.add(doc.id());
if (useRecoverySource == false) {
liveDocsWithSource.add(Integer.toString(i));
}
}
for (int i = 0; i < numDocs; i++) {
boolean useRecoverySource = randomBoolean() || omitSourceAllTheTime;
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null,
useRecoverySource);
if (randomBoolean()) {
engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get()));
liveDocs.remove(doc.id());
liveDocsWithSource.remove(doc.id());
}
if (randomBoolean()) {
engine.index(indexForDoc(doc));
liveDocs.add(doc.id());
if (useRecoverySource == false) {
liveDocsWithSource.add(doc.id());
} else {
liveDocsWithSource.remove(doc.id());
}
}
if (randomBoolean()) {
engine.flush(randomBoolean(), true);
}
}
engine.flush();
globalCheckpoint.set(randomLongBetween(0, engine.getPersistedLocalCheckpoint()));
engine.syncTranslog();
final long minSeqNoToRetain;
try (Engine.IndexCommitRef safeCommit = engine.acquireSafeIndexCommit()) {
long safeCommitLocalCheckpoint = Long.parseLong(
safeCommit.getIndexCommit().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY));
minSeqNoToRetain = Math.min(globalCheckpoint.get() + 1 - retainedExtraOps,
safeCommitLocalCheckpoint + 1);
}
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService);
Map<Long, Translog.Operation> ops = readAllOperationsInLucene(engine, mapperService)
.stream().collect(Collectors.toMap(Translog.Operation::seqNo, Function.identity()));
for (long seqno = 0; seqno <= engine.getPersistedLocalCheckpoint(); seqno++) {
String msg = "seq# [" + seqno + "], global checkpoint [" + globalCheckpoint + "], retained-ops [" +
retainedExtraOps + "]";
if (seqno < minSeqNoToRetain) {
Translog.Operation op = ops.get(seqno);
if (op != null) {
assertThat(op, instanceOf(Translog.Index.class));
assertThat(msg, ((Translog.Index) op).id(), isIn(liveDocs));
}
} else {
Translog.Operation op = ops.get(seqno);
assertThat(msg, op, notNullValue());
if (op instanceof Translog.Index) {
assertEquals(msg, ((Translog.Index) op).source(), B_1);
}
}
}
settings.put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), 0);
indexSettings.updateIndexMetadata(IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(
settings).build());
engine.onSettingsChanged(indexSettings.getTranslogRetentionAge(), indexSettings.getTranslogRetentionSize(),
indexSettings.getSoftDeleteRetentionOperations());
// If we already merged down to 1 segment, then the next force-merge will be a noop. We need to add an extra segment to make
// merges happen so we can verify that _recovery_source are pruned. See: https://github.com/elastic/elasticsearch/issues/41628.
final int numSegments;
try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) {
numSegments = searcher.getDirectoryReader().leaves().size();
}
if (numSegments == 1) {
boolean useRecoverySource = randomBoolean() || omitSourceAllTheTime;
ParsedDocument doc = testParsedDocument("dummy", null, testDocument(), B_1, null, useRecoverySource);
engine.index(indexForDoc(doc));
if (useRecoverySource == false) {
liveDocsWithSource.add(doc.id());
}
engine.syncTranslog();
globalCheckpoint.set(engine.getPersistedLocalCheckpoint());
engine.flush(randomBoolean(), true);
} else {
globalCheckpoint.set(engine.getPersistedLocalCheckpoint());
engine.syncTranslog();
}
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService);
assertThat(readAllOperationsInLucene(engine, mapperService), hasSize(liveDocsWithSource.size()));
}
}
@Test
public void testForceMergeAndClose() throws IOException, InterruptedException {
int numIters = randomIntBetween(2, 10);
for (int j = 0; j < numIters; j++) {
try (Store store = createStore()) {
final InternalEngine engine = createEngine(store, createTempDir());
final CountDownLatch startGun = new CountDownLatch(1);
final CountDownLatch indexed = new CountDownLatch(1);
Thread thread = new Thread() {
@Override
public void run() {
try {
try {
startGun.await();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
int i = 0;
while (true) {
int numDocs = randomIntBetween(1, 20);
for (int j = 0; j < numDocs; j++) {
i++;
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1,
null);
Engine.Index index = indexForDoc(doc);
engine.index(index);
}
engine.refresh("test");
indexed.countDown();
try {
engine.forceMerge(
randomBoolean(),
1,
false,
randomBoolean(),
randomBoolean(),
UUIDs.randomBase64UUID()
);
} catch (IOException e) {
return;
}
}
} catch (AlreadyClosedException ex) {
// fine
} catch (IOException e) {
throw new AssertionError(e);
}
}
};
thread.start();
startGun.countDown();
int someIters = randomIntBetween(1, 10);
for (int i = 0; i < someIters; i++) {
engine.forceMerge(randomBoolean(), 1, false, randomBoolean(), randomBoolean(), UUIDs.randomBase64UUID());
}
indexed.await();
IOUtils.close(engine);
thread.join();
}
}
}
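/**
* Creating a document that already exists (index with MATCH_DELETED) must fail with a
* version conflict.
*/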
@Test
public void testVersioningCreateExistsException() throws IOException {
ParsedDocument doc = testParsedDocument("1", null, testDocument(), B_1, null);
Engine.Index create = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0,
Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, 0, -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult indexResult = engine.index(create);
assertThat(indexResult.getVersion(), equalTo(1L));
create = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0, Versions.MATCH_DELETED,
VersionType.INTERNAL, PRIMARY, 0, -1, false, UNASSIGNED_SEQ_NO, 0);
indexResult = engine.index(create);
assertThat(indexResult.getResultType(), equalTo(Engine.Result.Type.FAILURE));
assertThat(indexResult.getFailure(), instanceOf(VersionConflictEngineException.class));
}
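/**
* Replays an out-of-order single-document history on the replica and expects the final
* state to match the highest-versioned operation.
*/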
@Test
public void testOutOfOrderDocsOnReplica() throws IOException {
final List<Engine.Operation> ops = generateSingleDocHistory(
true, randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL, VersionType.EXTERNAL_GTE, VersionType.FORCE),
false, 2, 2, 20, "1");
assertOpsOnReplica(ops, replicaEngine, true, logger);
}
@Test
public void testConcurrentOutOfOrderDocsOnReplica() throws IOException, InterruptedException {
final List<Engine.Operation> opsDoc1 = generateSingleDocHistory(
true, randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL), false, 2, 100, 300, "1");
final Engine.Operation lastOpDoc1 = opsDoc1.get(opsDoc1.size() - 1);
final String lastFieldValueDoc1;
if (lastOpDoc1 instanceof Engine.Index) {
Engine.Index index = (Engine.Index) lastOpDoc1;
lastFieldValueDoc1 = index.docs().get(0).get("value");
} else {
// delete
lastFieldValueDoc1 = null;
}
final List<Engine.Operation> opsDoc2 =
generateSingleDocHistory(
true, randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL), false, 2, 100, 300, "2");
final Engine.Operation lastOpDoc2 = opsDoc2.get(opsDoc2.size() - 1);
final String lastFieldValueDoc2;
if (lastOpDoc2 instanceof Engine.Index) {
Engine.Index index = (Engine.Index) lastOpDoc2;
lastFieldValueDoc2 = index.docs().get(0).get("value");
} else {
// delete
lastFieldValueDoc2 = null;
}
// randomly interleave
final AtomicLong seqNoGenerator = new AtomicLong();
BiFunction<Engine.Operation, Long, Engine.Operation> seqNoUpdater = (operation, newSeqNo) -> {
if (operation instanceof Engine.Index) {
Engine.Index index = (Engine.Index) operation;
Document doc = testDocumentWithTextField(index.docs().get(0).get("value"));
ParsedDocument parsedDocument = testParsedDocument(index.id(), index.routing(), doc, index.source(), null);
return new Engine.Index(index.uid(), parsedDocument, newSeqNo, index.primaryTerm(), index.version(),
index.versionType(), index.origin(), index.startTime(), index.getAutoGeneratedIdTimestamp(), index.isRetry(),
UNASSIGNED_SEQ_NO, 0);
} else {
Engine.Delete delete = (Engine.Delete) operation;
return new Engine.Delete(
delete.id(),
delete.uid(),
newSeqNo,
delete.primaryTerm(),
delete.version(),
delete.versionType(),
delete.origin(),
delete.startTime(),
UNASSIGNED_SEQ_NO,
0
);
}
};
final List<Engine.Operation> allOps = new ArrayList<>();
Iterator<Engine.Operation> iter1 = opsDoc1.iterator();
Iterator<Engine.Operation> iter2 = opsDoc2.iterator();
while (iter1.hasNext() && iter2.hasNext()) {
final Engine.Operation next = randomBoolean() ? iter1.next() : iter2.next();
allOps.add(seqNoUpdater.apply(next, seqNoGenerator.getAndIncrement()));
}
iter1.forEachRemaining(o -> allOps.add(seqNoUpdater.apply(o, seqNoGenerator.getAndIncrement())));
iter2.forEachRemaining(o -> allOps.add(seqNoUpdater.apply(o, seqNoGenerator.getAndIncrement())));
// insert some duplicates
randomSubsetOf(allOps).forEach(op -> allOps.add(seqNoUpdater.apply(op, op.seqNo())));
shuffle(allOps, random());
concurrentlyApplyOps(allOps, engine);
engine.refresh("test");
if (lastFieldValueDoc1 != null) {
try (Searcher searcher = engine.acquireSearcher("test")) {
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(new TermQuery(new Term("value", lastFieldValueDoc1)), collector);
assertThat(collector.getTotalHits(), equalTo(1));
}
}
if (lastFieldValueDoc2 != null) {
try (Searcher searcher = engine.acquireSearcher("test")) {
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(new TermQuery(new Term("value", lastFieldValueDoc2)), collector);
assertThat(collector.getTotalHits(), equalTo(1));
}
}
int totalExpectedOps = 0;
if (lastFieldValueDoc1 != null) {
totalExpectedOps++;
}
if (lastFieldValueDoc2 != null) {
totalExpectedOps++;
}
assertVisibleCount(engine, totalExpectedOps);
}
@Test
public void testInternalVersioningOnPrimary() throws IOException {
final List<Engine.Operation> ops = generateSingleDocHistory(
false, VersionType.INTERNAL, false, 2, 2, 20, "1");
assertOpsOnPrimary(ops, Versions.NOT_FOUND, true, engine);
}
@Test
public void testVersionOnPrimaryWithConcurrentRefresh() throws Exception {
List<Engine.Operation> ops = generateSingleDocHistory(
false, VersionType.INTERNAL, false, 2, 10, 100, "1");
CountDownLatch latch = new CountDownLatch(1);
AtomicBoolean running = new AtomicBoolean(true);
Thread refreshThread = new Thread(() -> {
latch.countDown();
while (running.get()) {
engine.refresh("test");
}
});
refreshThread.start();
try {
latch.await();
assertOpsOnPrimary(ops, Versions.NOT_FOUND, true, engine);
} finally {
running.set(false);
refreshThread.join();
}
}
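/**
* Replays the given operations on the primary, randomly mixing correct and conflicting
* versions/seq-nos, asserting success or VersionConflictEngineException accordingly;
* returns the number of operations that were actually applied.
*/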
private int assertOpsOnPrimary(List<Engine.Operation> ops,
long currentOpVersion,
boolean docDeleted,
InternalEngine engine)
throws IOException {
String lastFieldValue = null;
int opsPerformed = 0;
long lastOpVersion = currentOpVersion;
long lastOpSeqNo = UNASSIGNED_SEQ_NO;
long lastOpTerm = UNASSIGNED_PRIMARY_TERM;
PrimaryTermSupplier currentTerm = (PrimaryTermSupplier) engine.engineConfig.getPrimaryTermSupplier();
BiFunction<Long, Engine.Index, Engine.Index> indexWithVersion = (version, index) -> new Engine.Index(
index.uid(),
index.parsedDoc(),
UNASSIGNED_SEQ_NO,
currentTerm.get(),
version,
index.versionType(),
index.origin(),
index.startTime(),
index.getAutoGeneratedIdTimestamp(),
index.isRetry(),
UNASSIGNED_SEQ_NO,
0);
BiFunction<Long, Engine.Delete, Engine.Delete> delWithVersion = (version, delete) -> new Engine.Delete(
delete.id(),
delete.uid(),
UNASSIGNED_SEQ_NO,
currentTerm.get(),
version,
delete.versionType(),
delete.origin(),
delete.startTime(),
UNASSIGNED_SEQ_NO,
0);
TriFunction<Long, Long, Engine.Index, Engine.Index> indexWithSeq = (seqNo, term, index) -> new Engine.Index(
index.uid(),
index.parsedDoc(),
UNASSIGNED_SEQ_NO,
currentTerm.get(),
index.version(),
index.versionType(),
index.origin(),
index.startTime(),
index.getAutoGeneratedIdTimestamp(),
index.isRetry(),
seqNo,
term);
TriFunction<Long, Long, Engine.Delete, Engine.Delete> delWithSeq = (seqNo, term, delete) -> new Engine.Delete(
delete.id(),
delete.uid(),
UNASSIGNED_SEQ_NO,
currentTerm.get(),
delete.version(),
delete.versionType(),
delete.origin(),
delete.startTime(),
seqNo,
term);
Function<Engine.Index, Engine.Index> indexWithCurrentTerm = index -> new Engine.Index(
index.uid(),
index.parsedDoc(),
UNASSIGNED_SEQ_NO,
currentTerm.get(),
index.version(),
index.versionType(),
index.origin(),
index.startTime(),
index.getAutoGeneratedIdTimestamp(),
index.isRetry(),
index.getIfSeqNo(),
index.getIfPrimaryTerm());
Function<Engine.Delete, Engine.Delete> deleteWithCurrentTerm = delete -> new Engine.Delete(
delete.id(),
delete.uid(),
UNASSIGNED_SEQ_NO,
currentTerm.get(),
delete.version(),
delete.versionType(),
delete.origin(),
delete.startTime(),
delete.getIfSeqNo(),
delete.getIfPrimaryTerm());
for (Engine.Operation op : ops) {
final boolean versionConflict = rarely();
final boolean versionedOp = versionConflict || randomBoolean();
final long conflictingVersion = docDeleted || randomBoolean() ?
lastOpVersion + (randomBoolean() ? 1 : -1) :
Versions.MATCH_DELETED;
final long conflictingSeqNo = lastOpSeqNo == UNASSIGNED_SEQ_NO || randomBoolean() ?
lastOpSeqNo + 5 : // use 5 to go above 0 for magic numbers
lastOpSeqNo;
final long conflictingTerm = conflictingSeqNo == lastOpSeqNo || randomBoolean() ? lastOpTerm + 1 : lastOpTerm;
if (rarely()) {
currentTerm.set(currentTerm.get() + 1L);
engine.rollTranslogGeneration();
}
final long correctVersion = docDeleted ? Versions.MATCH_DELETED : lastOpVersion;
logger.info("performing [{}]{}{}",
op.operationType().name().charAt(0),
versionConflict ? " (conflict " + conflictingVersion + ")" : "",
versionedOp ? " (versioned " + correctVersion + ", seqNo " + lastOpSeqNo + ", term " + lastOpTerm + " )" : "");
if (op instanceof Engine.Index) {
final Engine.Index index = (Engine.Index) op;
if (versionConflict) {
// generate a conflict
final Engine.IndexResult result;
if (randomBoolean()) {
result = engine.index(indexWithSeq.apply(conflictingSeqNo, conflictingTerm, index));
} else {
result = engine.index(indexWithVersion.apply(conflictingVersion, index));
}
assertThat(result.isCreated(), equalTo(false));
assertThat(result.getVersion(), equalTo(lastOpVersion));
assertThat(result.getResultType(), equalTo(Engine.Result.Type.FAILURE));
assertThat(result.getFailure(), instanceOf(VersionConflictEngineException.class));
} else {
final Engine.IndexResult result;
if (versionedOp) {
// TODO: add support for non-existing docs
if (randomBoolean() && lastOpSeqNo != SequenceNumbers.UNASSIGNED_SEQ_NO && docDeleted == false) {
result = engine.index(indexWithSeq.apply(lastOpSeqNo, lastOpTerm, index));
} else {
result = engine.index(indexWithVersion.apply(correctVersion, index));
}
} else {
result = engine.index(indexWithCurrentTerm.apply(index));
}
assertThat(result.isCreated(), equalTo(docDeleted));
assertThat(result.getVersion(), equalTo(Math.max(lastOpVersion + 1, 1)));
assertThat(result.getResultType(), equalTo(Engine.Result.Type.SUCCESS));
assertThat(result.getFailure(), nullValue());
lastFieldValue = index.docs().get(0).get("value");
docDeleted = false;
lastOpVersion = result.getVersion();
lastOpSeqNo = result.getSeqNo();
lastOpTerm = result.getTerm();
opsPerformed++;
}
} else {
final Engine.Delete delete = (Engine.Delete) op;
if (versionConflict) {
// generate a conflict
Engine.DeleteResult result;
if (randomBoolean()) {
result = engine.delete(delWithSeq.apply(conflictingSeqNo, conflictingTerm, delete));
} else {
result = engine.delete(delWithVersion.apply(conflictingVersion, delete));
}
assertThat(result.isFound(), equalTo(docDeleted == false));
assertThat(result.getVersion(), equalTo(lastOpVersion));
assertThat(result.getResultType(), equalTo(Engine.Result.Type.FAILURE));
assertThat(result.getFailure(), instanceOf(VersionConflictEngineException.class));
} else {
final Engine.DeleteResult result;
long correctSeqNo = docDeleted ? UNASSIGNED_SEQ_NO : lastOpSeqNo;
if (versionedOp && lastOpSeqNo != UNASSIGNED_SEQ_NO && randomBoolean()) {
result = engine.delete(delWithSeq.apply(correctSeqNo, lastOpTerm, delete));
} else if (versionedOp) {
result = engine.delete(delWithVersion.apply(correctVersion, delete));
} else {
result = engine.delete(deleteWithCurrentTerm.apply(delete));
}
assertThat(result.isFound(), equalTo(docDeleted == false));
assertThat(result.getVersion(), equalTo(Math.max(lastOpVersion + 1, 1)));
assertThat(result.getResultType(), equalTo(Engine.Result.Type.SUCCESS));
assertThat(result.getFailure(), nullValue());
docDeleted = true;
lastOpVersion = result.getVersion();
lastOpSeqNo = result.getSeqNo();
lastOpTerm = result.getTerm();
opsPerformed++;
}
}
if (randomBoolean()) {
// refresh and take the chance to check everything is ok so far
assertVisibleCount(engine, docDeleted ? 0 : 1);
// even if the doc is not deleted, lastFieldValue can still be null if this is the
// first op and it failed.
if (docDeleted == false && lastFieldValue != null) {
try (Searcher searcher = engine.acquireSearcher("test")) {
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(new TermQuery(new Term("value", lastFieldValue)), collector);
assertThat(collector.getTotalHits(), equalTo(1));
}
}
}
if (randomBoolean()) {
engine.flush();
engine.refresh("test");
}
if (rarely()) {
// simulate GC deletes
engine.refresh("gc_simulation", Engine.SearcherScope.INTERNAL, true);
engine.clearDeletedTombstones();
if (docDeleted) {
lastOpVersion = Versions.NOT_FOUND;
lastOpSeqNo = UNASSIGNED_SEQ_NO;
lastOpTerm = UNASSIGNED_PRIMARY_TERM;
}
}
}
assertVisibleCount(engine, docDeleted ? 0 : 1);
if (docDeleted == false) {
try (Searcher searcher = engine.acquireSearcher("test")) {
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(new TermQuery(new Term("value", lastFieldValue)), collector);
assertThat(collector.getTotalHits(), equalTo(1));
}
}
return opsPerformed;
}
@Test
public void testNonInternalVersioningOnPrimary() throws IOException {
final Set<VersionType> nonInternalVersioning = new HashSet<>(Arrays.asList(VersionType.values()));
nonInternalVersioning.remove(VersionType.INTERNAL);
final VersionType versionType = randomFrom(nonInternalVersioning);
final List<Engine.Operation> ops = generateSingleDocHistory(
false, versionType, false, 2, 2, 20, "1");
final Engine.Operation lastOp = ops.get(ops.size() - 1);
final String lastFieldValue;
if (lastOp instanceof Engine.Index) {
Engine.Index index = (Engine.Index) lastOp;
lastFieldValue = index.docs().get(0).get("value");
} else {
// delete
lastFieldValue = null;
}
// other version types don't support out of order processing.
if (versionType == VersionType.EXTERNAL) {
shuffle(ops, random());
}
long highestOpVersion = Versions.NOT_FOUND;
long seqNo = -1;
boolean docDeleted = true;
for (Engine.Operation op : ops) {
logger.info("performing [{}], v [{}], seq# [{}], term [{}]",
op.operationType().name().charAt(0), op.version(), op.seqNo(), op.primaryTerm());
if (op instanceof Engine.Index) {
final Engine.Index index = (Engine.Index) op;
Engine.IndexResult result = engine.index(index);
if (op.versionType().isVersionConflictForWrites(highestOpVersion, op.version(), docDeleted) == false) {
seqNo++;
assertThat(result.getSeqNo(), equalTo(seqNo));
assertThat(result.isCreated(), equalTo(docDeleted));
assertThat(result.getVersion(), equalTo(op.version()));
assertThat(result.getResultType(), equalTo(Engine.Result.Type.SUCCESS));
assertThat(result.getFailure(), nullValue());
docDeleted = false;
highestOpVersion = op.version();
} else {
assertThat(result.isCreated(), equalTo(false));
assertThat(result.getVersion(), equalTo(highestOpVersion));
assertThat(result.getResultType(), equalTo(Engine.Result.Type.FAILURE));
assertThat(result.getFailure(), instanceOf(VersionConflictEngineException.class));
}
} else {
final Engine.Delete delete = (Engine.Delete) op;
Engine.DeleteResult result = engine.delete(delete);
if (op.versionType().isVersionConflictForWrites(highestOpVersion, op.version(), docDeleted) == false) {
seqNo++;
assertThat(result.getSeqNo(), equalTo(seqNo));
assertThat(result.isFound(), equalTo(docDeleted == false));
assertThat(result.getVersion(), equalTo(op.version()));
assertThat(result.getResultType(), equalTo(Engine.Result.Type.SUCCESS));
assertThat(result.getFailure(), nullValue());
docDeleted = true;
highestOpVersion = op.version();
} else {
assertThat(result.isFound(), equalTo(docDeleted == false));
assertThat(result.getVersion(), equalTo(highestOpVersion));
assertThat(result.getResultType(), equalTo(Engine.Result.Type.FAILURE));
assertThat(result.getFailure(), instanceOf(VersionConflictEngineException.class));
}
}
if (randomBoolean()) {
engine.refresh("test");
}
if (randomBoolean()) {
engine.flush();
engine.refresh("test");
}
}
assertVisibleCount(engine, docDeleted ? 0 : 1);
if (docDeleted == false) {
logger.info("searching for [{}]", lastFieldValue);
try (Searcher searcher = engine.acquireSearcher("test")) {
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(new TermQuery(new Term("value", lastFieldValue)), collector);
assertThat(collector.getTotalHits(), equalTo(1));
}
}
}
@Test
public void testVersioningPromotedReplica() throws IOException {
final List<Engine.Operation> replicaOps = generateSingleDocHistory(
true, VersionType.INTERNAL, false, 1, 2, 20, "1");
List<Engine.Operation> primaryOps = generateSingleDocHistory(
false, VersionType.INTERNAL, false, 2, 2, 20, "1");
Engine.Operation lastReplicaOp = replicaOps.get(replicaOps.size() - 1);
final boolean deletedOnReplica = lastReplicaOp instanceof Engine.Delete;
final long finalReplicaVersion = lastReplicaOp.version();
final long finalReplicaSeqNo = lastReplicaOp.seqNo();
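// replay the replica history first, then apply primary-style ops to the same engine to simulate promotion to primary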
assertOpsOnReplica(replicaOps, replicaEngine, true, logger);
final int opsOnPrimary = assertOpsOnPrimary(primaryOps, finalReplicaVersion, deletedOnReplica, replicaEngine);
final long currentSeqNo = getSequenceID(
replicaEngine,
new Engine.Get(lastReplicaOp.uid().text(), lastReplicaOp.uid())).v1();
try (Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) {
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(new MatchAllDocsQuery(), collector);
if (collector.getTotalHits() > 0) {
// last op wasn't delete
assertThat(currentSeqNo, equalTo(finalReplicaSeqNo + opsOnPrimary));
}
}
}
@Test
public void testConcurrentExternalVersioningOnPrimary() throws IOException, InterruptedException {
final List<Engine.Operation> ops = generateSingleDocHistory(
false, VersionType.EXTERNAL, false, 2, 100, 300, "1");
final Engine.Operation lastOp = ops.get(ops.size() - 1);
final String lastFieldValue;
if (lastOp instanceof Engine.Index) {
Engine.Index index = (Engine.Index) lastOp;
lastFieldValue = index.docs().get(0).get("value");
} else {
// delete
lastFieldValue = null;
}
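// external versioning is idempotent under reordering: applying the same history in any order must converge on the highest version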
shuffle(ops, random());
concurrentlyApplyOps(ops, engine);
assertVisibleCount(engine, lastFieldValue == null ? 0 : 1);
if (lastFieldValue != null) {
try (Searcher searcher = engine.acquireSearcher("test")) {
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(new TermQuery(new Term("value", lastFieldValue)), collector);
assertThat(collector.getTotalHits(), equalTo(1));
}
}
}
@Test
public void testConcurrentGetAndSetOnPrimary() throws IOException, InterruptedException {
Thread[] thread = new Thread[randomIntBetween(3, 5)];
CountDownLatch startGun = new CountDownLatch(thread.length);
final int opsPerThread = randomIntBetween(10, 20);
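// each successful update records (version, removed value, added value) so the history can be checked for linearizability below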
class OpAndVersion {
final long version;
final String removed;
final String added;
OpAndVersion(long version, String removed, String added) {
this.version = version;
this.removed = removed;
this.added = added;
}
}
final AtomicInteger idGenerator = new AtomicInteger();
final Queue<OpAndVersion> history = ConcurrentCollections.newQueue();
ParsedDocument doc = testParsedDocument("1", null, testDocument(), bytesArray(""), null);
final Term uidTerm = newUid(doc);
engine.index(indexForDoc(doc));
final BiFunction<String, Engine.SearcherScope, Searcher> searcherFactory = engine::acquireSearcher;
for (int i = 0; i < thread.length; i++) {
thread[i] = new Thread(() -> {
startGun.countDown();
try {
startGun.await();
} catch (InterruptedException e) {
throw new AssertionError(e);
}
for (int op = 0; op < opsPerThread; op++) {
try (Engine.GetResult get = engine.get(new Engine.Get(doc.id(), uidTerm), searcherFactory)) {
FieldsVisitor visitor = new FieldsVisitor(true);
get.docIdAndVersion().reader.document(get.docIdAndVersion().docId, visitor);
List<String> values = new ArrayList<>(Strings.commaDelimitedListToSet(visitor.source().utf8ToString()));
String removed = op % 3 == 0 && values.size() > 0 ? values.remove(0) : null;
String added = "v_" + idGenerator.incrementAndGet();
values.add(added);
Engine.Index index = new Engine.Index(uidTerm,
testParsedDocument("1", null, testDocument(),
bytesArray(Strings.collectionToCommaDelimitedString(values)), null),
UNASSIGNED_SEQ_NO, 2,
get.docIdAndVersion().version, VersionType.INTERNAL,
PRIMARY, System.currentTimeMillis(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult indexResult = engine.index(index);
if (indexResult.getResultType() == Engine.Result.Type.SUCCESS) {
history.add(new OpAndVersion(indexResult.getVersion(), removed, added));
}
} catch (IOException e) {
throw new AssertionError(e);
}
}
});
thread[i].start();
}
for (int i = 0; i < thread.length; i++) {
thread[i].join();
}
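// verify the recorded history: versions must strictly increase, every removed value must have existed, and every added value must be new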
List<OpAndVersion> sortedHistory = new ArrayList<>(history);
sortedHistory.sort(Comparator.comparing(o -> o.version));
Set<String> currentValues = new HashSet<>();
for (int i = 0; i < sortedHistory.size(); i++) {
OpAndVersion op = sortedHistory.get(i);
if (i > 0) {
assertThat("duplicate version", op.version, not(equalTo(sortedHistory.get(i - 1).version)));
}
boolean exists = op.removed == null || currentValues.remove(op.removed);
assertTrue(op.removed + " should exist", exists);
exists = currentValues.add(op.added);
assertTrue(op.added + " should not exist", exists);
}
try (Engine.GetResult get = engine.get(new Engine.Get(doc.id(), uidTerm), searcherFactory)) {
FieldsVisitor visitor = new FieldsVisitor(true);
get.docIdAndVersion().reader.document(get.docIdAndVersion().docId, visitor);
List<String> values = Arrays.asList(Strings.commaDelimitedListToStringArray(visitor.source().utf8ToString()));
assertThat(currentValues, equalTo(new HashSet<>(values)));
}
}
@Test
public void testBasicCreatedFlag() throws IOException {
ParsedDocument doc = testParsedDocument("1", null, testDocument(), B_1, null);
Engine.Index index = indexForDoc(doc);
Engine.IndexResult indexResult = engine.index(index);
assertTrue(indexResult.isCreated());
index = indexForDoc(doc);
indexResult = engine.index(index);
assertFalse(indexResult.isCreated());
engine.delete(new Engine.Delete(
"1",
newUid(doc),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
));
index = indexForDoc(doc);
indexResult = engine.index(index);
assertTrue(indexResult.isCreated());
}
private static class MockAppender extends AbstractAppender {
public boolean sawIndexWriterMessage;
public boolean sawIndexWriterIFDMessage;
MockAppender(final String name) throws IllegalAccessException {
super(name, RegexFilter.createFilter(".*(\n.*)*", new String[0],
false, null, null), null);
}
@Override
public void append(LogEvent event) {
final String formattedMessage = event.getMessage().getFormattedMessage();
if (event.getLevel() == Level.TRACE && event.getMarker().getName().contains("[index][0]")) {
if (event.getLoggerName().endsWith(".IW") &&
formattedMessage.contains("IW: now apply all deletes")) {
sawIndexWriterMessage = true;
}
if (event.getLoggerName().endsWith(".IFD")) {
sawIndexWriterIFDMessage = true;
}
}
}
}
// #5891: make sure IndexWriter's infoStream output is
// sent to lucene.iw with log level TRACE:
@Test
public void testIndexWriterInfoStream() throws IllegalAccessException, IOException {
assumeFalse("who tests the tester?", VERBOSE);
MockAppender mockAppender = new MockAppender("testIndexWriterInfoStream");
mockAppender.start();
Logger rootLogger = LogManager.getRootLogger();
Level savedLevel = rootLogger.getLevel();
Loggers.addAppender(rootLogger, mockAppender);
Loggers.setLevel(rootLogger, Level.DEBUG);
rootLogger = LogManager.getRootLogger();
try {
// First, with DEBUG, which should NOT log IndexWriter output:
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null);
engine.index(indexForDoc(doc));
engine.flush();
assertFalse(mockAppender.sawIndexWriterMessage);
// Again, with TRACE, which should log IndexWriter output:
Loggers.setLevel(rootLogger, Level.TRACE);
engine.index(indexForDoc(doc));
engine.flush();
assertTrue(mockAppender.sawIndexWriterMessage);
} finally {
Loggers.removeAppender(rootLogger, mockAppender);
mockAppender.stop();
Loggers.setLevel(rootLogger, savedLevel);
}
}
@Test
public void testSeqNoAndCheckpoints() throws IOException, InterruptedException {
final int opCount = randomIntBetween(1, 256);
long primarySeqNo = SequenceNumbers.NO_OPS_PERFORMED;
final String[] ids = new String[]{"1", "2", "3"};
final Set<String> indexedIds = new HashSet<>();
long localCheckpoint = SequenceNumbers.NO_OPS_PERFORMED;
long replicaLocalCheckpoint = SequenceNumbers.NO_OPS_PERFORMED;
final long globalCheckpoint;
long maxSeqNo = SequenceNumbers.NO_OPS_PERFORMED;
IOUtils.close(store, engine);
store = createStore();
InternalEngine initialEngine = null;
try {
initialEngine = createEngine(defaultSettings, store, createTempDir(), newLogMergePolicy(), null);
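// wire a started primary and an initializing replica into the ReplicationTracker so global checkpoint computation can be exercised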
final ShardRouting primary = TestShardRouting.newShardRouting("test",
shardId.id(), "node1", null, true,
ShardRoutingState.STARTED, allocationId);
final ShardRouting initializingReplica =
TestShardRouting.newShardRouting(shardId, "node2", false, ShardRoutingState.INITIALIZING);
ReplicationTracker gcpTracker = (ReplicationTracker) initialEngine.config().getGlobalCheckpointSupplier();
gcpTracker.updateFromMaster(1L, new HashSet<>(Collections.singletonList(primary.allocationId().getId())),
new IndexShardRoutingTable.Builder(shardId).addShard(primary).build());
gcpTracker.activatePrimaryMode(primarySeqNo);
if (defaultSettings.isSoftDeleteEnabled()) {
final CountDownLatch countDownLatch = new CountDownLatch(1);
gcpTracker.addPeerRecoveryRetentionLease(initializingReplica.currentNodeId(),
SequenceNumbers.NO_OPS_PERFORMED, ActionListener.wrap(countDownLatch::countDown));
countDownLatch.await(5, TimeUnit.SECONDS);
}
gcpTracker.updateFromMaster(2L, new HashSet<>(Collections.singletonList(primary.allocationId().getId())),
new IndexShardRoutingTable.Builder(shardId).addShard(primary).addShard(initializingReplica).build());
gcpTracker.initiateTracking(initializingReplica.allocationId().getId());
gcpTracker.markAllocationIdAsInSync(initializingReplica.allocationId().getId(), replicaLocalCheckpoint);
final ShardRouting replica = initializingReplica.moveToStarted();
gcpTracker.updateFromMaster(3L, new HashSet<>(Arrays.asList(primary.allocationId().getId(), replica.allocationId().getId())),
new IndexShardRoutingTable.Builder(shardId).addShard(primary).addShard(replica).build());
for (int op = 0; op < opCount; op++) {
final String id;
// mostly index, sometimes delete
if (rarely() && indexedIds.isEmpty() == false) {
// we have some docs indexed, so delete one of them
id = randomFrom(indexedIds);
final Engine.Delete delete = new Engine.Delete(
id,
newUid(id),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
rarely() ? 100 : Versions.MATCH_ANY,
VersionType.INTERNAL,
PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
);
final Engine.DeleteResult result = initialEngine.delete(delete);
if (result.getResultType() == Engine.Result.Type.SUCCESS) {
assertThat(result.getSeqNo(), equalTo(primarySeqNo + 1));
assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo + 1));
indexedIds.remove(id);
primarySeqNo++;
} else {
assertThat(result.getSeqNo(), equalTo(UNASSIGNED_SEQ_NO));
assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo));
}
} else {
// index a document
id = randomFrom(ids);
ParsedDocument doc = testParsedDocument(id, null, testDocumentWithTextField(), SOURCE, null);
final Engine.Index index = new Engine.Index(newUid(doc), doc,
UNASSIGNED_SEQ_NO, primaryTerm.get(),
rarely() ? 100 : Versions.MATCH_ANY, VersionType.INTERNAL,
PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
final Engine.IndexResult result = initialEngine.index(index);
if (result.getResultType() == Engine.Result.Type.SUCCESS) {
assertThat(result.getSeqNo(), equalTo(primarySeqNo + 1));
assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo + 1));
indexedIds.add(id);
primarySeqNo++;
} else {
assertThat(result.getSeqNo(), equalTo(UNASSIGNED_SEQ_NO));
assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo));
}
}
initialEngine.syncTranslog(); // to advance persisted local checkpoint
if (randomInt(10) < 3) {
// only advance the replica checkpoint occasionally, since this block runs for every doc
replicaLocalCheckpoint = randomIntBetween(Math.toIntExact(replicaLocalCheckpoint), Math.toIntExact(primarySeqNo));
}
gcpTracker.updateLocalCheckpoint(primary.allocationId().getId(),
initialEngine.getPersistedLocalCheckpoint());
gcpTracker.updateLocalCheckpoint(initializingReplica.allocationId().getId(), replicaLocalCheckpoint);
if (rarely()) {
localCheckpoint = primarySeqNo;
maxSeqNo = primarySeqNo;
initialEngine.flush(true, true);
}
}
logger.info("localcheckpoint {}, global {}", replicaLocalCheckpoint, primarySeqNo);
globalCheckpoint = gcpTracker.getGlobalCheckpoint();
assertEquals(primarySeqNo, initialEngine.getSeqNoStats(-1).getMaxSeqNo());
assertEquals(primarySeqNo, initialEngine.getPersistedLocalCheckpoint());
assertThat(globalCheckpoint, equalTo(replicaLocalCheckpoint));
assertThat(
Long.parseLong(initialEngine.commitStats().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)),
equalTo(localCheckpoint));
initialEngine.getTranslog().sync(); // to guarantee the global checkpoint is written to the translog checkpoint
assertThat(
initialEngine.getTranslog().getLastSyncedGlobalCheckpoint(),
equalTo(globalCheckpoint));
assertThat(
Long.parseLong(initialEngine.commitStats().getUserData().get(SequenceNumbers.MAX_SEQ_NO)),
equalTo(maxSeqNo));
} finally {
IOUtils.close(initialEngine);
}
try (InternalEngine recoveringEngine = new InternalEngine(initialEngine.config())) {
recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
assertEquals(primarySeqNo, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo());
assertThat(
Long.parseLong(recoveringEngine.commitStats().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)),
equalTo(primarySeqNo));
assertThat(
recoveringEngine.getTranslog().getLastSyncedGlobalCheckpoint(),
equalTo(globalCheckpoint));
assertThat(
Long.parseLong(recoveringEngine.commitStats().getUserData().get(SequenceNumbers.MAX_SEQ_NO)),
// after recovering from translog, all docs have been flushed to Lucene segments, so here we will assert
// that the committed max seq no is equivalent to what the current primary seq no is, as all data
// we have assigned sequence numbers to should be in the commit
equalTo(primarySeqNo));
assertThat(recoveringEngine.getProcessedLocalCheckpoint(), equalTo(primarySeqNo));
assertThat(recoveringEngine.getPersistedLocalCheckpoint(), equalTo(primarySeqNo));
assertThat(recoveringEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo));
assertThat(generateNewSeqNo(recoveringEngine), equalTo(primarySeqNo + 1));
}
}
// this test writes documents to the engine while concurrently flushing/committing,
// and ensures that the commit points contain the correct sequence number data
@Test
public void testConcurrentWritesAndCommits() throws Exception {
List<Engine.IndexCommitRef> commits = new ArrayList<>();
try (Store store = createStore();
InternalEngine engine = createEngine(config(defaultSettings, store, createTempDir(), newMergePolicy(), null))) {
final int numIndexingThreads = scaledRandomIntBetween(2, 4);
final int numDocsPerThread = randomIntBetween(500, 1000);
final CyclicBarrier barrier = new CyclicBarrier(numIndexingThreads + 1);
final List<Thread> indexingThreads = new ArrayList<>();
final CountDownLatch doneLatch = new CountDownLatch(numIndexingThreads);
// create N indexing threads to index documents simultaneously
for (int threadNum = 0; threadNum < numIndexingThreads; threadNum++) {
final int threadIdx = threadNum;
Thread indexingThread = new Thread(() -> {
try {
barrier.await(); // wait for all threads to start at the same time
// index random number of docs
for (int i = 0; i < numDocsPerThread; i++) {
final String id = "thread" + threadIdx + "#" + i;
ParsedDocument doc = testParsedDocument(id, null, testDocument(), B_1, null);
engine.index(indexForDoc(doc));
}
} catch (Exception e) {
throw new RuntimeException(e);
} finally {
doneLatch.countDown();
}
});
indexingThreads.add(indexingThread);
}
// start the indexing threads
for (Thread thread : indexingThreads) {
thread.start();
}
barrier.await(); // wait for indexing threads to all be ready to start
int commitLimit = randomIntBetween(10, 20);
long sleepTime = 1;
// create random commit points
boolean doneIndexing;
do {
doneIndexing = doneLatch.await(sleepTime, TimeUnit.MILLISECONDS);
commits.add(engine.acquireLastIndexCommit(true));
if (commits.size() > commitLimit) { // don't keep on piling up too many commits
IOUtils.close(commits.remove(randomIntBetween(0, commits.size() - 1)));
// increase the wait time so that, if things are slow, we eventually wait for the indexing threads to finish;
// this reduces pressure on disks and lets threads make progress without piling up too many commits
sleepTime = sleepTime * 2;
}
} while (doneIndexing == false);
// now, verify all the commits have the correct docs according to the user commit data
long prevLocalCheckpoint = SequenceNumbers.NO_OPS_PERFORMED;
long prevMaxSeqNo = SequenceNumbers.NO_OPS_PERFORMED;
for (Engine.IndexCommitRef commitRef : commits) {
final IndexCommit commit = commitRef.getIndexCommit();
Map<String, String> userData = commit.getUserData();
long localCheckpoint = userData.containsKey(SequenceNumbers.LOCAL_CHECKPOINT_KEY) ?
Long.parseLong(userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) :
SequenceNumbers.NO_OPS_PERFORMED;
long maxSeqNo = userData.containsKey(SequenceNumbers.MAX_SEQ_NO) ?
Long.parseLong(userData.get(SequenceNumbers.MAX_SEQ_NO)) :
UNASSIGNED_SEQ_NO;
// local checkpoint and max seq no shouldn't go backwards
assertThat(localCheckpoint, greaterThanOrEqualTo(prevLocalCheckpoint));
assertThat(maxSeqNo, greaterThanOrEqualTo(prevMaxSeqNo));
try (IndexReader reader = DirectoryReader.open(commit)) {
Long highest = getHighestSeqNo(reader);
final long highestSeqNo;
if (highest != null) {
highestSeqNo = highest.longValue();
} else {
highestSeqNo = SequenceNumbers.NO_OPS_PERFORMED;
}
// make sure localCheckpoint <= highest seq no found <= maxSeqNo
assertThat(highestSeqNo, greaterThanOrEqualTo(localCheckpoint));
assertThat(highestSeqNo, lessThanOrEqualTo(maxSeqNo));
// make sure all sequence numbers up to and including the local checkpoint are in the index
FixedBitSet seqNosBitSet = getSeqNosSet(reader, highestSeqNo);
for (int i = 0; i <= localCheckpoint; i++) {
assertTrue("local checkpoint [" + localCheckpoint + "], _seq_no [" + i + "] should be indexed",
seqNosBitSet.get(i));
}
}
prevLocalCheckpoint = localCheckpoint;
prevMaxSeqNo = maxSeqNo;
}
}
}
private static Long getHighestSeqNo(final IndexReader reader) throws IOException {
final String fieldName = SeqNoFieldMapper.NAME;
long size = PointValues.size(reader, fieldName);
if (size == 0) {
return null;
}
byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
return LongPoint.decodeDimension(max, 0);
}
private static FixedBitSet getSeqNosSet(final IndexReader reader, final long highestSeqNo) throws IOException {
// _seq_no are stored as doc values for the time being, so this is how we get them
// (as opposed to using an IndexSearcher or IndexReader)
final FixedBitSet bitSet = new FixedBitSet((int) highestSeqNo + 1);
final List<LeafReaderContext> leaves = reader.leaves();
if (leaves.isEmpty()) {
return bitSet;
}
for (int i = 0; i < leaves.size(); i++) {
final LeafReader leaf = leaves.get(i).reader();
final NumericDocValues values = leaf.getNumericDocValues(SeqNoFieldMapper.NAME);
if (values == null) {
continue;
}
final Bits bits = leaf.getLiveDocs();
for (int docID = 0; docID < leaf.maxDoc(); docID++) {
if (bits == null || bits.get(docID)) {
if (values.advanceExact(docID) == false) {
throw new AssertionError("Document does not have a seq number: " + docID);
}
final long seqNo = values.longValue();
assertFalse("should not have more than one document with the same seq_no[" +
seqNo + "]", bitSet.get((int) seqNo));
bitSet.set((int) seqNo);
}
}
}
return bitSet;
}
// #8603: make sure we can separately log IFD's messages
@Test
public void testIndexWriterIFDInfoStream() throws IllegalAccessException, IOException {
assumeFalse("who tests the tester?", VERBOSE);
MockAppender mockAppender = new MockAppender("testIndexWriterIFDInfoStream");
mockAppender.start();
final Logger iwIFDLogger = LogManager.getLogger("org.elasticsearch.index.engine.Engine.IFD");
Loggers.addAppender(iwIFDLogger, mockAppender);
Loggers.setLevel(iwIFDLogger, Level.DEBUG);
try {
// First, with DEBUG, which should NOT log IndexWriter output:
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null);
engine.index(indexForDoc(doc));
engine.flush();
assertFalse(mockAppender.sawIndexWriterMessage);
assertFalse(mockAppender.sawIndexWriterIFDMessage);
// Again, with TRACE, which should only log IndexWriter IFD output:
Loggers.setLevel(iwIFDLogger, Level.TRACE);
engine.index(indexForDoc(doc));
engine.flush();
assertFalse(mockAppender.sawIndexWriterMessage);
assertTrue(mockAppender.sawIndexWriterIFDMessage);
} finally {
Loggers.removeAppender(iwIFDLogger, mockAppender);
mockAppender.stop();
Loggers.setLevel(iwIFDLogger, (Level) null);
}
}
@Test
public void testEnableGcDeletes() throws Exception {
try (Store store = createStore();
Engine engine = createEngine(config(defaultSettings, store, createTempDir(), newMergePolicy(), null))) {
engine.config().setEnableGcDeletes(false);
final BiFunction<String, Engine.SearcherScope, Searcher> searcherFactory = engine::acquireSearcher;
// Add document
Document document = testDocument();
document.add(new TextField("value", "test1", Field.Store.YES));
ParsedDocument doc = testParsedDocument("1", null, document, B_2, null);
engine.index(new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0, 1,
VersionType.EXTERNAL,
Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0));
// Delete document we just added:
engine.delete(new Engine.Delete(
"1",
newUid(doc),
UNASSIGNED_SEQ_NO,
0,
10,
VersionType.EXTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
));
// Get should not find the document
Engine.GetResult getResult = engine.get(newGet(doc), searcherFactory);
assertThat(getResult.docIdAndVersion(), is(nullValue()));
// Give the gc pruning logic a chance to kick in
Thread.sleep(1000);
if (randomBoolean()) {
engine.refresh("test");
}
// Delete non-existent document
engine.delete(new Engine.Delete(
"2",
newUid("2"),
UNASSIGNED_SEQ_NO,
0,
10,
VersionType.EXTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
));
// Get should not find the document (we never indexed uid=2):
getResult = engine.get(new Engine.Get("2", newUid("2")), searcherFactory);
assertThat(getResult.docIdAndVersion(), is(nullValue()));
// Try to index uid=1 with a too-old version, should fail:
Engine.Index index = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0, 2,
VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult indexResult = engine.index(index);
assertThat(indexResult.getResultType(), equalTo(Engine.Result.Type.FAILURE));
assertThat(indexResult.getFailure(), instanceOf(VersionConflictEngineException.class));
// Get should still not find the document
getResult = engine.get(newGet(doc), searcherFactory);
assertThat(getResult.docIdAndVersion(), is(nullValue()));
// Try to index uid=2 with a too-old version, should fail:
Engine.Index index1 = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0, 2,
VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
indexResult = engine.index(index1);
assertThat(indexResult.getResultType(), equalTo(Engine.Result.Type.FAILURE));
assertThat(indexResult.getFailure(), instanceOf(VersionConflictEngineException.class));
// Get should not find the document
getResult = engine.get(newGet(doc), searcherFactory);
assertThat(getResult.docIdAndVersion(), is(nullValue()));
}
}
@Test
public void testExtractShardId() {
try (Engine.Searcher test = this.engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) {
ShardId shardId = ShardUtils.extractShardId(test.getDirectoryReader());
assertNotNull(shardId);
assertEquals(shardId, engine.config().getShardId());
}
}
/**
* Random test that throws random exceptions and ensures all references are
* counted down / released and all resources are closed.
*/
@Test
public void testFailStart() throws IOException {
// this test fails if any reader, searcher or directory is not closed - MDW FTW
final int iters = scaledRandomIntBetween(10, 100);
for (int i = 0; i < iters; i++) {
MockDirectoryWrapper wrapper = newMockDirectory();
wrapper.setFailOnOpenInput(randomBoolean());
wrapper.setAllowRandomFileNotFoundException(randomBoolean());
wrapper.setRandomIOExceptionRate(randomDouble());
wrapper.setRandomIOExceptionRateOnOpen(randomDouble());
final Path translogPath = createTempDir("testFailStart");
try (Store store = createStore(wrapper)) {
int refCount = store.refCount();
assertTrue("refCount: " + store.refCount(), store.refCount() > 0);
InternalEngine holder;
try {
holder = createEngine(store, translogPath);
} catch (EngineCreationFailureException | IOException ex) {
assertEquals(store.refCount(), refCount);
continue;
}
assertEquals(store.refCount(), refCount + 1);
final int numStarts = scaledRandomIntBetween(1, 5);
for (int j = 0; j < numStarts; j++) {
try {
assertEquals(store.refCount(), refCount + 1);
holder.close();
holder = createEngine(store, translogPath);
assertEquals(store.refCount(), refCount + 1);
} catch (EngineCreationFailureException ex) {
// all is fine
assertEquals(store.refCount(), refCount);
break;
}
}
holder.close();
assertEquals(store.refCount(), refCount);
}
}
}
@Test
public void testSettings() {
CodecService codecService = new CodecService(null, logger);
LiveIndexWriterConfig currentIndexWriterConfig = engine.getCurrentIndexWriterConfig();
assertEquals(engine.config().getCodec().getName(), codecService.codec(codecName).getName());
assertEquals(currentIndexWriterConfig.getCodec().getName(), codecService.codec(codecName).getName());
}
@Test
public void testCurrentTranslogIDisCommitted() throws IOException {
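// verifies that the translog UUID recorded in the last Lucene commit matches the live translog across create, recovery and new-translog scenarios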
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
try (Store store = createStore()) {
EngineConfig config = config(defaultSettings, store, createTempDir(), newMergePolicy(), null, null,
globalCheckpoint::get);
// create
{
store.createEmpty(Version.CURRENT.luceneVersion);
final String translogUUID =
Translog.createEmptyTranslog(config.getTranslogConfig().getTranslogPath(),
SequenceNumbers.NO_OPS_PERFORMED, shardId, primaryTerm.get());
store.associateIndexWithNewTranslog(translogUUID);
ParsedDocument doc = testParsedDocument(Integer.toString(0), null, testDocument(),
new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0,
Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
try (InternalEngine engine = createEngine(config)) {
engine.index(firstIndexRequest);
engine.syncTranslog(); // to advance persisted local checkpoint
assertEquals(engine.getProcessedLocalCheckpoint(), engine.getPersistedLocalCheckpoint());
globalCheckpoint.set(engine.getPersistedLocalCheckpoint());
expectThrows(IllegalStateException.class, () -> engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE));
Map<String, String> userData = engine.getLastCommittedSegmentInfos().getUserData();
assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY));
}
}
// open and recover tlog
{
for (int i = 0; i < 2; i++) {
try (InternalEngine engine = new InternalEngine(config)) {
expectThrows(IllegalStateException.class, engine::ensureCanFlush);
Map<String, String> userData = engine.getLastCommittedSegmentInfos().getUserData();
assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY));
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
userData = engine.getLastCommittedSegmentInfos().getUserData();
assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY));
}
}
}
// open index with new tlog
{
final String translogUUID =
Translog.createEmptyTranslog(config.getTranslogConfig().getTranslogPath(),
SequenceNumbers.NO_OPS_PERFORMED, shardId, primaryTerm.get());
store.associateIndexWithNewTranslog(translogUUID);
try (InternalEngine engine = new InternalEngine(config)) {
Map<String, String> userData = engine.getLastCommittedSegmentInfos().getUserData();
assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY));
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
assertEquals(2, engine.getTranslog().currentFileGeneration());
}
}
// open and recover tlog with empty tlog
{
for (int i = 0; i < 2; i++) {
try (InternalEngine engine = new InternalEngine(config)) {
Map<String, String> userData = engine.getLastCommittedSegmentInfos().getUserData();
assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY));
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
userData = engine.getLastCommittedSegmentInfos().getUserData();
assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY));
}
}
}
}
}
@Test
public void testMissingTranslog() throws IOException {
// test that we can force start the engine, even if the translog is missing.
engine.close();
// fake a new translog, causing the engine to point to a missing one.
final long newPrimaryTerm = randomLongBetween(0L, primaryTerm.get());
final Translog translog = createTranslog(() -> newPrimaryTerm);
long id = translog.currentFileGeneration();
translog.close();
IOUtils.rm(translog.location().resolve(Translog.getFilename(id)));
expectThrows(EngineCreationFailureException.class, "engine shouldn't start without a valid translog id",
() -> createEngine(store, primaryTranslogDir));
// when a new translog is created it should be ok
final String translogUUID = Translog.createEmptyTranslog(primaryTranslogDir, UNASSIGNED_SEQ_NO, shardId, newPrimaryTerm);
store.associateIndexWithNewTranslog(translogUUID);
EngineConfig config = config(defaultSettings, store, primaryTranslogDir, newMergePolicy(), null);
engine = new InternalEngine(config);
}
@Test
public void testTranslogReplayWithFailure() throws IOException {
final MockDirectoryWrapper directory = newMockDirectory();
final Path translogPath = createTempDir("testTranslogReplayWithFailure");
try (Store store = createStore(directory)) {
final int numDocs = randomIntBetween(1, 10);
try (InternalEngine engine = createEngine(store, translogPath)) {
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0,
Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult indexResult = engine.index(firstIndexRequest);
assertThat(indexResult.getVersion(), equalTo(1L));
}
assertVisibleCount(engine, numDocs);
}
// since we roll back the IW we end up writing the same segment files again after restarting it, but MDW
// prevents this, so we have to disable the check explicitly
final int numIters = randomIntBetween(3, 5);
for (int i = 0; i < numIters; i++) {
directory.setRandomIOExceptionRateOnOpen(randomDouble());
directory.setRandomIOExceptionRate(randomDouble());
directory.setFailOnOpenInput(randomBoolean());
directory.setAllowRandomFileNotFoundException(randomBoolean());
boolean started = false;
InternalEngine engine = null;
try {
engine = createEngine(store, translogPath);
started = true;
} catch (EngineException | IOException e) {
logger.trace("exception on open", e);
}
directory.setRandomIOExceptionRateOnOpen(0.0);
directory.setRandomIOExceptionRate(0.0);
directory.setFailOnOpenInput(false);
directory.setAllowRandomFileNotFoundException(false);
if (started) {
engine.refresh("warm_up");
assertVisibleCount(engine, numDocs, false);
engine.close();
}
}
}
}
@Test
public void testTranslogCleanUpPostCommitCrash() throws Exception {
IndexSettings indexSettings = new IndexSettings(defaultSettings.getIndexMetadata(), defaultSettings.getNodeSettings(),
defaultSettings.getScopedSettings());
IndexMetadata.Builder builder = IndexMetadata.builder(indexSettings.getIndexMetadata());
builder.settings(Settings.builder().put(indexSettings.getSettings())
.put(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.getKey(), "-1")
.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), "-1")
);
indexSettings.updateIndexMetadata(builder.build());
try (Store store = createStore()) {
AtomicBoolean throwErrorOnCommit = new AtomicBoolean();
final Path translogPath = createTempDir();
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
final LongSupplier globalCheckpointSupplier = globalCheckpoint::get;
store.createEmpty(Version.CURRENT.luceneVersion);
final String translogUUID = Translog.createEmptyTranslog(translogPath, globalCheckpoint.get(), shardId, primaryTerm.get());
store.associateIndexWithNewTranslog(translogUUID);
try (InternalEngine engine =
new InternalEngine(config(indexSettings, store, translogPath, newMergePolicy(), null, null,
globalCheckpointSupplier)) {
@Override
protected void commitIndexWriter(IndexWriter writer, Translog translog, String syncId) throws IOException {
super.commitIndexWriter(writer, translog, syncId);
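// fail only after the commit itself succeeded, simulating a crash between a durable commit and post-commit translog cleanup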
if (throwErrorOnCommit.get()) {
throw new RuntimeException("power's out");
}
}
}) {
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
final ParsedDocument doc1 = testParsedDocument("1", null,
testDocumentWithTextField(), SOURCE, null);
engine.index(indexForDoc(doc1));
engine.syncTranslog(); // to advance local checkpoint
assertEquals(engine.getProcessedLocalCheckpoint(), engine.getPersistedLocalCheckpoint());
globalCheckpoint.set(engine.getPersistedLocalCheckpoint());
throwErrorOnCommit.set(true);
FlushFailedEngineException e = expectThrows(FlushFailedEngineException.class, engine::flush);
assertThat(e.getCause().getMessage(), equalTo("power's out"));
}
try (InternalEngine engine =
new InternalEngine(config(indexSettings, store, translogPath, newMergePolicy(), null, null,
globalCheckpointSupplier))) {
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
assertVisibleCount(engine, 1);
final long localCheckpoint = Long.parseLong(
engine.getLastCommittedSegmentInfos().userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY));
final long committedGen = engine.getTranslog().getMinGenerationForSeqNo(localCheckpoint + 1).translogFileGeneration;
for (int gen = 1; gen < committedGen; gen++) {
final Path genFile = translogPath.resolve(Translog.getFilename(gen));
assertFalse(genFile + " wasn't cleaned up", Files.exists(genFile));
}
}
}
}
@Test
public void testSkipTranslogReplay() throws IOException {
final int numDocs = randomIntBetween(1, 10);
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0,
Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult indexResult = engine.index(firstIndexRequest);
assertThat(indexResult.getVersion(), equalTo(1L));
}
EngineConfig config = engine.config();
assertVisibleCount(engine, numDocs);
engine.close();
try (InternalEngine engine = new InternalEngine(config)) {
engine.skipTranslogRecovery();
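// translog operations are intentionally not replayed, so the reopened engine must be empty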
try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10));
assertThat(topDocs.totalHits.value, equalTo(0L));
}
}
}
@Test
public void testTranslogReplay() throws IOException {
final LongSupplier inSyncGlobalCheckpointSupplier = () -> this.engine.getProcessedLocalCheckpoint();
final int numDocs = randomIntBetween(1, 10);
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0,
Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult indexResult = engine.index(firstIndexRequest);
assertThat(indexResult.getVersion(), equalTo(1L));
}
assertVisibleCount(engine, numDocs);
translogHandler = createTranslogHandler(engine.engineConfig.getIndexSettings());
engine.close();
// we need to reuse the engine config, otherwise parser.mappingModified won't work
engine = new InternalEngine(copy(engine.config(), inSyncGlobalCheckpointSupplier));
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
engine.refresh("warm_up");
assertVisibleCount(engine, numDocs, false);
engine.close();
translogHandler = createTranslogHandler(engine.engineConfig.getIndexSettings());
engine = createEngine(store, primaryTranslogDir, inSyncGlobalCheckpointSupplier);
engine.refresh("warm_up");
assertVisibleCount(engine, numDocs, false);
final boolean flush = randomBoolean();
int randomId = randomIntBetween(numDocs + 1, numDocs + 10);
ParsedDocument doc = testParsedDocument(Integer.toString(randomId), null, testDocument(), new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0, 1,
VersionType.EXTERNAL, PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult indexResult = engine.index(firstIndexRequest);
assertThat(indexResult.getVersion(), equalTo(1L));
if (flush) {
engine.flush();
engine.refresh("test");
}
doc = testParsedDocument(Integer.toString(randomId), null, testDocument(), new BytesArray("{}"), null);
Engine.Index idxRequest = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0, 2,
VersionType.EXTERNAL, PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult result = engine.index(idxRequest);
engine.refresh("test");
assertThat(result.getVersion(), equalTo(2L));
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs + 1);
assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L));
}
engine.close();
translogHandler = createTranslogHandler(engine.engineConfig.getIndexSettings());
engine = createEngine(store, primaryTranslogDir, inSyncGlobalCheckpointSupplier);
engine.refresh("warm_up");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs + 1);
assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L));
}
engine.delete(new Engine.Delete(
Integer.toString(randomId),
newUid(doc),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
));
if (randomBoolean()) {
engine.close();
engine = createEngine(store, primaryTranslogDir, inSyncGlobalCheckpointSupplier);
}
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs);
assertThat(topDocs.totalHits.value, equalTo((long) numDocs));
}
}
@Test
public void testRecoverFromForeignTranslog() throws IOException {
final int numDocs = randomIntBetween(1, 10);
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0,
Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult index = engine.index(firstIndexRequest);
assertThat(index.getVersion(), equalTo(1L));
}
assertVisibleCount(engine, numDocs);
Translog.TranslogGeneration generation = engine.getTranslog().getGeneration();
engine.close();
final Path badTranslogLog = createTempDir();
final String badUUID = Translog.createEmptyTranslog(badTranslogLog, SequenceNumbers.NO_OPS_PERFORMED, shardId, primaryTerm.get());
Translog translog = new Translog(
new TranslogConfig(shardId, badTranslogLog, INDEX_SETTINGS, BigArrays.NON_RECYCLING_INSTANCE),
badUUID, createTranslogDeletionPolicy(INDEX_SETTINGS), () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get, seqNo -> {});
translog.add(new Translog.Index(
"SomeBogusId",
0,
primaryTerm.get(),
"{}".getBytes(Charset.forName("UTF-8"))));
assertEquals(generation.translogFileGeneration, translog.currentFileGeneration());
translog.close();
EngineConfig config = engine.config();
/* create a TranslogConfig that points at a translog created with a different UUID */
TranslogConfig translogConfig = new TranslogConfig(shardId, translog.location(), config.getIndexSettings(),
BigArrays.NON_RECYCLING_INSTANCE);
EngineConfig brokenConfig = new EngineConfig(
shardId,
allocationId.getId(),
threadPool,
config.getIndexSettings(),
store,
newMergePolicy(),
config.getAnalyzer(),
new CodecService(null, logger),
config.getEventListener(),
IndexSearcher.getDefaultQueryCache(),
IndexSearcher.getDefaultQueryCachingPolicy(),
translogConfig,
TimeValue.timeValueMinutes(5),
config.getExternalRefreshListener(),
config.getInternalRefreshListener(),
new NoneCircuitBreakerService(),
() -> UNASSIGNED_SEQ_NO,
() -> RetentionLeases.EMPTY,
primaryTerm::get,
tombstoneDocSupplier()
);
expectThrows(EngineCreationFailureException.class, () -> new InternalEngine(brokenConfig));
engine = createEngine(store, primaryTranslogDir); // and recover again!
assertVisibleCount(engine, numDocs, true);
}
@Test
public void testShardNotAvailableExceptionWhenEngineClosedConcurrently() throws IOException, InterruptedException {
AtomicReference<Exception> exception = new AtomicReference<>();
String operation = randomFrom("optimize", "refresh", "flush");
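// hammer one randomly chosen maintenance operation in a loop until closing the engine from the main thread makes it fail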
Thread mergeThread = new Thread() {
@Override
public void run() {
boolean stop = false;
logger.info("try with {}", operation);
while (stop == false) {
try {
switch (operation) {
case "optimize": {
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
break;
}
case "refresh": {
engine.refresh("test refresh");
break;
}
case "flush": {
engine.flush(true, false);
break;
}
}
} catch (Exception e) {
exception.set(e);
stop = true;
}
}
}
};
mergeThread.start();
engine.close();
mergeThread.join();
logger.info("exception caught: ", exception.get());
assertTrue("expected an Exception that signals shard is not available",
TransportActions.isShardNotAvailableException(exception.get()));
}
/**
* Tests that when the close method returns, the engine is guaranteed to have cleaned up and all resources are closed
*/
@Test
public void testConcurrentEngineClosed() throws BrokenBarrierException, InterruptedException {
Thread[] closingThreads = new Thread[3];
CyclicBarrier barrier = new CyclicBarrier(1 + closingThreads.length + 1);
Thread failEngine = new Thread(new AbstractRunnable() {
@Override
public void onFailure(Exception e) {
throw new AssertionError(e);
}
@Override
protected void doRun() throws Exception {
barrier.await();
engine.failEngine("test", new RuntimeException("test"));
}
});
failEngine.start();
for (int i = 0; i < closingThreads.length; i++) {
boolean flushAndClose = randomBoolean();
closingThreads[i] = new Thread(new AbstractRunnable() {
@Override
public void onFailure(Exception e) {
throw new AssertionError(e);
}
@Override
protected void doRun() throws Exception {
barrier.await();
if (flushAndClose) {
engine.flushAndClose();
} else {
engine.close();
}
// try to acquire the writer lock - i.e., everything is closed, we need to synchronize
// to avoid races between closing threads
synchronized (closingThreads) {
try (Lock ignored = store.directory().obtainLock(IndexWriter.WRITE_LOCK_NAME)) {
// all good.
}
}
}
});
closingThreads[i].setName("closingThread_" + i);
closingThreads[i].start();
}
barrier.await();
failEngine.join();
for (Thread t : closingThreads) {
t.join();
}
}
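// An IndexWriter that can be armed with a one-shot failure thrown from add/update/delete, used to simulate document-level and tragic failures.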
private static class ThrowingIndexWriter extends IndexWriter {
private AtomicReference<Supplier<Exception>> failureToThrow = new AtomicReference<>();
ThrowingIndexWriter(Directory d, IndexWriterConfig conf) throws IOException {
super(d, conf);
}
@Override
public long addDocument(Iterable<? extends IndexableField> doc) throws IOException {
maybeThrowFailure();
return super.addDocument(doc);
}
private void maybeThrowFailure() throws IOException {
if (failureToThrow.get() != null) {
Exception failure = failureToThrow.get().get();
clearFailure(); // one shot
if (failure instanceof RuntimeException) {
throw (RuntimeException) failure;
} else if (failure instanceof IOException) {
throw (IOException) failure;
} else {
assert false: "unsupported failure class: " + failure.getClass().getCanonicalName();
}
}
}
@Override
public long softUpdateDocument(Term term, Iterable<? extends IndexableField> doc, Field... softDeletes) throws IOException {
maybeThrowFailure();
return super.softUpdateDocument(term, doc, softDeletes);
}
@Override
public long deleteDocuments(Term... terms) throws IOException {
maybeThrowFailure();
return super.deleteDocuments(terms);
}
public void setThrowFailure(Supplier<Exception> failureSupplier) {
failureToThrow.set(failureSupplier);
}
public void clearFailure() {
failureToThrow.set(null);
}
}
@Test
public void testHandleDocumentFailure() throws Exception {
try (Store store = createStore()) {
final ParsedDocument doc1 = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null);
final ParsedDocument doc2 = testParsedDocument("2", null, testDocumentWithTextField(), B_1, null);
final ParsedDocument doc3 = testParsedDocument("3", null, testDocumentWithTextField(), B_1, null);
AtomicReference<ThrowingIndexWriter> throwingIndexWriter = new AtomicReference<>();
try (InternalEngine engine = createEngine(
defaultSettings,
store,
createTempDir(),
NoMergePolicy.INSTANCE,
(directory, iwc) -> {
throwingIndexWriter.set(new ThrowingIndexWriter(directory, iwc));
return throwingIndexWriter.get();
})
) {
// test document failure while indexing
if (randomBoolean()) {
throwingIndexWriter.get().setThrowFailure(() -> new IOException("simulated"));
} else {
throwingIndexWriter.get().setThrowFailure(() -> new IllegalArgumentException("simulated max token length"));
}
// test index with document failure
Engine.IndexResult indexResult = engine.index(indexForDoc(doc1));
assertNotNull(indexResult.getFailure());
assertThat(indexResult.getSeqNo(), equalTo(0L));
assertThat(indexResult.getVersion(), equalTo(Versions.MATCH_ANY));
assertNotNull(indexResult.getTranslogLocation());
throwingIndexWriter.get().clearFailure();
indexResult = engine.index(indexForDoc(doc1));
assertThat(indexResult.getSeqNo(), equalTo(1L));
assertThat(indexResult.getVersion(), equalTo(1L));
assertNull(indexResult.getFailure());
assertNotNull(indexResult.getTranslogLocation());
engine.index(indexForDoc(doc2));
// test non document level failure is thrown
if (randomBoolean()) {
// simulate close by corruption
throwingIndexWriter.get().setThrowFailure(null);
UncheckedIOException uncheckedIOException = expectThrows(UncheckedIOException.class, () -> {
Engine.Index index = indexForDoc(doc3);
index.parsedDoc().rootDoc().add(new StoredField("foo", "bar") {
// this is a hack that injects a failure while the document is being stored, which triggers
// a tragic event and in turn fails the engine
@Override
public BytesRef binaryValue() {
throw new UncheckedIOException(new MockDirectoryWrapper.FakeIOException());
}
});
engine.index(index);
});
assertTrue(uncheckedIOException.getCause() instanceof MockDirectoryWrapper.FakeIOException);
} else {
// normal close
engine.close();
}
// now that the engine is closed, check that we respond correctly
expectThrows(AlreadyClosedException.class, () -> engine.index(indexForDoc(doc1)));
expectThrows(AlreadyClosedException.class,
() -> engine.delete(new Engine.Delete(
"1",
newUid(doc1),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0)));
expectThrows(AlreadyClosedException.class,
() -> engine.noOp(
new Engine.NoOp(engine.getLocalCheckpointTracker().generateSeqNo(),
engine.config().getPrimaryTermSupplier().getAsLong(),
randomFrom(Engine.Operation.Origin.values()),
randomNonNegativeLong(),
"test")));
}
}
}
@Test
public void testDeleteWithFatalError() throws Exception {
final IllegalStateException tragicException = new IllegalStateException("fail to store tombstone");
try (Store store = createStore()) {
EngineConfig.TombstoneDocSupplier tombstoneDocSupplier = new EngineConfig.TombstoneDocSupplier() {
@Override
public ParsedDocument newDeleteTombstoneDoc(String id) {
ParsedDocument parsedDocument = tombstoneDocSupplier().newDeleteTombstoneDoc(id);
parsedDocument.rootDoc().add(new StoredField("foo", "bar") {
// this is a hack that injects a failure while the document is being stored, which triggers
// a tragic event and in turn fails the engine
@Override
public BytesRef binaryValue() {
throw tragicException;
}
});
return parsedDocument;
}
@Override
public ParsedDocument newNoopTombstoneDoc(String reason) {
return tombstoneDocSupplier().newNoopTombstoneDoc(reason);
}
};
EngineConfig config = config(this.engine.config(), store, createTempDir(), tombstoneDocSupplier);
try (InternalEngine engine = createEngine(config)) {
final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null);
engine.index(indexForDoc(doc));
expectThrows(IllegalStateException.class, () -> engine.delete(
new Engine.Delete(
"1",
newUid(doc),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
)));
assertTrue(engine.isClosed.get());
assertSame(tragicException, engine.failedEngine.get());
}
}
}
@Test
public void testDoubleDeliveryPrimary() throws IOException {
final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
final boolean create = randomBoolean();
Engine.Index operation = appendOnlyPrimary(doc, false, 1, create);
Engine.Index retry = appendOnlyPrimary(doc, true, 1, create);
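// a retried append-only "create" must be deduplicated (its retry gets no translog location), while a non-create retry becomes an update in Lucene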
if (randomBoolean()) {
Engine.IndexResult indexResult = engine.index(operation);
assertLuceneOperations(engine, 1, 0, 0);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(0L));
assertNotNull(indexResult.getTranslogLocation());
Engine.IndexResult retryResult = engine.index(retry);
assertLuceneOperations(engine, 1, create ? 0 : 1, 0);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(1L));
if (create) {
assertNull(retryResult.getTranslogLocation());
} else {
assertNotNull(retryResult.getTranslogLocation());
}
} else {
Engine.IndexResult retryResult = engine.index(retry);
assertLuceneOperations(engine, 1, 0, 0);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(1L));
assertNotNull(retryResult.getTranslogLocation());
Engine.IndexResult indexResult = engine.index(operation);
assertLuceneOperations(engine, 1, create ? 0 : 1, 0);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(2L));
assertNotNull(retryResult.getTranslogLocation());
if (create) {
assertNull(indexResult.getTranslogLocation());
} else {
assertNotNull(indexResult.getTranslogLocation());
}
}
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
assertEquals(1, topDocs.totalHits.value);
}
operation = appendOnlyPrimary(doc, false, 1, create);
retry = appendOnlyPrimary(doc, true, 1, create);
if (randomBoolean()) {
Engine.IndexResult indexResult = engine.index(operation);
if (create) {
assertNull(indexResult.getTranslogLocation());
} else {
assertNotNull(indexResult.getTranslogLocation());
}
Engine.IndexResult retryResult = engine.index(retry);
if (create) {
assertNull(retryResult.getTranslogLocation());
} else {
assertNotNull(retryResult.getTranslogLocation());
}
} else {
Engine.IndexResult retryResult = engine.index(retry);
if (create) {
assertNull(retryResult.getTranslogLocation());
} else {
assertNotNull(retryResult.getTranslogLocation());
}
Engine.IndexResult indexResult = engine.index(operation);
if (create) {
assertNull(indexResult.getTranslogLocation());
} else {
assertNotNull(indexResult.getTranslogLocation());
}
}
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
assertEquals(1, topDocs.totalHits.value);
}
}
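/**
 * Verifies that a retried append-only operation arriving around a delete with a higher seq# does not
 * resurrect the document on a replica.
 */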
@Test
public void testDoubleDeliveryReplicaAppendingAndDeleteOnly() throws IOException {
final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
Engine.Index operation = appendOnlyReplica(doc, false, 1, randomIntBetween(0, 5));
Engine.Index retry = appendOnlyReplica(doc, true, 1, randomIntBetween(0, 5));
Engine.Delete delete = new Engine.Delete(
operation.id(),
operation.uid(),
Math.max(retry.seqNo(), operation.seqNo()) + 1,
operation.primaryTerm(),
operation.version() + 1,
operation.versionType(),
REPLICA,
operation.startTime() + 1,
UNASSIGNED_SEQ_NO,
0
);
// operations with a seq# equal to or lower than the local checkpoint are not indexed to Lucene
// and the version lookup is skipped
final boolean sameSeqNo = operation.seqNo() == retry.seqNo();
if (randomBoolean()) {
Engine.IndexResult indexResult = engine.index(operation);
assertLuceneOperations(engine, 1, 0, 0);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(0L));
assertNotNull(indexResult.getTranslogLocation());
engine.delete(delete);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(1L));
assertLuceneOperations(engine, 1, 0, 1);
Engine.IndexResult retryResult = engine.index(retry);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(sameSeqNo ? 1L : 2L));
assertNotNull(retryResult.getTranslogLocation());
assertTrue(retryResult.getTranslogLocation().compareTo(indexResult.getTranslogLocation()) > 0);
} else {
Engine.IndexResult retryResult = engine.index(retry);
assertLuceneOperations(engine, 1, 0, 0);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(0L));
assertNotNull(retryResult.getTranslogLocation());
engine.delete(delete);
assertLuceneOperations(engine, 1, 0, 1);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(1L));
Engine.IndexResult indexResult = engine.index(operation);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(sameSeqNo ? 1L : 2L));
assertNotNull(indexResult.getTranslogLocation());
assertTrue(retryResult.getTranslogLocation().compareTo(indexResult.getTranslogLocation()) < 0);
}
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
assertEquals(0, topDocs.totalHits.value);
}
}
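/**
 * Simulates a replica that is promoted to primary: an append-only operation delivered while the shard
 * was a replica must not be duplicated when the operation (or its retry) arrives again on the new primary.
 */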
@Test
public void testDoubleDeliveryReplicaAppendingOnly() throws IOException {
final Supplier<ParsedDocument> doc = () -> testParsedDocument("1", null, testDocumentWithTextField(),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
boolean replicaOperationIsRetry = randomBoolean();
Engine.Index operation = appendOnlyReplica(doc.get(), replicaOperationIsRetry, 1, randomIntBetween(0, 5));
Engine.IndexResult result = engine.index(operation);
assertLuceneOperations(engine, 1, 0, 0);
assertEquals(0, engine.getNumVersionLookups());
assertNotNull(result.getTranslogLocation());
// promote to primary: first do refresh
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
assertEquals(1, topDocs.totalHits.value);
}
final boolean create = randomBoolean();
operation = appendOnlyPrimary(doc.get(), false, 1, create);
Engine.Index retry = appendOnlyPrimary(doc.get(), true, 1, create);
if (randomBoolean()) {
// if the replica operation wasn't a retry, the operation arriving on the newly promoted primary must be a retry
if (replicaOperationIsRetry) {
Engine.IndexResult indexResult = engine.index(operation);
if (create) {
assertNull(indexResult.getTranslogLocation());
} else {
assertNotNull(indexResult.getTranslogLocation());
}
}
Engine.IndexResult retryResult = engine.index(retry);
if (create) {
assertNull(retryResult.getTranslogLocation());
} else {
assertNotNull(retryResult.getTranslogLocation());
}
} else {
Engine.IndexResult retryResult = engine.index(retry);
if (create) {
assertNull(retryResult.getTranslogLocation());
} else {
assertNotNull(retryResult.getTranslogLocation());
}
Engine.IndexResult indexResult = engine.index(operation);
if (create) {
assertNull(indexResult.getTranslogLocation());
} else {
assertNotNull(indexResult.getTranslogLocation());
}
}
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
assertEquals(1, topDocs.totalHits.value);
}
}
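/**
 * Verifies that delivering the same replica operation twice (same seq#) indexes the document into
 * Lucene only once and skips version lookups entirely.
 */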
@Test
public void testDoubleDeliveryReplica() throws IOException {
final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
Engine.Index operation = replicaIndexForDoc(doc, 1, 20, false);
Engine.Index duplicate = replicaIndexForDoc(doc, 1, 20, true);
if (randomBoolean()) {
Engine.IndexResult indexResult = engine.index(operation);
assertLuceneOperations(engine, 1, 0, 0);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(0L));
assertNotNull(indexResult.getTranslogLocation());
if (randomBoolean()) {
engine.refresh("test");
}
Engine.IndexResult retryResult = engine.index(duplicate);
assertLuceneOperations(engine, 1, 0, 0);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(0L));
assertNotNull(retryResult.getTranslogLocation());
assertTrue(retryResult.getTranslogLocation().compareTo(indexResult.getTranslogLocation()) > 0);
} else {
Engine.IndexResult retryResult = engine.index(duplicate);
assertLuceneOperations(engine, 1, 0, 0);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(0L));
assertNotNull(retryResult.getTranslogLocation());
if (randomBoolean()) {
engine.refresh("test");
}
Engine.IndexResult indexResult = engine.index(operation);
assertLuceneOperations(engine, 1, 0, 0);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(0L));
assertNotNull(indexResult.getTranslogLocation());
assertTrue(retryResult.getTranslogLocation().compareTo(indexResult.getTranslogLocation()) < 0);
}
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
assertEquals(1, topDocs.totalHits.value);
}
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
assertEquals(1, topDocs.totalHits.value);
}
if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) {
List<Translog.Operation> ops = readAllOperationsInLucene(engine, createMapperService("test"));
assertThat(ops.stream().map(o -> o.seqNo()).collect(Collectors.toList()), hasItem(20L));
}
}
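/**
 * Verifies that a retried index request with an auto-generated id does not create a duplicate document
 * on either the primary or the replica.
 */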
@Test
public void testRetryWithAutogeneratedIdWorksAndNoDuplicateDocs() throws IOException {
final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
boolean isRetry = false;
long autoGeneratedIdTimestamp = 0;
Engine.Index index = new Engine.Index(
newUid(doc),
doc,
UNASSIGNED_SEQ_NO,
0,
randomBoolean() ? Versions.MATCH_DELETED : Versions.MATCH_ANY,
VersionType.INTERNAL,
PRIMARY,
System.nanoTime(),
autoGeneratedIdTimestamp,
isRetry,
UNASSIGNED_SEQ_NO,
0
);
Engine.IndexResult indexResult = engine.index(index);
assertThat(indexResult.getVersion(), equalTo(1L));
index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(),
null, REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry, UNASSIGNED_SEQ_NO, 0);
indexResult = replicaEngine.index(index);
assertThat(indexResult.getVersion(), equalTo(1L));
isRetry = true;
index = new Engine.Index(
newUid(doc),
doc,
UNASSIGNED_SEQ_NO,
0,
Versions.MATCH_ANY,
VersionType.INTERNAL,
PRIMARY,
System.nanoTime(),
autoGeneratedIdTimestamp,
isRetry,
UNASSIGNED_SEQ_NO,
0
);
indexResult = engine.index(index);
assertThat(indexResult.getVersion(), equalTo(1L));
assertNotEquals(indexResult.getSeqNo(), UNASSIGNED_SEQ_NO);
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
assertEquals(1, topDocs.totalHits.value);
}
index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(),
null, REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry, UNASSIGNED_SEQ_NO, 0);
indexResult = replicaEngine.index(index);
assertThat(indexResult.getResultType(), equalTo(Engine.Result.Type.SUCCESS));
replicaEngine.refresh("test");
try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
assertEquals(1, topDocs.totalHits.value);
}
}
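/**
 * Verifies that no duplicate documents are created when the retry of an auto-generated-id request
 * arrives before the original request, on both the primary and the replica.
 */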
@Test
public void testRetryWithAutogeneratedIdsAndWrongOrderWorksAndNoDuplicateDocs() throws IOException {
final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
boolean isRetry = true;
long autoGeneratedIdTimestamp = 0;
Engine.Index firstIndexRequest = new Engine.Index(
newUid(doc),
doc,
UNASSIGNED_SEQ_NO,
0,
randomBoolean() ? Versions.MATCH_DELETED : Versions.MATCH_ANY,
VersionType.INTERNAL,
PRIMARY,
System.nanoTime(),
autoGeneratedIdTimestamp,
isRetry,
UNASSIGNED_SEQ_NO,
0
);
Engine.IndexResult result = engine.index(firstIndexRequest);
assertThat(result.getVersion(), equalTo(1L));
Engine.Index firstIndexRequestReplica = new Engine.Index(newUid(doc), doc, result.getSeqNo(), firstIndexRequest.primaryTerm(),
result.getVersion(), null, REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult indexReplicaResult = replicaEngine.index(firstIndexRequestReplica);
assertThat(indexReplicaResult.getVersion(), equalTo(1L));
isRetry = false;
Engine.Index secondIndexRequest = new Engine.Index(
newUid(doc),
doc,
UNASSIGNED_SEQ_NO,
0,
Versions.MATCH_DELETED,
VersionType.INTERNAL,
PRIMARY,
System.nanoTime(),
autoGeneratedIdTimestamp,
isRetry,
UNASSIGNED_SEQ_NO,
0);
Engine.IndexResult indexResult = engine.index(secondIndexRequest);
assertFalse(indexResult.isCreated());
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
assertEquals(1, topDocs.totalHits.value);
}
Engine.Index secondIndexRequestReplica = new Engine.Index(newUid(doc), doc, result.getSeqNo(), secondIndexRequest.primaryTerm(),
result.getVersion(), null, REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry, UNASSIGNED_SEQ_NO, 0);
replicaEngine.index(secondIndexRequestReplica);
replicaEngine.refresh("test");
try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
assertEquals(1, topDocs.totalHits.value);
}
}
public Engine.Index randomAppendOnly(ParsedDocument doc, boolean retry, final long autoGeneratedIdTimestamp) {
if (randomBoolean()) {
return appendOnlyPrimary(doc, retry, autoGeneratedIdTimestamp);
} else {
return appendOnlyReplica(doc, retry, autoGeneratedIdTimestamp, 0);
}
}
public Engine.Index appendOnlyPrimary(ParsedDocument doc, boolean retry, final long autoGeneratedIdTimestamp, boolean create) {
return new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0, create ? Versions.MATCH_DELETED : Versions.MATCH_ANY,
VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), autoGeneratedIdTimestamp, retry,
UNASSIGNED_SEQ_NO, 0);
}
public Engine.Index appendOnlyPrimary(ParsedDocument doc, boolean retry, final long autoGeneratedIdTimestamp) {
return appendOnlyPrimary(doc, retry, autoGeneratedIdTimestamp, randomBoolean());
}
public Engine.Index appendOnlyReplica(ParsedDocument doc, boolean retry, final long autoGeneratedIdTimestamp, final long seqNo) {
return new Engine.Index(newUid(doc), doc, seqNo, 2, 1, null,
Engine.Operation.Origin.REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, retry, UNASSIGNED_SEQ_NO, 0);
}
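/**
 * Indexes append-only documents from multiple threads and verifies that the append-only optimization
 * holds: every document is added (never updated) and no version lookups are performed.
 */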
@Test
public void testAppendConcurrently() throws InterruptedException, IOException {
Thread[] thread = new Thread[randomIntBetween(3, 5)];
int numDocs = randomIntBetween(1000, 10000);
assertEquals(0, engine.getNumVersionLookups());
assertEquals(0, engine.getNumIndexVersionsLookups());
boolean primary = randomBoolean();
List<Engine.Index> docs = new ArrayList<>();
for (int i = 0; i < numDocs; i++) {
final ParsedDocument doc = testParsedDocument(Integer.toString(i), null,
testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
Engine.Index index = primary ? appendOnlyPrimary(doc, false, i) : appendOnlyReplica(doc, false, i, i);
docs.add(index);
}
Collections.shuffle(docs, random());
CountDownLatch startGun = new CountDownLatch(thread.length);
AtomicInteger offset = new AtomicInteger(-1);
for (int i = 0; i < thread.length; i++) {
thread[i] = new Thread() {
@Override
public void run() {
startGun.countDown();
try {
startGun.await();
} catch (InterruptedException e) {
throw new AssertionError(e);
}
assertThat(engine.getVersionMap().values(), empty());
int docOffset;
while ((docOffset = offset.incrementAndGet()) < docs.size()) {
try {
engine.index(docs.get(docOffset));
} catch (IOException e) {
throw new AssertionError(e);
}
}
}
};
thread[i].start();
}
try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) {
assertEquals("unexpected refresh", 0, searcher.getIndexReader().maxDoc());
}
for (int i = 0; i < thread.length; i++) {
thread[i].join();
}
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
int count = searcher.count(new MatchAllDocsQuery());
assertEquals(docs.size(), count);
}
assertEquals(0, engine.getNumVersionLookups());
assertEquals(0, engine.getNumIndexVersionsLookups());
assertThat(engine.getMaxSeenAutoIdTimestamp(),
equalTo(docs.stream().mapToLong(Engine.Index::getAutoGeneratedIdTimestamp).max().getAsLong()));
assertLuceneOperations(engine, numDocs, 0, 0);
}
public static long getNumVersionLookups(InternalEngine engine) { // for other tests to access this
return engine.getNumVersionLookups();
}
public static long getNumIndexVersionsLookups(InternalEngine engine) { // for other tests to access this
return engine.getNumIndexVersionsLookups();
}
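/**
 * Fails a refresh with an IOException and verifies that the engine is failed with that original
 * exception, even when a second, concurrent refresh fails with an unrelated exception.
 */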
@Test
public void testFailEngineOnRandomIO() throws IOException, InterruptedException {
MockDirectoryWrapper wrapper = newMockDirectory();
final Path translogPath = createTempDir("testFailEngineOnRandomIO");
try (Store store = createStore(wrapper)) {
CyclicBarrier join = new CyclicBarrier(2);
CountDownLatch start = new CountDownLatch(1);
AtomicInteger controller = new AtomicInteger(0);
EngineConfig config = config(defaultSettings, store, translogPath, newMergePolicy(), new ReferenceManager.RefreshListener() {
@Override
public void beforeRefresh() throws IOException {
}
@Override
public void afterRefresh(boolean didRefresh) throws IOException {
int i = controller.incrementAndGet();
if (i == 1) {
throw new MockDirectoryWrapper.FakeIOException();
} else if (i == 2) {
try {
start.await();
} catch (InterruptedException e) {
throw new AssertionError(e);
}
throw new ElasticsearchException("something completely different");
}
}
});
InternalEngine internalEngine = createEngine(config);
int docId = 0;
final ParsedDocument doc = testParsedDocument(Integer.toString(docId), null,
testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
Engine.Index index = randomBoolean() ? indexForDoc(doc) : randomAppendOnly(doc, false, docId);
internalEngine.index(index);
Runnable r = () -> {
try {
join.await();
} catch (Exception e) {
throw new AssertionError(e);
}
try {
internalEngine.refresh("test");
fail();
} catch (AlreadyClosedException ex) {
if (ex.getCause() != null) {
assertTrue(ex.toString(), ex.getCause() instanceof MockDirectoryWrapper.FakeIOException);
}
} catch (RefreshFailedEngineException ex) {
// fine
} finally {
start.countDown();
}
};
Thread t = new Thread(r);
Thread t1 = new Thread(r);
t.start();
t1.start();
t.join();
t1.join();
assertTrue(internalEngine.isClosed.get());
assertTrue(internalEngine.failedEngine.get() instanceof MockDirectoryWrapper.FakeIOException);
}
}
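/**
 * Verifies that indexing assigns increasing sequence numbers, that the primary term is recorded per
 * operation, and that documents can be queried by the _seq_no field.
 */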
@Test
public void testSequenceIDs() throws Exception {
Tuple<Long, Long> seqID = getSequenceID(engine, new Engine.Get("type", newUid("1")));
// Non-existent doc returns no seqnum and no primary term
assertThat(seqID.v1(), equalTo(UNASSIGNED_SEQ_NO));
assertThat(seqID.v2(), equalTo(0L));
// create a document
Document document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE));
ParsedDocument doc = testParsedDocument("1", null, document, B_1, null);
engine.index(indexForDoc(doc));
engine.refresh("test");
seqID = getSequenceID(engine, newGet(doc));
logger.info("--> got seqID: {}", seqID);
assertThat(seqID.v1(), equalTo(0L));
assertThat(seqID.v2(), equalTo(primaryTerm.get()));
// Index the same document again
document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE));
doc = testParsedDocument("1", null, document, B_1, null);
engine.index(indexForDoc(doc));
engine.refresh("test");
seqID = getSequenceID(engine, newGet(doc));
logger.info("--> got seqID: {}", seqID);
assertThat(seqID.v1(), equalTo(1L));
assertThat(seqID.v2(), equalTo(primaryTerm.get()));
// Index the same document for the third time, this time changing the primary term
document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE));
doc = testParsedDocument("1", null, document, B_1, null);
engine.index(new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 3,
Versions.MATCH_ANY, VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY,
System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0));
engine.refresh("test");
seqID = getSequenceID(engine, newGet(doc));
logger.info("--> got seqID: {}", seqID);
assertThat(seqID.v1(), equalTo(2L));
assertThat(seqID.v2(), equalTo(3L));
// we can query by the _seq_no
Engine.Searcher searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(LongPoint.newExactQuery("_seq_no", 2), 1));
searchResult.close();
}
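/**
 * Applies a random sequence of index and delete operations and verifies that the per-id seq# lookup in
 * Lucene ({@link VersionsAndSeqNoResolver#loadDocIdAndSeqNo}) always reflects the latest operation per id.
 */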
@Test
public void testLookupSeqNoByIdInLucene() throws Exception {
int numOps = between(10, 100);
long seqNo = 0;
List<Engine.Operation> operations = new ArrayList<>(numOps);
for (int i = 0; i < numOps; i++) {
String id = Integer.toString(between(1, 50));
boolean isIndexing = randomBoolean();
int copies = frequently() ? 1 : between(2, 4);
for (int c = 0; c < copies; c++) {
final ParsedDocument doc = EngineTestCase.createParsedDoc(id, null);
if (isIndexing) {
operations.add(new Engine.Index(EngineTestCase.newUid(doc), doc, seqNo, primaryTerm.get(),
i, null, Engine.Operation.Origin.REPLICA, threadPool.relativeTimeInMillis(), -1, true, UNASSIGNED_SEQ_NO, 0L));
} else {
operations.add(new Engine.Delete(
doc.id(),
EngineTestCase.newUid(doc),
seqNo,
primaryTerm.get(),
i,
null,
Engine.Operation.Origin.REPLICA,
threadPool.relativeTimeInMillis(),
UNASSIGNED_SEQ_NO,
0L
));
}
}
seqNo++;
if (rarely()) {
seqNo++;
}
}
Randomness.shuffle(operations);
Settings.Builder settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true);
final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
Map<String, Engine.Operation> latestOps = new HashMap<>(); // id -> latest seq_no
try (Store store = createStore();
InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), newMergePolicy(), null))) {
CheckedRunnable<IOException> lookupAndCheck = () -> {
try (Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) {
Map<String, Long> liveOps = latestOps.entrySet().stream()
.filter(e -> e.getValue().operationType() == Engine.Operation.TYPE.INDEX)
.collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue().seqNo()));
assertThat(getDocIds(engine, true).stream().collect(Collectors.toMap(e -> e.getId(), e -> e.getSeqNo())),
equalTo(liveOps));
for (String id : latestOps.keySet()) {
String msg = "latestOps=" + latestOps + " op=" + id;
DocIdAndSeqNo docIdAndSeqNo = VersionsAndSeqNoResolver.loadDocIdAndSeqNo(searcher.getIndexReader(), newUid(id));
if (liveOps.containsKey(id) == false) {
assertNull(msg, docIdAndSeqNo);
} else {
assertNotNull(msg, docIdAndSeqNo);
assertThat(msg, docIdAndSeqNo.seqNo, equalTo(latestOps.get(id).seqNo()));
}
}
String notFoundId = randomValueOtherThanMany(liveOps::containsKey, () -> Long.toString(randomNonNegativeLong()));
assertNull(VersionsAndSeqNoResolver.loadDocIdAndSeqNo(searcher.getIndexReader(), newUid(notFoundId)));
}
};
for (Engine.Operation op : operations) {
if (op instanceof Engine.Index) {
engine.index((Engine.Index) op);
if (latestOps.containsKey(op.id()) == false || latestOps.get(op.id()).seqNo() < op.seqNo()) {
latestOps.put(op.id(), op);
}
} else if (op instanceof Engine.Delete) {
engine.delete((Engine.Delete) op);
if (latestOps.containsKey(op.id()) == false || latestOps.get(op.id()).seqNo() < op.seqNo()) {
latestOps.put(op.id(), op);
}
}
if (randomInt(100) < 10) {
engine.refresh("test");
lookupAndCheck.run();
}
if (rarely()) {
engine.flush(false, true);
lookupAndCheck.run();
}
}
engine.refresh("test");
lookupAndCheck.run();
}
}
/**
* A sequence number generator that will generate a sequence number and if {@code stall} is set to true will wait on the barrier and the
 * referenced latch before returning. If the local checkpoint should advance (because {@code stall} is false), then the value of
* {@code expectedLocalCheckpoint} is set accordingly.
*
* @param latchReference to latch the thread for the purpose of stalling
* @param barrier to signal the thread has generated a new sequence number
* @param stall whether or not the thread should stall
* @param expectedLocalCheckpoint the expected local checkpoint after generating a new sequence
* number
* @return a sequence number generator
*/
private ToLongBiFunction<Engine, Engine.Operation> getStallingSeqNoGenerator(
final AtomicReference<CountDownLatch> latchReference,
final CyclicBarrier barrier,
final AtomicBoolean stall,
final AtomicLong expectedLocalCheckpoint) {
return (engine, operation) -> {
final long seqNo = generateNewSeqNo(engine);
final CountDownLatch latch = latchReference.get();
if (stall.get()) {
try {
barrier.await();
latch.await();
} catch (BrokenBarrierException | InterruptedException e) {
throw new RuntimeException(e);
}
} else {
if (expectedLocalCheckpoint.get() + 1 == seqNo) {
expectedLocalCheckpoint.set(seqNo);
}
}
return seqNo;
};
}
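/**
 * Stalls some indexing threads after they have acquired a sequence number, then verifies that recovery
 * fills the resulting seq# gaps and advances the local checkpoint to the max seq#.
 */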
@Test
public void testSequenceNumberAdvancesToMaxSeqOnEngineOpenOnPrimary() throws BrokenBarrierException, InterruptedException, IOException {
engine.close();
final int docs = randomIntBetween(1, 32);
InternalEngine initialEngine = null;
try {
final AtomicReference<CountDownLatch> latchReference = new AtomicReference<>(new CountDownLatch(1));
final CyclicBarrier barrier = new CyclicBarrier(2);
final AtomicBoolean stall = new AtomicBoolean();
final AtomicLong expectedLocalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
final List<Thread> threads = new ArrayList<>();
initialEngine =
createEngine(defaultSettings, store, primaryTranslogDir,
newMergePolicy(), null, LocalCheckpointTracker::new, null,
getStallingSeqNoGenerator(latchReference, barrier, stall, expectedLocalCheckpoint));
final InternalEngine finalInitialEngine = initialEngine;
for (int i = 0; i < docs; i++) {
final String id = Integer.toString(i);
final ParsedDocument doc = testParsedDocument(id, null, testDocumentWithTextField(), SOURCE, null);
stall.set(randomBoolean());
final Thread thread = new Thread(() -> {
try {
finalInitialEngine.index(indexForDoc(doc));
} catch (IOException e) {
throw new AssertionError(e);
}
});
thread.start();
if (stall.get()) {
threads.add(thread);
barrier.await();
} else {
thread.join();
}
}
assertThat(initialEngine.getProcessedLocalCheckpoint(), equalTo(expectedLocalCheckpoint.get()));
assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo((long) (docs - 1)));
initialEngine.flush(true, true);
assertEquals(initialEngine.getProcessedLocalCheckpoint(), initialEngine.getPersistedLocalCheckpoint());
latchReference.get().countDown();
for (final Thread thread : threads) {
thread.join();
}
} finally {
IOUtils.close(initialEngine);
}
try (var recoveringEngine = new InternalEngine(initialEngine.config())) {
recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
recoveringEngine.fillSeqNoGaps(2);
assertEquals(recoveringEngine.getProcessedLocalCheckpoint(), recoveringEngine.getPersistedLocalCheckpoint());
assertThat(recoveringEngine.getProcessedLocalCheckpoint(), greaterThanOrEqualTo((long) (docs - 1)));
}
}
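/**
 * Applies conflicting operations in random order and verifies how far the local checkpoint can advance:
 * on a primary only non-conflicting operations count, while for replica and recovery origins every
 * operation counts.
 */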
@Test
public void testOutOfOrderSequenceNumbersWithVersionConflict() throws IOException {
final List<Engine.Operation> operations = new ArrayList<>();
final int numberOfOperations = randomIntBetween(16, 32);
final AtomicLong sequenceNumber = new AtomicLong();
final Engine.Operation.Origin origin = randomFrom(LOCAL_TRANSLOG_RECOVERY, PEER_RECOVERY, PRIMARY, REPLICA);
final LongSupplier sequenceNumberSupplier =
origin == PRIMARY ? () -> UNASSIGNED_SEQ_NO : sequenceNumber::getAndIncrement;
final Supplier<ParsedDocument> doc = () -> {
final Document document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE));
return testParsedDocument("1", null, document, B_1, null);
};
final Term uid = newUid("1");
final BiFunction<String, Engine.SearcherScope, Searcher> searcherFactory = engine::acquireSearcher;
for (int i = 0; i < numberOfOperations; i++) {
if (randomBoolean()) {
final Engine.Index index = new Engine.Index(
uid,
doc.get(),
sequenceNumberSupplier.getAsLong(),
1,
i,
origin == PRIMARY ? VersionType.EXTERNAL : null,
origin,
System.nanoTime(),
Translog.UNSET_AUTO_GENERATED_TIMESTAMP,
false, UNASSIGNED_SEQ_NO, 0);
operations.add(index);
} else {
final Engine.Delete delete = new Engine.Delete(
"1",
uid,
sequenceNumberSupplier.getAsLong(),
1,
i,
origin == PRIMARY ? VersionType.EXTERNAL : null,
origin,
System.nanoTime(), UNASSIGNED_SEQ_NO, 0);
operations.add(delete);
}
}
final boolean exists = operations.get(operations.size() - 1) instanceof Engine.Index;
Randomness.shuffle(operations);
for (final Engine.Operation operation : operations) {
if (operation instanceof Engine.Index) {
engine.index((Engine.Index) operation);
} else {
engine.delete((Engine.Delete) operation);
}
}
final long expectedLocalCheckpoint;
if (origin == PRIMARY) {
// we can only advance as far as the number of operations that did not conflict
int count = 0;
// each time the version increments as we walk the list, that counts as a successful operation
long version = -1;
for (int i = 0; i < numberOfOperations; i++) {
if (operations.get(i).version() >= version) {
count++;
version = operations.get(i).version();
}
}
// sequence numbers start at zero, so the expected local checkpoint is the number of successful operations minus one
expectedLocalCheckpoint = count - 1;
} else {
expectedLocalCheckpoint = numberOfOperations - 1;
}
assertThat(engine.getProcessedLocalCheckpoint(), equalTo(expectedLocalCheckpoint));
try (Engine.GetResult result = engine.get(new Engine.Get("2", uid), searcherFactory)) {
assertThat(result.docIdAndVersion() != null, equalTo(exists));
}
}
/**
* Test that we do not leak out information on a deleted doc due to it existing in version map. There are at least 2 cases:
* <ul>
* <li>Guessing the deleted seqNo makes the operation succeed</li>
* <li>Providing any other seqNo leaks info that the doc was deleted (and its SeqNo)</li>
* </ul>
*/
@Test
public void testVersionConflictIgnoreDeletedDoc() throws IOException {
ParsedDocument doc = testParsedDocument("1", null, testDocument(),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
engine.delete(new Engine.Delete("1", newUid("1"), 1));
for (long seqNo : new long[]{0, 1, randomNonNegativeLong()}) {
assertDeletedVersionConflict(engine.index(new Engine.Index(newUid("1"), doc, UNASSIGNED_SEQ_NO, 1,
Versions.MATCH_ANY, VersionType.INTERNAL,
PRIMARY, randomNonNegativeLong(), UNSET_AUTO_GENERATED_TIMESTAMP, false, seqNo, 1)),
"update: " + seqNo);
assertDeletedVersionConflict(engine.delete(new Engine.Delete("1", newUid("1"), UNASSIGNED_SEQ_NO, 1,
Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, randomNonNegativeLong(), seqNo, 1)),
"delete: " + seqNo);
}
}
private void assertDeletedVersionConflict(Engine.Result result, String operation) {
assertNotNull("Must have failure for " + operation, result.getFailure());
assertThat(operation, result.getFailure(), Matchers.instanceOf(VersionConflictEngineException.class));
VersionConflictEngineException exception = (VersionConflictEngineException) result.getFailure();
assertThat(operation, exception.getMessage(), containsString("but no document was found"));
}
/*
 * This test verifies that a no-op does not generate a new sequence number, that no-ops can advance the local checkpoint, and that no-ops
* are correctly added to the translog.
*/
@Test
public void testNoOps() throws IOException {
engine.close();
InternalEngine noOpEngine = null;
final int maxSeqNo = randomIntBetween(0, 128);
final int localCheckpoint = randomIntBetween(0, maxSeqNo);
try {
final BiFunction<Long, Long, LocalCheckpointTracker> supplier = (ms, lcp) -> new LocalCheckpointTracker(
maxSeqNo,
localCheckpoint);
EngineConfig noopEngineConfig = copy(engine.config(), new SoftDeletesRetentionMergePolicy(Lucene.SOFT_DELETES_FIELD,
() -> new MatchAllDocsQuery(), engine.config().getMergePolicy()));
noOpEngine = new InternalEngine(noopEngineConfig, supplier) {
@Override
protected long doGenerateSeqNoForOperation(Operation operation) {
throw new UnsupportedOperationException();
}
};
noOpEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
final int gapsFilled = noOpEngine.fillSeqNoGaps(primaryTerm.get());
final String reason = "filling gaps";
noOpEngine.noOp(new Engine.NoOp(maxSeqNo + 1, primaryTerm.get(), LOCAL_TRANSLOG_RECOVERY, System.nanoTime(), reason));
assertThat(noOpEngine.getProcessedLocalCheckpoint(), equalTo((long) (maxSeqNo + 1)));
assertThat(noOpEngine.getTranslog().stats().getUncommittedOperations(), equalTo(gapsFilled));
noOpEngine.noOp(
new Engine.NoOp(maxSeqNo + 2, primaryTerm.get(),
randomFrom(PRIMARY, REPLICA, PEER_RECOVERY), System.nanoTime(), reason));
assertThat(noOpEngine.getProcessedLocalCheckpoint(), equalTo((long) (maxSeqNo + 2)));
assertThat(noOpEngine.getTranslog().stats().getUncommittedOperations(), equalTo(gapsFilled + 1));
// walk to the last op in the translog, which is the one we just added
Translog.Operation op;
Translog.Operation last = null;
try (Translog.Snapshot snapshot = noOpEngine.getTranslog().newSnapshot()) {
while ((op = snapshot.next()) != null) {
last = op;
}
}
assertNotNull(last);
assertThat(last, instanceOf(Translog.NoOp.class));
final Translog.NoOp noOp = (Translog.NoOp) last;
assertThat(noOp.seqNo(), equalTo((long) (maxSeqNo + 2)));
assertThat(noOp.primaryTerm(), equalTo(primaryTerm.get()));
assertThat(noOp.reason(), equalTo(reason));
if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) {
MapperService mapperService = createMapperService("test");
List<Translog.Operation> operationsFromLucene = readAllOperationsInLucene(noOpEngine, mapperService);
assertThat(operationsFromLucene, hasSize(maxSeqNo + 2 - localCheckpoint)); // the filled seq-no gaps plus the 2 manual no-ops
for (int i = 0; i < operationsFromLucene.size(); i++) {
assertThat(operationsFromLucene.get(i),
equalTo(new Translog.NoOp(localCheckpoint + 1 + i, primaryTerm.get(), "filling gaps")));
}
assertConsistentHistoryBetweenTranslogAndLuceneIndex(noOpEngine, mapperService);
}
} finally {
IOUtils.close(noOpEngine);
}
}
/**
* Verifies that a segment containing only no-ops can be used to look up _version and _seqno.
*/
@Test
public void testSegmentContainsOnlyNoOps() throws Exception {
Engine.NoOpResult noOpResult = engine.noOp(new Engine.NoOp(1, primaryTerm.get(),
randomFrom(Engine.Operation.Origin.values()), randomNonNegativeLong(), "test"));
assertThat(noOpResult.getFailure(), nullValue());
engine.refresh("test");
Engine.DeleteResult deleteResult = engine.delete(replicaDeleteForDoc("id", 1, 2, randomNonNegativeLong()));
assertThat(deleteResult.getFailure(), nullValue());
engine.refresh("test");
}
/**
 * A simple test to check that a random combination of operations can coexist in segments and be looked up.
 * This is needed as some fields may not exist in Lucene if a segment misses certain operation types,
 * and this code checks for that. For example, a segment containing only no-ops has neither _uid nor _version.
*/
@Test
public void testRandomOperations() throws Exception {
int numOps = between(10, 100);
for (int i = 0; i < numOps; i++) {
String id = Integer.toString(randomIntBetween(1, 10));
ParsedDocument doc = createParsedDoc(id, null);
Engine.Operation.TYPE type = randomFrom(Engine.Operation.TYPE.values());
switch (type) {
case INDEX:
Engine.IndexResult index = engine.index(replicaIndexForDoc(doc, between(1, 100), i, randomBoolean()));
assertThat(index.getFailure(), nullValue());
break;
case DELETE:
Engine.DeleteResult delete = engine.delete(replicaDeleteForDoc(doc.id(), between(1, 100), i, randomNonNegativeLong()));
assertThat(delete.getFailure(), nullValue());
break;
case NO_OP:
Engine.NoOpResult noOp = engine.noOp(new Engine.NoOp(i, primaryTerm.get(),
randomFrom(Engine.Operation.Origin.values()), randomNonNegativeLong(), ""));
assertThat(noOp.getFailure(), nullValue());
break;
default:
throw new IllegalStateException("Invalid op [" + type + "]");
}
if (randomBoolean()) {
engine.refresh("test");
}
if (randomBoolean()) {
engine.flush();
}
if (randomBoolean()) {
engine.forceMerge(randomBoolean(), between(1, 10), randomBoolean(), false, false, UUIDs.randomBase64UUID());
}
}
if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) {
List<Translog.Operation> operations = readAllOperationsInLucene(engine, createMapperService("test"));
assertThat(operations, hasSize(numOps));
}
}
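/**
 * Verifies that the translog reports the minimum generation that still contains a given sequence number,
 * even when an operation is stalled and lands in a later generation than its neighbours.
 */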
@Test
public void testMinGenerationForSeqNo() throws IOException, BrokenBarrierException, InterruptedException {
engine.close();
final int numberOfTriplets = randomIntBetween(1, 32);
InternalEngine actualEngine = null;
try {
final AtomicReference<CountDownLatch> latchReference = new AtomicReference<>();
final CyclicBarrier barrier = new CyclicBarrier(2);
final AtomicBoolean stall = new AtomicBoolean();
final AtomicLong expectedLocalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
final Map<Thread, CountDownLatch> threads = new LinkedHashMap<>();
actualEngine =
createEngine(defaultSettings, store, primaryTranslogDir,
newMergePolicy(), null, LocalCheckpointTracker::new, null,
getStallingSeqNoGenerator(latchReference, barrier, stall, expectedLocalCheckpoint));
final InternalEngine finalActualEngine = actualEngine;
final Translog translog = finalActualEngine.getTranslog();
final long generation = finalActualEngine.getTranslog().currentFileGeneration();
for (int i = 0; i < numberOfTriplets; i++) {
/*
* Index three documents with the first and last landing in the same generation and the middle document being stalled until
* a later generation.
*/
stall.set(false);
index(finalActualEngine, 3 * i);
final CountDownLatch latch = new CountDownLatch(1);
latchReference.set(latch);
final int skipId = 3 * i + 1;
stall.set(true);
final Thread thread = new Thread(() -> {
try {
index(finalActualEngine, skipId);
} catch (IOException e) {
throw new AssertionError(e);
}
});
thread.start();
threads.put(thread, latch);
barrier.await();
stall.set(false);
index(finalActualEngine, 3 * i + 2);
finalActualEngine.flush();
/*
* This sequence number landed in the last generation, but the lower and upper bounds for an earlier generation straddle
* this sequence number.
*/
assertThat(translog.getMinGenerationForSeqNo(3 * i + 1).translogFileGeneration, equalTo(i + generation));
}
int i = 0;
for (final Map.Entry<Thread, CountDownLatch> entry : threads.entrySet()) {
final Map<String, String> userData = finalActualEngine.commitStats().getUserData();
assertThat(userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY), equalTo(Long.toString(3 * i)));
entry.getValue().countDown();
entry.getKey().join();
finalActualEngine.flush();
i++;
}
} finally {
IOUtils.close(actualEngine);
}
}
private void index(final InternalEngine engine, final int id) throws IOException {
final String docId = Integer.toString(id);
final ParsedDocument doc =
testParsedDocument(docId, null, testDocumentWithTextField(), SOURCE, null);
engine.index(indexForDoc(doc));
}
/**
* Return a tuple representing the sequence ID for the given {@code Get}
* operation. The first value in the tuple is the sequence number, the
* second is the primary term.
*/
private Tuple<Long, Long> getSequenceID(Engine engine, Engine.Get get) throws EngineException {
try (Searcher searcher = engine.acquireSearcher("get", Engine.SearcherScope.INTERNAL)) {
final long primaryTerm;
final long seqNo;
DocIdAndSeqNo docIdAndSeqNo = VersionsAndSeqNoResolver.loadDocIdAndSeqNo(searcher.getIndexReader(), get.uid());
if (docIdAndSeqNo == null) {
primaryTerm = 0;
seqNo = UNASSIGNED_SEQ_NO;
} else {
seqNo = docIdAndSeqNo.seqNo;
NumericDocValues primaryTerms = docIdAndSeqNo.context.reader().getNumericDocValues(SeqNoFieldMapper.PRIMARY_TERM_NAME);
if (primaryTerms == null || primaryTerms.advanceExact(docIdAndSeqNo.docId) == false) {
throw new AssertionError("document does not have primary term [" + docIdAndSeqNo.docId + "]");
}
primaryTerm = primaryTerms.longValue();
}
return new Tuple<>(seqNo, primaryTerm);
} catch (Exception e) {
throw new EngineException(shardId, "unable to retrieve sequence id", e);
}
}
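/**
 * Verifies that restoring local history from the translog reproduces the same documents and seq# stats
 * without appending new operations to the translog.
 */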
@Test
public void testRestoreLocalHistoryFromTranslog() throws IOException {
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
try (Store store = createStore()) {
final ArrayList<Long> seqNos = new ArrayList<>();
final int numOps = randomIntBetween(0, 1024);
for (int i = 0; i < numOps; i++) {
if (rarely()) {
continue;
}
seqNos.add((long) i);
}
Randomness.shuffle(seqNos);
final EngineConfig engineConfig;
final SeqNoStats prevSeqNoStats;
final List<DocIdSeqNoAndSource> prevDocs;
try (InternalEngine engine = createEngine(store, createTempDir(), globalCheckpoint::get)) {
engineConfig = engine.config();
for (final long seqNo : seqNos) {
final String id = Long.toString(seqNo);
final ParsedDocument doc = testParsedDocument(id, null,
testDocumentWithTextField(), SOURCE, null);
engine.index(replicaIndexForDoc(doc, 1, seqNo, false));
if (rarely()) {
engine.rollTranslogGeneration();
}
if (rarely()) {
engine.flush();
}
}
globalCheckpoint.set(randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, engine.getPersistedLocalCheckpoint()));
engine.syncTranslog();
prevSeqNoStats = engine.getSeqNoStats(globalCheckpoint.get());
prevDocs = getDocIds(engine, true);
}
try (InternalEngine engine = new InternalEngine(engineConfig)) {
final long currentTranslogGeneration = engine.getTranslog().currentFileGeneration();
engine.recoverFromTranslog(translogHandler, globalCheckpoint.get());
engine.restoreLocalHistoryFromTranslog(translogHandler);
assertThat(getDocIds(engine, true), equalTo(prevDocs));
SeqNoStats seqNoStats = engine.getSeqNoStats(globalCheckpoint.get());
assertThat(seqNoStats.getLocalCheckpoint(), equalTo(prevSeqNoStats.getLocalCheckpoint()));
assertThat(seqNoStats.getMaxSeqNo(), equalTo(prevSeqNoStats.getMaxSeqNo()));
assertThat("restore from local translog must not add operations to translog",
engine.getTranslog().totalOperationsByMinGen(currentTranslogGeneration), equalTo(0));
}
assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, createMapperService("test"));
}
}
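/**
 * Verifies that a replica which missed some operations fills its seq# gaps with no-ops stamped with the
 * new primary term when it recovers from the translog.
 */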
@Test
public void testFillUpSequenceIdGapsOnRecovery() throws IOException {
final int docs = randomIntBetween(1, 32);
int numDocsOnReplica = 0;
long maxSeqIDOnReplica = -1;
long checkpointOnReplica;
try {
for (int i = 0; i < docs; i++) {
final String docId = Integer.toString(i);
final ParsedDocument doc =
testParsedDocument(docId, null, testDocumentWithTextField(), SOURCE, null);
Engine.Index primaryResponse = indexForDoc(doc);
Engine.IndexResult indexResult = engine.index(primaryResponse);
if (randomBoolean()) {
numDocsOnReplica++;
maxSeqIDOnReplica = indexResult.getSeqNo();
replicaEngine.index(replicaIndexForDoc(doc, 1, indexResult.getSeqNo(), false));
}
}
engine.syncTranslog(); // to advance local checkpoint
replicaEngine.syncTranslog(); // to advance local checkpoint
checkpointOnReplica = replicaEngine.getProcessedLocalCheckpoint();
} finally {
IOUtils.close(replicaEngine);
}
boolean flushed = false;
AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
InternalEngine recoveringEngine = null;
try {
assertEquals(docs - 1, engine.getSeqNoStats(-1).getMaxSeqNo());
assertEquals(docs - 1, engine.getProcessedLocalCheckpoint());
assertEquals(maxSeqIDOnReplica, replicaEngine.getSeqNoStats(-1).getMaxSeqNo());
assertEquals(checkpointOnReplica, replicaEngine.getProcessedLocalCheckpoint());
recoveringEngine = new InternalEngine(copy(replicaEngine.config(), globalCheckpoint::get));
assertEquals(numDocsOnReplica, getTranslog(recoveringEngine).stats().getUncommittedOperations());
recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo());
assertEquals(checkpointOnReplica, recoveringEngine.getProcessedLocalCheckpoint());
assertEquals((maxSeqIDOnReplica + 1) - numDocsOnReplica, recoveringEngine.fillSeqNoGaps(2));
// now snapshot the tlog and ensure the primary term is updated
try (Translog.Snapshot snapshot = getTranslog(recoveringEngine).newSnapshot()) {
assertTrue((maxSeqIDOnReplica + 1) - numDocsOnReplica <= snapshot.totalOperations());
Translog.Operation operation;
while ((operation = snapshot.next()) != null) {
if (operation.opType() == Translog.Operation.Type.NO_OP) {
assertEquals(2, operation.primaryTerm());
} else {
assertEquals(primaryTerm.get(), operation.primaryTerm());
}
}
assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo());
assertEquals(maxSeqIDOnReplica, recoveringEngine.getProcessedLocalCheckpoint());
if ((flushed = randomBoolean())) {
globalCheckpoint.set(recoveringEngine.getSeqNoStats(-1).getMaxSeqNo());
getTranslog(recoveringEngine).sync();
recoveringEngine.flush(true, true);
}
}
} finally {
IOUtils.close(recoveringEngine);
}
// now do it again to make sure we preserve values etc.
try {
recoveringEngine = new InternalEngine(copy(replicaEngine.config(), globalCheckpoint::get));
if (flushed) {
assertThat(recoveringEngine.getTranslogStats().getUncommittedOperations(), equalTo(0));
}
recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo());
assertEquals(maxSeqIDOnReplica, recoveringEngine.getProcessedLocalCheckpoint());
assertEquals(0, recoveringEngine.fillSeqNoGaps(3));
assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo());
assertEquals(maxSeqIDOnReplica, recoveringEngine.getProcessedLocalCheckpoint());
} finally {
IOUtils.close(recoveringEngine);
}
}
public void assertSameReader(Searcher left, Searcher right) {
List<LeafReaderContext> leftLeaves = ElasticsearchDirectoryReader.unwrap(left.getDirectoryReader()).leaves();
List<LeafReaderContext> rightLeaves = ElasticsearchDirectoryReader.unwrap(right.getDirectoryReader()).leaves();
assertEquals(rightLeaves.size(), leftLeaves.size());
for (int i = 0; i < leftLeaves.size(); i++) {
assertSame(leftLeaves.get(i).reader(), rightLeaves.get(i).reader());
}
}
public void assertNotSameReader(Searcher left, Searcher right) {
List<LeafReaderContext> leftLeaves = ElasticsearchDirectoryReader.unwrap(left.getDirectoryReader()).leaves();
List<LeafReaderContext> rightLeaves = ElasticsearchDirectoryReader.unwrap(right.getDirectoryReader()).leaves();
if (rightLeaves.size() == leftLeaves.size()) {
for (int i = 0; i < leftLeaves.size(); i++) {
if (leftLeaves.get(i).reader() != rightLeaves.get(i).reader()) {
return; // all is well
}
}
fail("readers are same");
}
}
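/**
 * Verifies that internal and external refreshes maintain separate searchers: an internal refresh must not
 * change what external searches see, while an external refresh updates both scopes.
 */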
@Test
public void testRefreshScopedSearcher() throws IOException {
try (Store store = createStore();
InternalEngine engine =
// disable merges to make sure that the reader doesn't change unexpectedly during the test
createEngine(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE)) {
engine.refresh("warm_up");
try (Searcher getSearcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL);
Searcher searchSearcher = engine.acquireSearcher("test", Engine.SearcherScope.EXTERNAL)) {
assertSameReader(getSearcher, searchSearcher);
}
for (int i = 0; i < 10; i++) {
final String docId = Integer.toString(i);
final ParsedDocument doc =
testParsedDocument(docId, null, testDocumentWithTextField(), SOURCE, null);
Engine.Index primaryResponse = indexForDoc(doc);
engine.index(primaryResponse);
}
assertTrue(engine.refreshNeeded());
engine.refresh("test", Engine.SearcherScope.INTERNAL, true);
try (Searcher getSearcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL);
Searcher searchSearcher = engine.acquireSearcher("test", Engine.SearcherScope.EXTERNAL)) {
assertEquals(10, getSearcher.getIndexReader().numDocs());
assertEquals(0, searchSearcher.getIndexReader().numDocs());
assertNotSameReader(getSearcher, searchSearcher);
}
engine.refresh("test", Engine.SearcherScope.EXTERNAL, true);
try (Searcher getSearcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL);
Searcher searchSearcher = engine.acquireSearcher("test", Engine.SearcherScope.EXTERNAL)) {
assertEquals(10, getSearcher.getIndexReader().numDocs());
assertEquals(10, searchSearcher.getIndexReader().numDocs());
assertSameReader(getSearcher, searchSearcher);
}
// now ensure external refreshes are reflected on the internal reader
final String docId = Integer.toString(10);
final ParsedDocument doc =
testParsedDocument(docId, null, testDocumentWithTextField(), SOURCE, null);
Engine.Index primaryResponse = indexForDoc(doc);
engine.index(primaryResponse);
engine.refresh("test", Engine.SearcherScope.EXTERNAL, true);
try (Searcher getSearcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL);
Searcher searchSearcher = engine.acquireSearcher("test", Engine.SearcherScope.EXTERNAL)) {
assertEquals(11, getSearcher.getIndexReader().numDocs());
assertEquals(11, searchSearcher.getIndexReader().numDocs());
assertSameReader(getSearcher, searchSearcher);
}
try (Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) {
engine.refresh("test", Engine.SearcherScope.INTERNAL, true);
try (Searcher nextSearcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) {
assertSame(searcher.getIndexReader(), nextSearcher.getIndexReader());
}
}
try (Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.EXTERNAL)) {
engine.refresh("test", Engine.SearcherScope.EXTERNAL, true);
try (Searcher nextSearcher = engine.acquireSearcher("test", Engine.SearcherScope.EXTERNAL)) {
assertSame(searcher.getIndexReader(), nextSearcher.getIndexReader());
}
}
}
}
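/**
 * Verifies that the engine uses the injected sequence number generator for both index and delete
 * operations on the primary.
 */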
@Test
public void testSeqNoGenerator() throws IOException {
engine.close();
final long seqNo = randomIntBetween(Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED), Integer.MAX_VALUE);
final BiFunction<Long, Long, LocalCheckpointTracker> localCheckpointTrackerSupplier = (ms, lcp) -> new LocalCheckpointTracker(
SequenceNumbers.NO_OPS_PERFORMED,
SequenceNumbers.NO_OPS_PERFORMED);
final AtomicLong seqNoGenerator = new AtomicLong(seqNo);
try (Engine e = createEngine(defaultSettings, store, primaryTranslogDir,
newMergePolicy(), null, localCheckpointTrackerSupplier,
null, (engine, operation) -> seqNoGenerator.getAndIncrement())) {
final String id = "id";
final Field uidField = new Field("_id", id, IdFieldMapper.Defaults.FIELD_TYPE);
final Field versionField = new NumericDocValuesField("_version", 0);
final SeqNoFieldMapper.SequenceIDFields seqID = SeqNoFieldMapper.SequenceIDFields.emptySeqID();
final ParseContext.Document document = new ParseContext.Document();
document.add(uidField);
document.add(versionField);
document.add(seqID.seqNo);
document.add(seqID.seqNoDocValue);
document.add(seqID.primaryTerm);
final BytesReference source = new BytesArray(new byte[]{1});
final ParsedDocument parsedDocument = new ParsedDocument(
versionField,
seqID,
id,
"routing",
Collections.singletonList(document),
source,
null);
final Engine.Index index = new Engine.Index(
new Term("_id", parsedDocument.id()),
parsedDocument,
UNASSIGNED_SEQ_NO,
randomIntBetween(1, 8),
Versions.NOT_FOUND,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
-1,
randomBoolean(),
UNASSIGNED_SEQ_NO,
0);
final Engine.IndexResult indexResult = e.index(index);
assertThat(indexResult.getSeqNo(), equalTo(seqNo));
assertThat(seqNoGenerator.get(), equalTo(seqNo + 1));
final Engine.Delete delete = new Engine.Delete(
id,
new Term("_id", parsedDocument.id()),
UNASSIGNED_SEQ_NO,
randomIntBetween(1, 8),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0);
final Engine.DeleteResult deleteResult = e.delete(delete);
assertThat(deleteResult.getSeqNo(), equalTo(seqNo + 1));
assertThat(seqNoGenerator.get(), equalTo(seqNo + 2));
}
}
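/**
 * Verifies that the engine keeps a safe commit whose max seq# is at most the persisted global
 * checkpoint, and retains all translog operations above that commit's local checkpoint.
 */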
@Test
public void testKeepTranslogAfterGlobalCheckpoint() throws Exception {
IOUtils.close(engine, store);
final IndexSettings indexSettings = new IndexSettings(defaultSettings.getIndexMetadata(), defaultSettings.getNodeSettings(),
defaultSettings.getScopedSettings());
IndexMetadata.Builder builder = IndexMetadata.builder(indexSettings.getIndexMetadata())
.settings(Settings.builder().put(indexSettings.getSettings())
.put(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.getKey(), randomFrom("-1", "100micros", "30m"))
.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), randomFrom("-1", "512b", "1gb")));
indexSettings.updateIndexMetadata(builder.build());
final Path translogPath = createTempDir();
store = createStore();
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
store.createEmpty(Version.CURRENT.luceneVersion);
final String translogUUID = Translog.createEmptyTranslog(translogPath, globalCheckpoint.get(), shardId, primaryTerm.get());
store.associateIndexWithNewTranslog(translogUUID);
final EngineConfig engineConfig = config(indexSettings, store, translogPath,
NoMergePolicy.INSTANCE, null, null, () -> globalCheckpoint.get());
final AtomicLong lastSyncedGlobalCheckpointBeforeCommit = new AtomicLong(Translog.readGlobalCheckpoint(translogPath, translogUUID));
try (InternalEngine engine = new InternalEngine(engineConfig) {
@Override
protected void commitIndexWriter(IndexWriter writer, Translog translog, String syncId) throws IOException {
lastSyncedGlobalCheckpointBeforeCommit.set(Translog.readGlobalCheckpoint(translogPath, translogUUID));
// Advance the global checkpoint during the flush to create a lag between a persisted global checkpoint in the translog
// (this value is visible to the deletion policy) and an in-memory global checkpoint in the SequenceNumbersService.
if (rarely()) {
globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), getPersistedLocalCheckpoint()));
}
super.commitIndexWriter(writer, translog, syncId);
}
}) {
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
int numDocs = scaledRandomIntBetween(10, 100);
for (int docId = 0; docId < numDocs; docId++) {
ParseContext.Document document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE));
engine.index(indexForDoc(testParsedDocument(Integer.toString(docId), null, document, B_1, null)));
if (frequently()) {
globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getPersistedLocalCheckpoint()));
engine.syncTranslog();
}
if (frequently()) {
engine.flush(randomBoolean(), true);
final List<IndexCommit> commits = DirectoryReader.listCommits(store.directory());
// Keep only one safe commit as the oldest commit.
final IndexCommit safeCommit = commits.get(0);
if (lastSyncedGlobalCheckpointBeforeCommit.get() == UNASSIGNED_SEQ_NO) {
// If the global checkpoint is still unassigned, we keep an empty (e.g. the initial) commit as a safe commit.
assertThat(Long.parseLong(safeCommit.getUserData().get(SequenceNumbers.MAX_SEQ_NO)),
equalTo(SequenceNumbers.NO_OPS_PERFORMED));
} else {
assertThat(Long.parseLong(safeCommit.getUserData().get(SequenceNumbers.MAX_SEQ_NO)),
lessThanOrEqualTo(lastSyncedGlobalCheckpointBeforeCommit.get()));
}
for (int i = 1; i < commits.size(); i++) {
assertThat(Long.parseLong(commits.get(i).getUserData().get(SequenceNumbers.MAX_SEQ_NO)),
greaterThan(lastSyncedGlobalCheckpointBeforeCommit.get()));
}
// Make sure we keep all translog operations after the local checkpoint of the safe commit.
long localCheckpointFromSafeCommit = Long.parseLong(safeCommit.getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY));
try (Translog.Snapshot snapshot = getTranslog(engine).newSnapshot()) {
assertThat(snapshot, SnapshotMatchers.containsSeqNoRange(localCheckpointFromSafeCommit + 1, docId));
}
}
}
}
}
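/**
 * Concurrently appends, updates and deletes documents while another thread refreshes, and verifies
 * that searches only ever observe the updated copies.
 */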
@Test
public void testConcurrentAppendUpdateAndRefresh() throws InterruptedException, IOException {
int numDocs = scaledRandomIntBetween(100, 1000);
CountDownLatch latch = new CountDownLatch(2);
AtomicBoolean done = new AtomicBoolean(false);
AtomicInteger numDeletes = new AtomicInteger();
Thread thread = new Thread(() -> {
try {
latch.countDown();
latch.await();
for (int j = 0; j < numDocs; j++) {
String docID = Integer.toString(j);
ParsedDocument doc = testParsedDocument(docID, null, testDocumentWithTextField(),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
Engine.Index operation = appendOnlyPrimary(doc, false, 1);
engine.index(operation);
if (rarely()) {
engine.delete(new Engine.Delete(
operation.id(),
operation.uid(),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
));
numDeletes.incrementAndGet();
} else {
doc = testParsedDocument(docID, null, testDocumentWithTextField("updated"),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
Engine.Index update = indexForDoc(doc);
engine.index(update);
}
}
} catch (Exception e) {
throw new AssertionError(e);
} finally {
done.set(true);
}
});
thread.start();
latch.countDown();
latch.await();
while (done.get() == false) {
engine.refresh("test", Engine.SearcherScope.INTERNAL, true);
}
thread.join();
engine.refresh("test", Engine.SearcherScope.INTERNAL, true);
try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) {
TopDocs search = searcher.search(new MatchAllDocsQuery(), searcher.getIndexReader().numDocs());
for (int i = 0; i < search.scoreDocs.length; i++) {
org.apache.lucene.document.Document luceneDoc = searcher.doc(search.scoreDocs[i].doc);
assertEquals("updated", luceneDoc.get("value"));
}
int totalNumDocs = numDocs - numDeletes.get();
assertEquals(totalNumDocs, searcher.getIndexReader().numDocs());
}
}
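/**
 * Verifies that an acquired index commit (last or safe) remains readable while held and is released
 * (allowing the commit to be deleted) once closed.
 */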
@Test
public void testAcquireIndexCommit() throws Exception {
IOUtils.close(engine, store);
store = createStore();
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
try (InternalEngine engine = createEngine(store, createTempDir(), globalCheckpoint::get)) {
int numDocs = between(1, 20);
for (int i = 0; i < numDocs; i++) {
index(engine, i);
}
if (randomBoolean()) {
globalCheckpoint.set(numDocs - 1);
}
final boolean flushFirst = randomBoolean();
final boolean safeCommit = randomBoolean();
final Engine.IndexCommitRef snapshot;
if (safeCommit) {
snapshot = engine.acquireSafeIndexCommit();
} else {
snapshot = engine.acquireLastIndexCommit(flushFirst);
}
int moreDocs = between(1, 20);
for (int i = 0; i < moreDocs; i++) {
index(engine, numDocs + i);
}
globalCheckpoint.set(numDocs + moreDocs - 1);
engine.flush();
// check that we can still read the commit that we captured
try (IndexReader reader = DirectoryReader.open(snapshot.getIndexCommit())) {
assertThat(reader.numDocs(), equalTo(flushFirst && safeCommit == false ? numDocs : 0));
}
assertThat(DirectoryReader.listCommits(engine.store.directory()), hasSize(2));
snapshot.close();
// check that it is cleaned up
engine.flush(true, true);
assertThat(DirectoryReader.listCommits(engine.store.directory()), hasSize(1));
}
}
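/**
 * Verifies commit cleanup as the global checkpoint advances: the safe commit is the first commit whose
 * max_seq_no is at most the global checkpoint, and once the global checkpoint passes the local
 * checkpoint only the last commit is kept and the translog is fully trimmed.
 */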
@Test
public void testCleanUpCommitsWhenGlobalCheckpointAdvanced() throws Exception {
IOUtils.close(engine, store);
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test",
Settings.builder().put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), -1)
.put(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.getKey(), -1).build());
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
try (Store store = createStore();
InternalEngine engine =
createEngine(config(indexSettings, store, createTempDir(), newMergePolicy(),
null, null, globalCheckpoint::get))) {
final int numDocs = scaledRandomIntBetween(10, 100);
for (int docId = 0; docId < numDocs; docId++) {
index(engine, docId);
if (rarely()) {
engine.flush(randomBoolean(), true);
}
}
engine.flush(false, randomBoolean());
globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getPersistedLocalCheckpoint()));
engine.syncTranslog();
List<IndexCommit> commits = DirectoryReader.listCommits(store.directory());
assertThat(Long.parseLong(commits.get(0).getUserData().get(SequenceNumbers.MAX_SEQ_NO)),
lessThanOrEqualTo(globalCheckpoint.get()));
for (int i = 1; i < commits.size(); i++) {
assertThat(Long.parseLong(commits.get(i).getUserData().get(SequenceNumbers.MAX_SEQ_NO)),
greaterThan(globalCheckpoint.get()));
}
// Global checkpoint advanced enough - only the last commit is kept.
globalCheckpoint.set(randomLongBetween(engine.getPersistedLocalCheckpoint(), Long.MAX_VALUE));
engine.syncTranslog();
assertThat(DirectoryReader.listCommits(store.directory()), contains(commits.get(commits.size() - 1)));
assertThat(engine.getTranslog().totalOperations(), equalTo(0));
}
}
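/**
 * Acquires multiple snapshots of the safe commit and verifies that no commit is released while any
 * snapshot is still open; closing the last snapshot allows all commits except the last to be deleted.
 */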
@Test
public void testCleanupCommitsWhenReleaseSnapshot() throws Exception {
IOUtils.close(engine, store);
store = createStore();
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
try (InternalEngine engine = createEngine(store, createTempDir(), globalCheckpoint::get)) {
final int numDocs = scaledRandomIntBetween(10, 100);
for (int docId = 0; docId < numDocs; docId++) {
index(engine, docId);
if (frequently()) {
engine.flush(randomBoolean(), true);
}
}
engine.flush(false, randomBoolean());
int numSnapshots = between(1, 10);
final List<Engine.IndexCommitRef> snapshots = new ArrayList<>();
for (int i = 0; i < numSnapshots; i++) {
snapshots.add(engine.acquireSafeIndexCommit()); // taking snapshots from the safe commit.
}
globalCheckpoint.set(engine.getPersistedLocalCheckpoint());
engine.syncTranslog();
final List<IndexCommit> commits = DirectoryReader.listCommits(store.directory());
for (int i = 0; i < numSnapshots - 1; i++) {
snapshots.get(i).close();
// pending snapshots - should not release any commit.
assertThat(DirectoryReader.listCommits(store.directory()), equalTo(commits));
}
snapshots.get(numSnapshots - 1).close(); // release the last snapshot - delete all except the last commit
assertThat(DirectoryReader.listCommits(store.directory()), hasSize(1));
}
}
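/**
 * Exercises the translog-size based periodic flush condition: it must trigger once the uncommitted
 * translog size exceeds the flush threshold (including for stale operations that Lucene skips), and it
 * must not re-trigger when a new commit would point to the same translog generation, which would
 * otherwise cause an infinite loop of flushes.
 */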
@Test
public void testShouldPeriodicallyFlush() throws Exception {
assertThat("Empty engine does not need flushing", engine.shouldPeriodicallyFlush(), equalTo(false));
// A new engine may have more than one empty translog file - the test should account for this extra.
final Translog translog = engine.getTranslog();
final IntSupplier uncommittedTranslogOperationsSinceLastCommit = () -> {
long localCheckpoint = Long.parseLong(engine.getLastCommittedSegmentInfos().userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY));
return translog.totalOperationsByMinGen(translog.getMinGenerationForSeqNo(localCheckpoint + 1).translogFileGeneration);
};
final long extraTranslogSizeInNewEngine =
engine.getTranslog().stats().getUncommittedSizeInBytes() - Translog.DEFAULT_HEADER_SIZE_IN_BYTES;
int numDocs = between(10, 100);
for (int id = 0; id < numDocs; id++) {
final ParsedDocument doc =
testParsedDocument(Integer.toString(id), null, testDocumentWithTextField(), SOURCE, null);
engine.index(indexForDoc(doc));
}
assertThat("Not exceeded translog flush threshold yet", engine.shouldPeriodicallyFlush(), equalTo(false));
long flushThreshold = RandomNumbers.randomLongBetween(random(), 120,
engine.getTranslog().stats().getUncommittedSizeInBytes() - extraTranslogSizeInNewEngine);
final IndexSettings indexSettings = engine.config().getIndexSettings();
final IndexMetadata indexMetadata = IndexMetadata.builder(indexSettings.getIndexMetadata())
.settings(Settings.builder().put(indexSettings.getSettings())
.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), flushThreshold + "b")).build();
indexSettings.updateIndexMetadata(indexMetadata);
engine.onSettingsChanged(indexSettings.getTranslogRetentionAge(), indexSettings.getTranslogRetentionSize(),
indexSettings.getSoftDeleteRetentionOperations());
assertThat(engine.getTranslog().stats().getUncommittedOperations(), equalTo(numDocs));
assertThat(engine.shouldPeriodicallyFlush(), equalTo(true));
engine.flush();
assertThat(uncommittedTranslogOperationsSinceLastCommit.getAsInt(), equalTo(0));
// Stale operations skipped by Lucene but added to translog - still able to flush
for (int id = 0; id < numDocs; id++) {
final ParsedDocument doc =
testParsedDocument(Integer.toString(id), null, testDocumentWithTextField(), SOURCE, null);
final Engine.IndexResult result = engine.index(replicaIndexForDoc(doc, 1L, id, false));
assertThat(result.isCreated(), equalTo(false));
}
SegmentInfos lastCommitInfo = engine.getLastCommittedSegmentInfos();
assertThat(uncommittedTranslogOperationsSinceLastCommit.getAsInt(), equalTo(numDocs));
assertThat(engine.shouldPeriodicallyFlush(), equalTo(true));
engine.flush(false, false);
assertThat(engine.getLastCommittedSegmentInfos(), not(sameInstance(lastCommitInfo)));
assertThat(uncommittedTranslogOperationsSinceLastCommit.getAsInt(), equalTo(0));
// If the new index commit still points to the same translog generation as the current index commit,
// we should not enable the periodically flush condition; otherwise we can get into an infinite loop of flushes.
generateNewSeqNo(engine); // create a gap here
for (int id = 0; id < numDocs; id++) {
if (randomBoolean()) {
translog.rollGeneration();
}
final ParsedDocument doc =
testParsedDocument("new" + id, null, testDocumentWithTextField(), SOURCE, null);
engine.index(replicaIndexForDoc(doc, 2L, generateNewSeqNo(engine), false));
if (engine.shouldPeriodicallyFlush()) {
engine.flush();
assertThat(engine.getLastCommittedSegmentInfos(), not(sameInstance(lastCommitInfo)));
assertThat(engine.shouldPeriodicallyFlush(), equalTo(false));
}
}
}
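/**
 * With a zero flush-after-merge threshold, a force merge must eventually make the periodic flush
 * condition return true, and a subsequent flush must reset it.
 */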
@Test
public void testShouldPeriodicallyFlushAfterMerge() throws Exception {
assertThat("Empty engine does not need flushing", engine.shouldPeriodicallyFlush(), equalTo(false));
ParsedDocument doc =
testParsedDocument(Integer.toString(0), null, testDocumentWithTextField(), SOURCE, null);
engine.index(indexForDoc(doc));
engine.refresh("test");
assertThat("Not exceeded translog flush threshold yet", engine.shouldPeriodicallyFlush(), equalTo(false));
final IndexSettings indexSettings = engine.config().getIndexSettings();
final IndexMetadata indexMetadata = IndexMetadata.builder(indexSettings.getIndexMetadata())
.settings(Settings.builder().put(indexSettings.getSettings())
.put(IndexSettings.INDEX_FLUSH_AFTER_MERGE_THRESHOLD_SIZE_SETTING.getKey(), "0b")).build();
indexSettings.updateIndexMetadata(indexMetadata);
engine.onSettingsChanged(indexSettings.getTranslogRetentionAge(), indexSettings.getTranslogRetentionSize(),
indexSettings.getSoftDeleteRetentionOperations());
assertThat(engine.getTranslog().stats().getUncommittedOperations(), equalTo(1));
assertThat(engine.shouldPeriodicallyFlush(), equalTo(false));
doc = testParsedDocument(Integer.toString(1), null, testDocumentWithTextField(), SOURCE, null);
engine.index(indexForDoc(doc));
assertThat(engine.getTranslog().stats().getUncommittedOperations(), equalTo(2));
engine.refresh("test");
engine.forceMerge(false, 1, false, false, false, UUIDs.randomBase64UUID());
assertBusy(() -> {
// the merge listener runs concurrently after the force merge has returned
assertThat(engine.shouldPeriodicallyFlush(), equalTo(true));
});
engine.flush();
assertThat(engine.shouldPeriodicallyFlush(), equalTo(false));
}
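/**
 * Randomized stress test of the periodic flush condition under small flush and translog generation
 * thresholds; after every flush the condition must be reset.
 */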
@Test
public void testStressShouldPeriodicallyFlush() throws Exception {
final long flushThreshold = randomLongBetween(120, 5000);
final long generationThreshold = randomLongBetween(1000, 5000);
final IndexSettings indexSettings = engine.config().getIndexSettings();
final IndexMetadata indexMetadata = IndexMetadata.builder(indexSettings.getIndexMetadata())
.settings(Settings.builder().put(indexSettings.getSettings())
.put(IndexSettings.INDEX_TRANSLOG_GENERATION_THRESHOLD_SIZE_SETTING.getKey(), generationThreshold + "b")
.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), flushThreshold + "b")).build();
indexSettings.updateIndexMetadata(indexMetadata);
engine.onSettingsChanged(indexSettings.getTranslogRetentionAge(), indexSettings.getTranslogRetentionSize(),
indexSettings.getSoftDeleteRetentionOperations());
final int numOps = scaledRandomIntBetween(100, 10_000);
for (int i = 0; i < numOps; i++) {
final long localCheckPoint = engine.getProcessedLocalCheckpoint();
final long seqno = randomLongBetween(Math.max(0, localCheckPoint), localCheckPoint + 5);
final ParsedDocument doc =
testParsedDocument(Long.toString(seqno), null, testDocumentWithTextField(), SOURCE, null);
engine.index(replicaIndexForDoc(doc, 1L, seqno, false));
if (rarely() && engine.getTranslog().shouldRollGeneration()) {
engine.rollTranslogGeneration();
}
if (rarely() || engine.shouldPeriodicallyFlush()) {
engine.flush();
assertThat(engine.shouldPeriodicallyFlush(), equalTo(false));
}
}
}
@Test
public void testStressUpdateSameDocWhileGettingIt() throws IOException, InterruptedException {
final int iters = randomIntBetween(1, 15);
for (int i = 0; i < iters; i++) {
// this is a reproduction of https://github.com/elastic/elasticsearch/issues/28714
try (Store store = createStore(); InternalEngine engine = createEngine(store, createTempDir())) {
final IndexSettings indexSettings = engine.config().getIndexSettings();
final IndexMetadata indexMetadata = IndexMetadata.builder(indexSettings.getIndexMetadata())
.settings(Settings.builder().put(indexSettings.getSettings())
.put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), TimeValue.timeValueMillis(1))).build();
engine.engineConfig.getIndexSettings().updateIndexMetadata(indexMetadata);
engine.onSettingsChanged(indexSettings.getTranslogRetentionAge(), indexSettings.getTranslogRetentionSize(),
indexSettings.getSoftDeleteRetentionOperations());
ParsedDocument document = testParsedDocument(Integer.toString(0), null, testDocumentWithTextField(), SOURCE, null);
final Engine.Index doc = new Engine.Index(newUid(document), document, UNASSIGNED_SEQ_NO, 0,
Versions.MATCH_ANY, VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(),
-1, false, UNASSIGNED_SEQ_NO, 0);
// first index an append-only document and then delete it, such that we have it in the tombstones
engine.index(doc);
engine.delete(new Engine.Delete(
doc.id(),
doc.uid(),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
));
// now index more append-only docs and refresh so we re-enable the optimization for the unsafe version map
ParsedDocument document1 = testParsedDocument(Integer.toString(1), null, testDocumentWithTextField(), SOURCE, null);
engine.index(new Engine.Index(newUid(document1), document1, UNASSIGNED_SEQ_NO, 0, Versions.MATCH_ANY, VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false,
UNASSIGNED_SEQ_NO, 0));
engine.refresh("test");
ParsedDocument document2 = testParsedDocument(Integer.toString(2), null, testDocumentWithTextField(), SOURCE, null);
engine.index(new Engine.Index(newUid(document2), document2, UNASSIGNED_SEQ_NO, 0, Versions.MATCH_ANY, VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false,
UNASSIGNED_SEQ_NO, 0));
engine.refresh("test");
ParsedDocument document3 = testParsedDocument(Integer.toString(3), null, testDocumentWithTextField(), SOURCE, null);
final Engine.Index doc3 = new Engine.Index(newUid(document3), document3, UNASSIGNED_SEQ_NO, 0,
Versions.MATCH_ANY, VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(),
-1, false, UNASSIGNED_SEQ_NO, 0);
engine.index(doc3);
engine.engineConfig.setEnableGcDeletes(true);
// once we are here the version map is unsafe again and we need to do a refresh inside the get calls to ensure we
// de-optimize. We also enabled GC deletes, which now causes tombstones to be pruned inside that internal refresh.
// One get call will prune and the other will try to lock the version map concurrently while holding the lock that
// pruneTombstones needs, which is the deadlock this test reproduces.
CountDownLatch awaitStarted = new CountDownLatch(1);
Thread thread = new Thread(() -> {
awaitStarted.countDown();
try (Engine.GetResult getResult = engine.get(new Engine.Get(
doc3.id(), doc3.uid()), engine::acquireSearcher)) {
assertThat(getResult.docIdAndVersion(), is(notNullValue()));
}
});
thread.start();
awaitStarted.await();
try (Engine.GetResult getResult = engine.get(
new Engine.Get(doc.id(), doc.uid()),
engine::acquireSearcher)) {
assertThat(getResult.docIdAndVersion(), is(nullValue()));
}
thread.join();
}
}
}
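/**
 * Verifies that tombstone pruning honours both the GC interval and the local checkpoint: tombstones
 * above a sequence number gap are retained until the gap is filled, regardless of their age.
 */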
@Test
public void testPruneOnlyDeletesAtMostLocalCheckpoint() throws Exception {
final AtomicLong clock = new AtomicLong(0);
threadPool = spy(threadPool);
when(threadPool.relativeTimeInMillis()).thenAnswer(invocation -> clock.get());
final long gcInterval = randomIntBetween(0, 10);
final IndexSettings indexSettings = engine.config().getIndexSettings();
final IndexMetadata indexMetadata = IndexMetadata.builder(indexSettings.getIndexMetadata())
.settings(Settings.builder().put(indexSettings.getSettings())
.put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), TimeValue.timeValueMillis(gcInterval).getStringRep())).build();
indexSettings.updateIndexMetadata(indexMetadata);
try (Store store = createStore();
InternalEngine engine = createEngine(store, createTempDir())) {
engine.config().setEnableGcDeletes(false);
for (int i = 0, docs = scaledRandomIntBetween(0, 10); i < docs; i++) {
index(engine, i);
}
final long deleteBatch = between(10, 20);
final long gapSeqNo = randomLongBetween(
engine.getSeqNoStats(-1).getMaxSeqNo() + 1, engine.getSeqNoStats(-1).getMaxSeqNo() + deleteBatch);
for (int i = 0; i < deleteBatch; i++) {
final long seqno = generateNewSeqNo(engine);
if (seqno != gapSeqNo) {
if (randomBoolean()) {
clock.incrementAndGet();
}
engine.delete(replicaDeleteForDoc(UUIDs.randomBase64UUID(), 1, seqno, threadPool.relativeTimeInMillis()));
}
}
List<DeleteVersionValue> tombstones = new ArrayList<>(tombstonesInVersionMap(engine).values());
engine.config().setEnableGcDeletes(true);
// Prune tombstones whose seqno < gap_seqno and timestamp < clock-gcInterval.
clock.set(randomLongBetween(gcInterval, deleteBatch + gcInterval));
engine.refresh("test");
tombstones.removeIf(v -> v.seqNo < gapSeqNo && v.time < clock.get() - gcInterval);
assertThat(tombstonesInVersionMap(engine).values(), containsInAnyOrder(tombstones.toArray()));
// Prune tombstones whose seqno is at most the local checkpoint (i.e. seqno < gap_seqno).
clock.set(randomLongBetween(deleteBatch + gcInterval * 4/3, 100)); // Need a margin for gcInterval/4.
engine.refresh("test");
tombstones.removeIf(v -> v.seqNo < gapSeqNo);
assertThat(tombstonesInVersionMap(engine).values(), containsInAnyOrder(tombstones.toArray()));
// Fill the seqno gap - should prune all tombstones.
clock.set(between(0, 100));
if (randomBoolean()) {
engine.index(replicaIndexForDoc(testParsedDocument("d", null, testDocumentWithTextField(),
SOURCE, null), 1, gapSeqNo, false));
} else {
engine.delete(replicaDeleteForDoc(UUIDs.randomBase64UUID(), Versions.MATCH_ANY,
gapSeqNo, threadPool.relativeTimeInMillis()));
}
clock.set(randomLongBetween(100 + gcInterval * 4/3, Long.MAX_VALUE)); // Need a margin for gcInterval/4.
engine.refresh("test");
assertThat(tombstonesInVersionMap(engine).values(), empty());
}
}
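/**
 * Verifies that trimming unsafe commits deletes commits whose max_seq_no is above the global
 * checkpoint, leaving exactly the safe commit whose documents are all at or below the global checkpoint.
 */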
@Test
public void testTrimUnsafeCommits() throws Exception {
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
final int maxSeqNo = 40;
final List<Long> seqNos = LongStream.rangeClosed(0, maxSeqNo).boxed().collect(Collectors.toList());
Collections.shuffle(seqNos, random());
try (Store store = createStore()) {
EngineConfig config = config(defaultSettings, store, createTempDir(), newMergePolicy(),
null, null, globalCheckpoint::get);
final List<Long> commitMaxSeqNo = new ArrayList<>();
final long minTranslogGen;
try (InternalEngine engine = createEngine(config)) {
for (int i = 0; i < seqNos.size(); i++) {
ParsedDocument doc = testParsedDocument(Long.toString(seqNos.get(i)), null, testDocument(),
new BytesArray("{}"), null);
Engine.Index index = new Engine.Index(newUid(doc), doc, seqNos.get(i), 0,
1, null, REPLICA, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
engine.index(index);
if (randomBoolean()) {
engine.flush();
final Long maxSeqNoInCommit = seqNos.subList(0, i + 1).stream().max(Long::compareTo).orElse(-1L);
commitMaxSeqNo.add(maxSeqNoInCommit);
}
}
globalCheckpoint.set(randomInt(maxSeqNo));
engine.syncTranslog();
minTranslogGen = engine.getTranslog().getMinFileGeneration();
}
store.trimUnsafeCommits(globalCheckpoint.get(), minTranslogGen, config.getIndexSettings().getIndexVersionCreated());
long safeMaxSeqNo =
commitMaxSeqNo.stream().filter(s -> s <= globalCheckpoint.get())
.reduce((s1, s2) -> s2) // get the last one.
.orElse(SequenceNumbers.NO_OPS_PERFORMED);
final List<IndexCommit> commits = DirectoryReader.listCommits(store.directory());
assertThat(commits, hasSize(1));
assertThat(commits.get(0).getUserData().get(SequenceNumbers.MAX_SEQ_NO), equalTo(Long.toString(safeMaxSeqNo)));
try (IndexReader reader = DirectoryReader.open(commits.get(0))) {
for (LeafReaderContext context: reader.leaves()) {
final NumericDocValues values = context.reader().getNumericDocValues(SeqNoFieldMapper.NAME);
if (values != null) {
for (int docID = 0; docID < context.reader().maxDoc(); docID++) {
if (values.advanceExact(docID) == false) {
throw new AssertionError("Document does not have a seq number: " + docID);
}
assertThat(values.longValue(), lessThanOrEqualTo(globalCheckpoint.get()));
}
}
}
}
}
}
@Test
public void testLuceneHistoryOnPrimary() throws Exception {
final List<Engine.Operation> operations = generateSingleDocHistory(
false, randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL), false, 2, 10, 300, "1");
assertOperationHistoryInLucene(operations);
}
@Test
public void testLuceneHistoryOnReplica() throws Exception {
final List<Engine.Operation> operations = generateSingleDocHistory(
true, randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL), false, 2, 10, 300, "2");
Randomness.shuffle(operations);
assertOperationHistoryInLucene(operations);
}
private void assertOperationHistoryInLucene(List<Engine.Operation> operations) throws IOException {
final MergePolicy keepSoftDeleteDocsMP = new SoftDeletesRetentionMergePolicy(
Lucene.SOFT_DELETES_FIELD, MatchAllDocsQuery::new, engine.config().getMergePolicy());
Settings.Builder settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
.put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), randomLongBetween(0, 10));
final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
Set<Long> expectedSeqNos = new HashSet<>();
try (Store store = createStore();
Engine engine = createEngine(config(indexSettings, store, createTempDir(), keepSoftDeleteDocsMP, null))) {
for (Engine.Operation op : operations) {
if (op instanceof Engine.Index) {
Engine.IndexResult indexResult = engine.index((Engine.Index) op);
assertThat(indexResult.getFailure(), nullValue());
expectedSeqNos.add(indexResult.getSeqNo());
} else {
Engine.DeleteResult deleteResult = engine.delete((Engine.Delete) op);
assertThat(deleteResult.getFailure(), nullValue());
expectedSeqNos.add(deleteResult.getSeqNo());
}
if (rarely()) {
engine.refresh("test");
}
if (rarely()) {
engine.flush();
}
if (rarely()) {
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
}
}
MapperService mapperService = createMapperService("test");
List<Translog.Operation> actualOps = readAllOperationsInLucene(engine, mapperService);
assertThat(actualOps.stream().map(o -> o.seqNo()).collect(Collectors.toList()), containsInAnyOrder(expectedSeqNos.toArray()));
assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService);
}
}
@Test
public void testKeepMinRetainedSeqNoByMergePolicy() throws IOException {
IOUtils.close(engine, store);
Settings.Builder settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
.put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), randomLongBetween(0, 10));
final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
final long primaryTerm = randomLongBetween(1, Long.MAX_VALUE);
final AtomicLong retentionLeasesVersion = new AtomicLong();
final AtomicReference<RetentionLeases> retentionLeasesHolder = new AtomicReference<>(
new RetentionLeases(primaryTerm, retentionLeasesVersion.get(), Collections.emptyList()));
final List<Engine.Operation> operations = generateSingleDocHistory(
true, randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL), false, 2, 10, 300, "2");
Randomness.shuffle(operations);
Set<Long> existingSeqNos = new HashSet<>();
store = createStore();
engine = createEngine(config(
indexSettings,
store,
createTempDir(),
newMergePolicy(),
null,
null,
globalCheckpoint::get,
retentionLeasesHolder::get
));
assertThat(engine.getMinRetainedSeqNo(), equalTo(0L));
long lastMinRetainedSeqNo = engine.getMinRetainedSeqNo();
for (Engine.Operation op : operations) {
final Engine.Result result;
if (op instanceof Engine.Index) {
result = engine.index((Engine.Index) op);
} else {
result = engine.delete((Engine.Delete) op);
}
existingSeqNos.add(result.getSeqNo());
if (randomBoolean()) {
engine.syncTranslog(); // advance persisted local checkpoint
assertEquals(engine.getProcessedLocalCheckpoint(), engine.getPersistedLocalCheckpoint());
globalCheckpoint.set(
randomLongBetween(globalCheckpoint.get(), engine.getLocalCheckpointTracker().getPersistedCheckpoint()));
}
if (randomBoolean()) {
retentionLeasesVersion.incrementAndGet();
final int length = randomIntBetween(0, 8);
final List<RetentionLease> leases = new ArrayList<>(length);
for (int i = 0; i < length; i++) {
final String id = randomAlphaOfLength(8);
final long retainingSequenceNumber = randomLongBetween(0, Math.max(0, globalCheckpoint.get()));
final long timestamp = randomLongBetween(0L, Long.MAX_VALUE);
final String source = randomAlphaOfLength(8);
leases.add(new RetentionLease(id, retainingSequenceNumber, timestamp, source));
}
retentionLeasesHolder.set(new RetentionLeases(primaryTerm, retentionLeasesVersion.get(), leases));
}
if (rarely()) {
settings.put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), randomLongBetween(0, 10));
indexSettings.updateIndexMetadata(IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build());
engine.onSettingsChanged(indexSettings.getTranslogRetentionAge(), indexSettings.getTranslogRetentionSize(),
indexSettings.getSoftDeleteRetentionOperations());
}
if (rarely()) {
engine.refresh("test");
}
if (rarely()) {
engine.flush(true, true);
assertThat(Long.parseLong(engine.getLastCommittedSegmentInfos().userData.get(Engine.MIN_RETAINED_SEQNO)),
equalTo(engine.getMinRetainedSeqNo()));
}
if (rarely()) {
engine.forceMerge(randomBoolean(), 1, false, false, false, UUIDs.randomBase64UUID());
}
try (Closeable ignored = engine.acquireHistoryRetentionLock(Engine.HistorySource.INDEX)) {
long minRetainSeqNos = engine.getMinRetainedSeqNo();
assertThat(minRetainSeqNos, lessThanOrEqualTo(globalCheckpoint.get() + 1));
Long[] expectedOps = existingSeqNos.stream().filter(seqno -> seqno >= minRetainSeqNos).toArray(Long[]::new);
Set<Long> actualOps = readAllOperationsInLucene(engine, createMapperService("test")).stream()
.map(Translog.Operation::seqNo).collect(Collectors.toSet());
assertThat(actualOps, containsInAnyOrder(expectedOps));
}
try (Engine.IndexCommitRef commitRef = engine.acquireSafeIndexCommit()) {
IndexCommit safeCommit = commitRef.getIndexCommit();
if (safeCommit.getUserData().containsKey(Engine.MIN_RETAINED_SEQNO)) {
lastMinRetainedSeqNo = Long.parseLong(safeCommit.getUserData().get(Engine.MIN_RETAINED_SEQNO));
}
}
}
if (randomBoolean()) {
engine.close();
} else {
engine.flushAndClose();
}
try (InternalEngine recoveringEngine = new InternalEngine(engine.config())) {
assertThat(recoveringEngine.getMinRetainedSeqNo(), equalTo(lastMinRetainedSeqNo));
}
}
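/**
 * Runs concurrent refreshes against concurrent indexing and verifies that the last refreshed checkpoint
 * always reflects at least the processed checkpoint observed before each refresh, and finally converges
 * to the processed local checkpoint.
 */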
@Test
public void testLastRefreshCheckpoint() throws Exception {
AtomicBoolean done = new AtomicBoolean();
Thread[] refreshThreads = new Thread[between(1, 8)];
CountDownLatch latch = new CountDownLatch(refreshThreads.length);
for (int i = 0; i < refreshThreads.length; i++) {
latch.countDown();
refreshThreads[i] = new Thread(() -> {
while (done.get() == false) {
long checkPointBeforeRefresh = engine.getProcessedLocalCheckpoint();
engine.refresh("test", randomFrom(Engine.SearcherScope.values()), true);
assertThat(engine.lastRefreshedCheckpoint(), greaterThanOrEqualTo(checkPointBeforeRefresh));
}
});
refreshThreads[i].start();
}
latch.await();
List<Engine.Operation> ops = generateSingleDocHistory(
true, VersionType.EXTERNAL, false, 1, 10, 1000, "1");
concurrentlyApplyOps(ops, engine);
done.set(true);
for (Thread thread : refreshThreads) {
thread.join();
}
engine.refresh("test");
assertThat(engine.lastRefreshedCheckpoint(), equalTo(engine.getProcessedLocalCheckpoint()));
}
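/**
 * Opens several changes snapshots concurrently and verifies that they collectively trigger exactly one
 * extra refresh, since a single refresh up to the max seq_no serves all of them.
 */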
@Test
public void testLuceneSnapshotRefreshesOnlyOnce() throws Exception {
final MapperService mapperService = createMapperService("test");
final long maxSeqNo = randomLongBetween(10, 50);
final AtomicLong refreshCounter = new AtomicLong();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(Settings.builder().
put(defaultSettings.getSettings()).put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)).build());
try (Store store = createStore();
InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), newMergePolicy(),
null,
new ReferenceManager.RefreshListener() {
@Override
public void beforeRefresh() {
refreshCounter.incrementAndGet();
}
@Override
public void afterRefresh(boolean didRefresh) {
}
}, () -> SequenceNumbers.NO_OPS_PERFORMED))) {
for (long seqNo = 0; seqNo <= maxSeqNo; seqNo++) {
final ParsedDocument doc = testParsedDocument("id_" + seqNo, null, testDocumentWithTextField("test"),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
engine.index(replicaIndexForDoc(doc, 1, seqNo, randomBoolean()));
}
final long initialRefreshCount = refreshCounter.get();
final Thread[] snapshotThreads = new Thread[between(1, 3)];
CountDownLatch latch = new CountDownLatch(1);
for (int i = 0; i < snapshotThreads.length; i++) {
final long min = randomLongBetween(0, maxSeqNo - 5);
final long max = randomLongBetween(min, maxSeqNo);
snapshotThreads[i] = new Thread(new AbstractRunnable() {
@Override
public void onFailure(Exception e) {
throw new AssertionError(e);
}
@Override
protected void doRun() throws Exception {
latch.await();
Translog.Snapshot changes = engine.newChangesSnapshot("test", mapperService, min, max, true);
changes.close();
}
});
snapshotThreads[i].start();
}
latch.countDown();
for (Thread thread : snapshotThreads) {
thread.join();
}
assertThat(refreshCounter.get(), equalTo(initialRefreshCount + 1L));
assertThat(engine.lastRefreshedCheckpoint(), equalTo(maxSeqNo));
}
}
@Test
public void testAcquireSearcherOnClosingEngine() throws Exception {
engine.close();
expectThrows(AlreadyClosedException.class, () -> engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL));
}
@Test
public void testNoOpOnClosingEngine() throws Exception {
engine.close();
Settings settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true).build();
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build());
assertTrue(indexSettings.isSoftDeleteEnabled());
try (Store store = createStore();
InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null))) {
engine.close();
expectThrows(AlreadyClosedException.class, () -> engine.noOp(
new Engine.NoOp(2, primaryTerm.get(), LOCAL_TRANSLOG_RECOVERY, System.nanoTime(), "reason")));
}
}
@Test
public void testSoftDeleteOnClosingEngine() throws Exception {
engine.close();
Settings settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true).build();
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build());
assertTrue(indexSettings.isSoftDeleteEnabled());
try (Store store = createStore();
InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null))) {
engine.close();
expectThrows(AlreadyClosedException.class, () -> engine.delete(replicaDeleteForDoc("test", 42, 7, System.nanoTime())));
}
}
@Test
public void testTrackMaxSeqNoOfUpdatesOrDeletesOnPrimary() throws Exception {
engine.close();
Set<String> liveDocIds = new HashSet<>();
engine = new InternalEngine(engine.config());
assertThat(engine.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(-1L));
int numOps = between(1, 500);
for (int i = 0; i < numOps; i++) {
long currentMaxSeqNoOfUpdates = engine.getMaxSeqNoOfUpdatesOrDeletes();
ParsedDocument doc = createParsedDoc(Integer.toString(between(1, 100)), null);
if (randomBoolean()) {
Engine.IndexResult result = engine.index(indexForDoc(doc));
if (liveDocIds.add(doc.id()) == false) {
assertThat("update operations on primary must advance max_seq_no_of_updates",
engine.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(Math.max(currentMaxSeqNoOfUpdates, result.getSeqNo())));
} else {
assertThat("append operations should not advance max_seq_no_of_updates",
engine.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(currentMaxSeqNoOfUpdates));
}
} else {
Engine.DeleteResult result = engine.delete(new Engine.Delete(
doc.id(),
newUid(doc.id()),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
));
liveDocIds.remove(doc.id());
assertThat("delete operations on primary must advance max_seq_no_of_updates",
engine.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(Math.max(currentMaxSeqNoOfUpdates, result.getSeqNo())));
}
}
}
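/**
 * Reopens an engine on an existing store without replaying the translog and verifies that the local
 * checkpoint tracker and the version map are rebuilt from the safe commit: only deletes above the
 * persisted local checkpoint remain in the version map, and exactly the seq_nos of the safe commit
 * are marked as processed.
 */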
@Test
public void testRebuildLocalCheckpointTrackerAndVersionMap() throws Exception {
Settings.Builder settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), 10000)
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true);
final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
Path translogPath = createTempDir();
List<Engine.Operation> operations = generateHistoryOnReplica(between(1, 500), randomBoolean(), randomBoolean());
List<List<Engine.Operation>> commits = new ArrayList<>();
commits.add(new ArrayList<>());
try (Store store = createStore()) {
EngineConfig config = config(indexSettings, store, translogPath, NoMergePolicy.INSTANCE, null, null, globalCheckpoint::get);
final List<DocIdSeqNoAndSource> docs;
try (InternalEngine engine = createEngine(config)) {
List<Engine.Operation> flushedOperations = new ArrayList<>();
for (Engine.Operation op : operations) {
flushedOperations.add(op);
applyOperation(engine, op);
if (randomBoolean()) {
engine.syncTranslog();
globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getPersistedLocalCheckpoint()));
}
if (randomInt(100) < 10) {
engine.refresh("test");
}
if (randomInt(100) < 5) {
engine.flush(true, true);
flushedOperations.sort(Comparator.comparing(Engine.Operation::seqNo));
commits.add(new ArrayList<>(flushedOperations));
}
}
docs = getDocIds(engine, true);
}
List<Engine.Operation> operationsInSafeCommit = null;
for (int i = commits.size() - 1; i >= 0; i--) {
if (commits.get(i).stream().allMatch(op -> op.seqNo() <= globalCheckpoint.get())) {
operationsInSafeCommit = commits.get(i);
break;
}
}
assertThat(operationsInSafeCommit, notNullValue());
try (InternalEngine engine = new InternalEngine(config)) { // do not recover from translog
final Map<BytesRef, Engine.Operation> deletesAfterCheckpoint = new HashMap<>();
for (Engine.Operation op : operationsInSafeCommit) {
if (op instanceof Engine.NoOp == false && op.seqNo() > engine.getPersistedLocalCheckpoint()) {
deletesAfterCheckpoint.put(new Term(IdFieldMapper.NAME, Uid.encodeId(op.id())).bytes(), op);
}
}
deletesAfterCheckpoint.values().removeIf(o -> o instanceof Engine.Delete == false);
final Map<BytesRef, VersionValue> versionMap = engine.getVersionMap();
for (BytesRef uid : deletesAfterCheckpoint.keySet()) {
final VersionValue versionValue = versionMap.get(uid);
final Engine.Operation op = deletesAfterCheckpoint.get(uid);
final String msg = versionValue + " vs " +
"op[" + op.operationType() + " id=" + op.id() + " seqno=" + op.seqNo() + " term=" + op.primaryTerm() + "]";
assertThat(versionValue, instanceOf(DeleteVersionValue.class));
assertThat(msg, versionValue.seqNo, equalTo(op.seqNo()));
assertThat(msg, versionValue.term, equalTo(op.primaryTerm()));
assertThat(msg, versionValue.version, equalTo(op.version()));
}
assertThat(versionMap.keySet(), equalTo(deletesAfterCheckpoint.keySet()));
final LocalCheckpointTracker tracker = engine.getLocalCheckpointTracker();
final Set<Long> seqNosInSafeCommit = operationsInSafeCommit.stream().map(op -> op.seqNo()).collect(Collectors.toSet());
for (Engine.Operation op : operations) {
assertThat(
"seq_no=" + op.seqNo() + " max_seq_no=" + tracker.getMaxSeqNo() + " checkpoint=" + tracker.getProcessedCheckpoint(),
tracker.hasProcessed(op.seqNo()), equalTo(seqNosInSafeCommit.contains(op.seqNo())));
}
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
assertThat(getDocIds(engine, true), equalTo(docs));
}
}
}
@Test
public void testRequireSoftDeletesWhenAccessingChangesSnapshot() throws Exception {
try (Store store = createStore()) {
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(Settings.builder().
put(defaultSettings.getSettings()).put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false)).build());
try (InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), newMergePolicy(), null))) {
AssertionError error = expectThrows(AssertionError.class,
() -> engine.newChangesSnapshot("test", createMapperService("test"), 0, randomNonNegativeLong(), randomBoolean()));
assertThat(error.getMessage(), containsString("does not have soft-deletes enabled"));
}
}
}
private void assertLuceneOperations(InternalEngine engine,
long expectedAppends,
long expectedUpdates,
long expectedDeletes) {
String message = "Lucene operations mismatched;" +
" appends [actual:" + engine.getNumDocAppends() + ", expected:" + expectedAppends + "]," +
" updates [actual:" + engine.getNumDocUpdates() + ", expected:" + expectedUpdates + "]," +
" deletes [actual:" + engine.getNumDocDeletes() + ", expected:" + expectedDeletes + "]";
assertThat(message, engine.getNumDocAppends(), equalTo(expectedAppends));
assertThat(message, engine.getNumDocUpdates(), equalTo(expectedUpdates));
assertThat(message, engine.getNumDocDeletes(), equalTo(expectedDeletes));
}
@Test
public void testStoreHonorsLuceneVersion() throws IOException {
for (Version createdVersion : Arrays.asList(
Version.CURRENT, VersionUtils.getPreviousMinorVersion(), VersionUtils.getFirstVersion())) {
Settings settings = Settings.builder()
.put(indexSettings())
.put(IndexMetadata.SETTING_VERSION_CREATED, createdVersion).build();
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", settings);
try (Store store = createStore();
InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null))) {
ParsedDocument doc = testParsedDocument("1", null, new Document(),
new BytesArray("{}".getBytes("UTF-8")), null);
engine.index(appendOnlyPrimary(doc, false, 1));
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
LeafReader leafReader = getOnlyLeafReader(searcher.getIndexReader());
assertEquals(createdVersion.luceneVersion.major, leafReader.getMetaData().getCreatedVersionMajor());
}
}
}
}
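/**
 * Flushes repeatedly while indexing and rolling the translog on concurrent threads, then checks that
 * the max_seq_no recorded in the commit user data is consistent with the indexed operations.
 */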
@Test
public void testMaxSeqNoInCommitUserData() throws Exception {
AtomicBoolean running = new AtomicBoolean(true);
Thread rollTranslog = new Thread(() -> {
while (running.get() && engine.getTranslog().currentFileGeneration() < 500) {
engine.rollTranslogGeneration(); // make adding operations to translog slower
}
});
rollTranslog.start();
Thread indexing = new Thread(() -> {
long seqNo = 0;
while (running.get() && seqNo <= 1000) {
try {
String id = Long.toString(between(1, 50));
if (randomBoolean()) {
ParsedDocument doc = testParsedDocument(id, null, testDocumentWithTextField(), SOURCE, null);
engine.index(replicaIndexForDoc(doc, 1L, seqNo, false));
} else {
engine.delete(replicaDeleteForDoc(id, 1L, seqNo, 0L));
}
seqNo++;
} catch (IOException e) {
throw new AssertionError(e);
}
}
});
indexing.start();
int numCommits = between(5, 20);
for (int i = 0; i < numCommits; i++) {
engine.flush(false, true);
}
running.set(false);
indexing.join();
rollTranslog.join();
assertMaxSeqNoInCommitUserData(engine);
}
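/**
 * With soft deletes enabled, verifies that a force merge prunes the _id terms of deleted documents
 * that are still retained as soft-deleted, while the documents themselves remain in the index.
 */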
@Test
public void testPruneAwayDeletedButRetainedIds() throws Exception {
IOUtils.close(engine, store);
Settings settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true).build();
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build());
store = createStore(indexSettings, newDirectory());
LogDocMergePolicy policy = new LogDocMergePolicy();
policy.setMinMergeDocs(10000);
try (InternalEngine engine = createEngine(indexSettings, store, createTempDir(), policy)) {
int numDocs = between(1, 20);
logger.info("" + numDocs);
for (int i = 0; i < numDocs; i++) {
index(engine, i);
}
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
engine.delete(new Engine.Delete("0", newUid("0"), primaryTerm.get()));
engine.refresh("test");
// now we have 2 segments since we added a tombstone segment plus the old segment with the delete
try (Searcher searcher = engine.acquireSearcher("test")) {
IndexReader reader = searcher.getIndexReader();
assertEquals(2, reader.leaves().size());
LeafReaderContext leafReaderContext = reader.leaves().get(0);
LeafReader leafReader = leafReaderContext.reader();
assertEquals("the delete and the tombstone", 1, leafReader.numDeletedDocs());
assertEquals(numDocs, leafReader.maxDoc());
Terms id = leafReader.terms("_id");
assertNotNull(id);
assertEquals("deleted IDs are NOT YET pruned away", reader.numDocs() + 1, id.size());
TermsEnum iterator = id.iterator();
assertTrue(iterator.seekExact(Uid.encodeId("0")));
}
// let's force merge the tombstone and the original segment and make sure the doc is still there but the ID term is gone
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
engine.refresh("test");
try (Searcher searcher = engine.acquireSearcher("test")) {
IndexReader reader = searcher.getIndexReader();
assertEquals(1, reader.leaves().size());
LeafReaderContext leafReaderContext = reader.leaves().get(0);
LeafReader leafReader = leafReaderContext.reader();
assertEquals("the delete and the tombstone", 2, leafReader.numDeletedDocs());
assertEquals(numDocs + 1, leafReader.maxDoc());
Terms id = leafReader.terms("_id");
if (numDocs == 1) {
assertNull(id); // everything is pruned away
assertEquals(0, leafReader.numDocs());
} else {
assertNotNull(id);
assertEquals("deleted IDs are pruned away", reader.numDocs(), id.size());
TermsEnum iterator = id.iterator();
assertFalse(iterator.seekExact(Uid.encodeId("0")));
}
}
}
}
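/**
 * Applies a random history, restarts the engine, and verifies that recovering from the local translog
 * restores the same documents; if the engine was fully synced, all unreferenced translog generations
 * must be trimmed after recovery.
 */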
@Test
public void testRecoverFromLocalTranslog() throws Exception {
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
Path translogPath = createTempDir();
List<Engine.Operation> operations = generateHistoryOnReplica(between(1, 500), randomBoolean(), randomBoolean());
try (Store store = createStore()) {
EngineConfig config = config(defaultSettings, store, translogPath, newMergePolicy(), null, null, globalCheckpoint::get);
final List<DocIdSeqNoAndSource> docs;
try (InternalEngine engine = createEngine(config)) {
for (Engine.Operation op : operations) {
applyOperation(engine, op);
if (randomBoolean()) {
engine.syncTranslog();
globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getPersistedLocalCheckpoint()));
}
if (randomInt(100) < 10) {
engine.refresh("test");
}
if (randomInt(100) < 5) {
engine.flush();
}
if (randomInt(100) < 5) {
engine.forceMerge(randomBoolean(), 1, false, false, false, UUIDs.randomBase64UUID());
}
}
if (randomBoolean()) {
// engine is flushed properly before shutting down.
engine.syncTranslog();
globalCheckpoint.set(engine.getPersistedLocalCheckpoint());
engine.flush();
}
docs = getDocIds(engine, true);
}
try (InternalEngine engine = new InternalEngine(config)) {
engine.onSettingsChanged(TimeValue.MINUS_ONE, ByteSizeValue.ZERO, 0);
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
assertThat(getDocIds(engine, randomBoolean()), equalTo(docs));
if (engine.getSeqNoStats(globalCheckpoint.get()).getMaxSeqNo() == globalCheckpoint.get()) {
assertThat("engine should trim all unreferenced translog after recovery",
engine.getTranslog().getMinFileGeneration(), equalTo(engine.getTranslog().currentFileGeneration()));
}
}
}
}
private Map<BytesRef, DeleteVersionValue> tombstonesInVersionMap(InternalEngine engine) {
return engine.getVersionMap().entrySet().stream()
.filter(e -> e.getValue() instanceof DeleteVersionValue)
.collect(Collectors.toMap(e -> e.getKey(), e -> (DeleteVersionValue) e.getValue()));
}
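/**
 * A document-level failure on a non-primary origin (replica, local reset, or peer recovery) must be
 * treated as fatal and fail the engine rather than being recorded as a per-document failure.
 */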
@Test
public void testTreatDocumentFailureAsFatalError() throws Exception {
AtomicReference<IOException> addDocException = new AtomicReference<>();
IndexWriterFactory indexWriterFactory = (dir, iwc) -> new IndexWriter(dir, iwc) {
@Override
public long addDocument(Iterable<? extends IndexableField> doc) throws IOException {
final IOException ex = addDocException.getAndSet(null);
if (ex != null) {
throw ex;
}
return super.addDocument(doc);
}
};
try (Store store = createStore();
InternalEngine engine = createEngine(defaultSettings,
store,
createTempDir(),
NoMergePolicy.INSTANCE,
indexWriterFactory)) {
final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null);
Engine.Operation.Origin origin = randomFrom(REPLICA, LOCAL_RESET, PEER_RECOVERY);
Engine.Index index = new Engine.Index(
newUid(doc),
doc,
randomNonNegativeLong(),
primaryTerm.get(),
randomNonNegativeLong(),
null,
origin,
System.nanoTime(),
-1,
false,
UNASSIGNED_SEQ_NO,
UNASSIGNED_PRIMARY_TERM);
addDocException.set(new IOException("simulated"));
expectThrows(IOException.class, () -> engine.index(index));
assertTrue(engine.isClosed.get());
assertNotNull(engine.failedEngine.get());
}
}
/**
* We can trim the translog on primary promotion and peer recovery based on the fact that we add operations with
* either REPLICA or PEER_RECOVERY origin to the translog although they already exist in the engine
* (i.e. hasProcessed() == true). If we decide not to add those already-processed operations to the translog,
* we need to carefully study the consequences of the translog trimming in these two places.
*/
@Test
public void testAlwaysRecordReplicaOrPeerRecoveryOperationsToTranslog() throws Exception {
List<Engine.Operation> operations = generateHistoryOnReplica(between(1, 100), randomBoolean(), randomBoolean());
applyOperations(engine, operations);
Set<Long> seqNos = operations.stream().map(Engine.Operation::seqNo).collect(Collectors.toSet());
try (Translog.Snapshot snapshot = getTranslog(engine).newSnapshot()) {
assertThat(snapshot.totalOperations(), equalTo(operations.size()));
assertThat(TestTranslog.drainSnapshot(snapshot, false).stream().map(Translog.Operation::seqNo).collect(Collectors.toSet()),
equalTo(seqNos));
}
primaryTerm.set(randomLongBetween(primaryTerm.get(), Long.MAX_VALUE));
engine.rollTranslogGeneration();
engine.trimOperationsFromTranslog(primaryTerm.get(), NO_OPS_PERFORMED); // trim everything in translog
try (Translog.Snapshot snapshot = getTranslog(engine).newSnapshot()) {
assertThat(snapshot.totalOperations(), equalTo(0));
assertNull(snapshot.next());
}
applyOperations(engine, operations);
try (Translog.Snapshot snapshot = getTranslog(engine).newSnapshot()) {
assertThat(snapshot.totalOperations(), equalTo(operations.size()));
assertThat(TestTranslog.drainSnapshot(snapshot, false).stream().map(Translog.Operation::seqNo).collect(Collectors.toSet()),
equalTo(seqNos));
}
}
@Test
public void testNoOpFailure() throws IOException {
engine.close();
final Settings settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true).build();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build());
try (Store store = createStore();
Engine engine = createEngine((dir, iwc) -> new IndexWriter(dir, iwc) {
@Override
public long addDocument(Iterable<? extends IndexableField> doc) {
throw new IllegalArgumentException("fatal");
}
}, null, null, config(indexSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null))) {
final Engine.NoOp op = new Engine.NoOp(0, 0, PRIMARY, System.currentTimeMillis(), "test");
final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> engine.noOp(op));
assertThat(e.getMessage(), equalTo("fatal"));
assertTrue(engine.isClosed.get());
assertThat(engine.failedEngine.get(), not(nullValue()));
assertThat(engine.failedEngine.get(), instanceOf(IllegalArgumentException.class));
assertThat(engine.failedEngine.get().getMessage(), equalTo("fatal"));
}
}
@Test
public void testDeleteFailureSoftDeletesEnabledDocAlreadyDeleted() throws IOException {
runTestDeleteFailure(true, InternalEngine::delete);
}
@Test
public void testDeleteFailureSoftDeletesEnabled() throws IOException {
runTestDeleteFailure(true, (engine, op) -> {});
}
@Test
public void testDeleteFailureSoftDeletesDisabled() throws IOException {
runTestDeleteFailure(false, (engine, op) -> {});
}
private void runTestDeleteFailure(
final boolean softDeletesEnabled,
final CheckedBiConsumer<InternalEngine, Engine.Delete, IOException> consumer) throws IOException {
engine.close();
final Settings settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), softDeletesEnabled).build();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build());
final AtomicReference<ThrowingIndexWriter> iw = new AtomicReference<>();
try (Store store = createStore();
InternalEngine engine = createEngine(
(dir, iwc) -> {
iw.set(new ThrowingIndexWriter(dir, iwc));
return iw.get();
},
null,
null,
config(indexSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null)
)) {
engine.index(new Engine.Index(newUid("0"), primaryTerm.get(), InternalEngineTests.createParsedDoc("0", null)));
final Engine.Delete op = new Engine.Delete("0", newUid("0"), primaryTerm.get());
consumer.accept(engine, op);
iw.get().setThrowFailure(() -> new IllegalArgumentException("fatal"));
final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> engine.delete(op));
assertThat(e.getMessage(), equalTo("fatal"));
assertTrue(engine.isClosed.get());
assertThat(engine.failedEngine.get(), not(nullValue()));
assertThat(engine.failedEngine.get(), instanceOf(IllegalArgumentException.class));
assertThat(engine.failedEngine.get().getMessage(), equalTo("fatal"));
}
}
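/**
 * Concurrent realtime gets against uncommitted documents must trigger at most one refresh in total,
 * since a single refresh makes all pending documents visible.
 */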
@Test
public void testRealtimeGetOnlyRefreshIfNeeded() throws Exception {
final AtomicInteger refreshCount = new AtomicInteger();
final ReferenceManager.RefreshListener refreshListener = new ReferenceManager.RefreshListener() {
@Override
public void beforeRefresh() {
}
@Override
public void afterRefresh(boolean didRefresh) {
if (didRefresh) {
refreshCount.incrementAndGet();
}
}
};
try (Store store = createStore()) {
final EngineConfig config = config(
defaultSettings,
store,
createTempDir(),
newMergePolicy(),
null,
refreshListener,
null,
null
);
try (InternalEngine engine = createEngine(config)) {
int numDocs = randomIntBetween(10, 100);
Set<String> ids = new HashSet<>();
for (int i = 0; i < numDocs; i++) {
String id = Integer.toString(i);
engine.index(indexForDoc(createParsedDoc(id, null)));
ids.add(id);
}
final int refreshCountBeforeGet = refreshCount.get();
Thread[] getters = new Thread[randomIntBetween(1, 4)];
Phaser phaser = new Phaser(getters.length + 1);
for (int t = 0; t < getters.length; t++) {
getters[t] = new Thread(() -> {
phaser.arriveAndAwaitAdvance();
int iters = randomIntBetween(1, 10);
for (int i = 0; i < iters; i++) {
ParsedDocument doc = createParsedDoc(randomFrom(ids), null);
try (Engine.GetResult getResult = engine.get(newGet(doc), engine::acquireSearcher)) {
assertThat(getResult.docIdAndVersion(), notNullValue());
}
}
});
getters[t].start();
}
phaser.arriveAndAwaitAdvance();
for (int i = 0; i < numDocs; i++) {
engine.index(indexForDoc(createParsedDoc("more-" + i, null)));
}
for (Thread getter : getters) {
getter.join();
}
assertThat(refreshCount.get(), lessThanOrEqualTo(refreshCountBeforeGet + 1));
}
}
}
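/**
 * A refresh that is blocked inside a refresh listener must not prevent the engine from closing;
 * the blocked refresh is expected to fail with {@link AlreadyClosedException} instead.
 */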
@Test
public void testRefreshDoesNotBlockClosing() throws Exception {
final CountDownLatch refreshStarted = new CountDownLatch(1);
final CountDownLatch engineClosed = new CountDownLatch(1);
final ReferenceManager.RefreshListener refreshListener = new ReferenceManager.RefreshListener() {
@Override
public void beforeRefresh() {
refreshStarted.countDown();
try {
engineClosed.await();
} catch (InterruptedException e) {
throw new AssertionError(e);
}
}
@Override
public void afterRefresh(boolean didRefresh) {
assertFalse(didRefresh);
}
};
try (Store store = createStore()) {
final EngineConfig config = config(
defaultSettings,
store,
createTempDir(),
newMergePolicy(),
null,
refreshListener,
null,
null
);
try (InternalEngine engine = createEngine(config)) {
if (randomBoolean()) {
engine.index(indexForDoc(createParsedDoc("id", null)));
}
threadPool.executor(ThreadPool.Names.REFRESH).execute(() ->
expectThrows(AlreadyClosedException.class,
() -> engine.refresh("test", randomFrom(Engine.SearcherScope.values()), true)));
refreshStarted.await();
engine.close();
engineClosed.countDown();
}
}
}
@Test
public void testDeleteDocumentFailuresShouldFailEngine() throws IOException {
engine.close();
final Settings settings = Settings.builder()
.put(defaultSettings.getSettings())
.build();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build());
final AtomicReference<ThrowingIndexWriter> iw = new AtomicReference<>();
try (Store store = createStore();
InternalEngine engine = createEngine(
(dir, iwc) -> {
iw.set(new ThrowingIndexWriter(dir, iwc));
return iw.get();
},
null,
null,
config(indexSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null))) {
engine.index(new Engine.Index(
newUid("0"), InternalEngineTests.createParsedDoc("0", null), UNASSIGNED_SEQ_NO, primaryTerm.get(),
Versions.MATCH_DELETED, VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0));
Engine.Delete op = new Engine.Delete(
"0",
newUid("0"),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
);
iw.get().setThrowFailure(() -> new IllegalArgumentException("fatal"));
final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> engine.delete(op));
assertThat(e.getMessage(), equalTo("fatal"));
assertThat(engine.isClosed.get(), is(true));
assertThat(engine.failedEngine.get(), not(nullValue()));
assertThat(engine.failedEngine.get(), instanceOf(IllegalArgumentException.class));
assertThat(engine.failedEngine.get().getMessage(), equalTo("fatal"));
}
}
public static <T> void assertThatIfAssertionEnabled(T actual, Matcher<? super T> matcher) {
if (InternalEngineTests.class.desiredAssertionStatus()) {
assertThat(actual, matcher);
}
}
@Test
public void testProducesStoredFieldsReader() throws Exception {
// Make sure that the engine produces a SequentialStoredFieldsLeafReader.
// This is required for optimizations on SourceLookup to work, which is in turn useful for runtime fields.
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField("test"),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
Engine.Index operation = randomBoolean() ?
appendOnlyPrimary(doc, false, 1)
: appendOnlyReplica(doc, false, 1, randomIntBetween(0, 5));
engine.index(operation);
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
IndexReader reader = searcher.getIndexReader();
assertThat(reader.leaves().size(), Matchers.greaterThanOrEqualTo(1));
for (LeafReaderContext context: reader.leaves()) {
assertThat(context.reader(), Matchers.instanceOf(SequentialStoredFieldsLeafReader.class));
SequentialStoredFieldsLeafReader lf = (SequentialStoredFieldsLeafReader) context.reader();
assertNotNull(lf.getSequentialStoredFieldsReader());
}
}
}
}
| server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.engine;
import static java.util.Collections.shuffle;
import static org.elasticsearch.index.engine.Engine.Operation.Origin.LOCAL_RESET;
import static org.elasticsearch.index.engine.Engine.Operation.Origin.LOCAL_TRANSLOG_RECOVERY;
import static org.elasticsearch.index.engine.Engine.Operation.Origin.PEER_RECOVERY;
import static org.elasticsearch.index.engine.Engine.Operation.Origin.PRIMARY;
import static org.elasticsearch.index.engine.Engine.Operation.Origin.REPLICA;
import static org.elasticsearch.index.seqno.SequenceNumbers.NO_OPS_PERFORMED;
import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_PRIMARY_TERM;
import static org.elasticsearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO;
import static org.elasticsearch.index.translog.TranslogDeletionPolicies.createTranslogDeletionPolicy;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasKey;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.isIn;
import static org.hamcrest.Matchers.lessThanOrEqualTo;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.nullValue;
import static org.hamcrest.Matchers.sameInstance;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.when;
import java.io.Closeable;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.nio.charset.Charset;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Queue;
import java.util.Set;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.Phaser;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.function.IntSupplier;
import java.util.function.LongSupplier;
import java.util.function.Supplier;
import java.util.function.ToLongBiFunction;
import java.util.stream.Collectors;
import java.util.stream.LongStream;
import com.carrotsearch.randomizedtesting.generators.RandomNumbers;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.appender.AbstractAppender;
import org.apache.logging.log4j.core.filter.RegexFilter;
import org.apache.lucene.codecs.lucene87.Lucene87StoredFieldsFormat;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.LongPoint;
import org.apache.lucene.document.NumericDocValuesField;
import org.apache.lucene.document.StoredField;
import org.apache.lucene.document.TextField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.index.IndexableField;
import org.apache.lucene.index.LeafReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.LiveIndexWriterConfig;
import org.apache.lucene.index.LogByteSizeMergePolicy;
import org.apache.lucene.index.LogDocMergePolicy;
import org.apache.lucene.index.MergePolicy;
import org.apache.lucene.index.NoMergePolicy;
import org.apache.lucene.index.NumericDocValues;
import org.apache.lucene.index.PointValues;
import org.apache.lucene.index.SegmentInfos;
import org.apache.lucene.index.SoftDeletesRetentionMergePolicy;
import org.apache.lucene.index.Term;
import org.apache.lucene.index.Terms;
import org.apache.lucene.index.TermsEnum;
import org.apache.lucene.index.TieredMergePolicy;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.ReferenceManager;
import org.apache.lucene.search.TermQuery;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TotalHitCountCollector;
import org.apache.lucene.store.AlreadyClosedException;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.MockDirectoryWrapper;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.FixedBitSet;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.Version;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.TransportActions;
import org.elasticsearch.cluster.metadata.IndexMetadata;
import org.elasticsearch.cluster.routing.IndexShardRoutingTable;
import org.elasticsearch.cluster.routing.ShardRouting;
import org.elasticsearch.cluster.routing.ShardRoutingState;
import org.elasticsearch.cluster.routing.TestShardRouting;
import org.elasticsearch.common.CheckedBiConsumer;
import org.elasticsearch.common.CheckedRunnable;
import org.elasticsearch.common.Randomness;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.TriFunction;
import org.elasticsearch.common.UUIDs;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.lucene.Lucene;
import org.elasticsearch.common.lucene.index.ElasticsearchDirectoryReader;
import org.elasticsearch.common.lucene.index.SequentialStoredFieldsLeafReader;
import org.elasticsearch.common.lucene.uid.Versions;
import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver;
import org.elasticsearch.common.lucene.uid.VersionsAndSeqNoResolver.DocIdAndSeqNo;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.VersionType;
import org.elasticsearch.index.codec.CodecService;
import org.elasticsearch.index.engine.Engine.Searcher;
import org.elasticsearch.index.fieldvisitor.FieldsVisitor;
import org.elasticsearch.index.mapper.IdFieldMapper;
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.index.mapper.ParseContext;
import org.elasticsearch.index.mapper.ParseContext.Document;
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SeqNoFieldMapper;
import org.elasticsearch.index.mapper.SourceFieldMapper;
import org.elasticsearch.index.mapper.Uid;
import org.elasticsearch.index.mapper.VersionFieldMapper;
import org.elasticsearch.index.seqno.LocalCheckpointTracker;
import org.elasticsearch.index.seqno.ReplicationTracker;
import org.elasticsearch.index.seqno.RetentionLease;
import org.elasticsearch.index.seqno.RetentionLeases;
import org.elasticsearch.index.seqno.SeqNoStats;
import org.elasticsearch.index.seqno.SequenceNumbers;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.index.shard.ShardUtils;
import org.elasticsearch.index.store.Store;
import org.elasticsearch.index.translog.SnapshotMatchers;
import org.elasticsearch.index.translog.TestTranslog;
import org.elasticsearch.index.translog.Translog;
import org.elasticsearch.index.translog.TranslogConfig;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.test.IndexSettingsModule;
import org.elasticsearch.test.VersionUtils;
import org.elasticsearch.threadpool.ThreadPool;
import org.hamcrest.Matcher;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Test;
import io.crate.common.collections.Tuple;
import io.crate.common.io.IOUtils;
import io.crate.common.unit.TimeValue;
public class InternalEngineTests extends EngineTestCase {
static final long UNSET_AUTO_GENERATED_TIMESTAMP = -1L;
@Test
public void testVersionMapAfterAutoIDDocument() throws IOException {
engine.refresh("warm_up");
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField("test"),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
Engine.Index operation = randomBoolean() ?
appendOnlyPrimary(doc, false, 1)
: appendOnlyReplica(doc, false, 1, randomIntBetween(0, 5));
engine.index(operation);
assertFalse(engine.isSafeAccessRequired());
doc = testParsedDocument("1", null, testDocumentWithTextField("updated"),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
Engine.Index update = indexForDoc(doc);
engine.index(update);
assertTrue(engine.isSafeAccessRequired());
assertThat(engine.getVersionMap().values(), hasSize(1));
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
assertEquals(0, searcher.getIndexReader().numDocs());
}
try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) {
assertEquals(1, searcher.getIndexReader().numDocs());
TopDocs search = searcher.search(new MatchAllDocsQuery(), 1);
org.apache.lucene.document.Document luceneDoc = searcher.doc(search.scoreDocs[0].doc);
assertEquals("test", luceneDoc.get("value"));
}
        // now let's make this document visible
engine.refresh("test");
if (randomBoolean()) { // random empty refresh
engine.refresh("test");
}
assertTrue("safe access should be required we carried it over", engine.isSafeAccessRequired());
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
assertEquals(1, searcher.getIndexReader().numDocs());
TopDocs search = searcher.search(new MatchAllDocsQuery(), 1);
org.apache.lucene.document.Document luceneDoc = searcher.doc(search.scoreDocs[0].doc);
assertEquals("updated", luceneDoc.get("value"));
}
doc = testParsedDocument("2", null, testDocumentWithTextField("test"),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
operation = randomBoolean() ?
appendOnlyPrimary(doc, false, 1)
: appendOnlyReplica(doc, false, 1, generateNewSeqNo(engine));
engine.index(operation);
assertTrue("safe access should be required", engine.isSafeAccessRequired());
assertThat(engine.getVersionMap().values(), hasSize(1)); // now we add this to the map
engine.refresh("test");
if (randomBoolean()) { // randomly refresh here again
engine.refresh("test");
}
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
assertEquals(2, searcher.getIndexReader().numDocs());
}
if (operation.origin() == PRIMARY) {
assertFalse("safe access should NOT be required last indexing round was only append only", engine.isSafeAccessRequired());
}
engine.delete(new Engine.Delete(
operation.id(),
operation.uid(),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
));
assertTrue("safe access should be required", engine.isSafeAccessRequired());
engine.refresh("test");
assertTrue("safe access should be required", engine.isSafeAccessRequired());
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
assertEquals(1, searcher.getIndexReader().numDocs());
}
}
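    /**
     * Tracks per-segment stats (committed/search flags, live and deleted doc counts)
     * through indexing, deleting, flushing and internal vs. external refreshes while
     * soft deletes are disabled.
     */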
@Test
public void testSegmentsWithoutSoftDeletes() throws Exception {
Settings settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false).build();
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build());
try (Store store = createStore();
InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null))) {
List<Segment> segments = engine.segments(false);
assertThat(segments.isEmpty(), equalTo(true));
// create two docs and refresh
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null);
Engine.Index first = indexForDoc(doc);
Engine.IndexResult firstResult = engine.index(first);
ParsedDocument doc2 = testParsedDocument("2", null, testDocumentWithTextField(), B_2, null);
Engine.Index second = indexForDoc(doc2);
Engine.IndexResult secondResult = engine.index(second);
assertThat(secondResult.getTranslogLocation(), greaterThan(firstResult.getTranslogLocation()));
engine.refresh("test");
segments = engine.segments(false);
assertThat(segments.size(), equalTo(1));
assertThat(segments.get(0).isCommitted(), equalTo(false));
assertThat(segments.get(0).isSearch(), equalTo(true));
assertThat(segments.get(0).getNumDocs(), equalTo(2));
assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
assertThat(segments.get(0).isCompound(), equalTo(true));
assertThat(segments.get(0).ramTree, nullValue());
assertThat(segments.get(0).getAttributes().keySet(), Matchers.contains(Lucene87StoredFieldsFormat.MODE_KEY));
engine.flush();
segments = engine.segments(false);
assertThat(segments.size(), equalTo(1));
assertThat(segments.get(0).isCommitted(), equalTo(true));
assertThat(segments.get(0).isSearch(), equalTo(true));
assertThat(segments.get(0).getNumDocs(), equalTo(2));
assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
assertThat(segments.get(0).isCompound(), equalTo(true));
ParsedDocument doc3 = testParsedDocument("3", null, testDocumentWithTextField(), B_3, null);
engine.index(indexForDoc(doc3));
engine.refresh("test");
segments = engine.segments(false);
assertThat(segments.size(), equalTo(2));
assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true));
assertThat(segments.get(0).isCommitted(), equalTo(true));
assertThat(segments.get(0).isSearch(), equalTo(true));
assertThat(segments.get(0).getNumDocs(), equalTo(2));
assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
assertThat(segments.get(0).isCompound(), equalTo(true));
assertThat(segments.get(1).isCommitted(), equalTo(false));
assertThat(segments.get(1).isSearch(), equalTo(true));
assertThat(segments.get(1).getNumDocs(), equalTo(1));
assertThat(segments.get(1).getDeletedDocs(), equalTo(0));
assertThat(segments.get(1).isCompound(), equalTo(true));
engine.delete(new Engine.Delete(
"1",
newUid(doc),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
));
engine.refresh("test");
segments = engine.segments(false);
assertThat(segments.size(), equalTo(2));
assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true));
assertThat(segments.get(0).isCommitted(), equalTo(true));
assertThat(segments.get(0).isSearch(), equalTo(true));
assertThat(segments.get(0).getNumDocs(), equalTo(1));
assertThat(segments.get(0).getDeletedDocs(), equalTo(1));
assertThat(segments.get(0).isCompound(), equalTo(true));
assertThat(segments.get(1).isCommitted(), equalTo(false));
assertThat(segments.get(1).isSearch(), equalTo(true));
assertThat(segments.get(1).getNumDocs(), equalTo(1));
assertThat(segments.get(1).getDeletedDocs(), equalTo(0));
assertThat(segments.get(1).isCompound(), equalTo(true));
engine.onSettingsChanged(indexSettings.getTranslogRetentionAge(), indexSettings.getTranslogRetentionSize(),
indexSettings.getSoftDeleteRetentionOperations());
ParsedDocument doc4 = testParsedDocument("4", null, testDocumentWithTextField(), B_3, null);
engine.index(indexForDoc(doc4));
engine.refresh("test");
segments = engine.segments(false);
assertThat(segments.size(), equalTo(3));
assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true));
assertThat(segments.get(0).isCommitted(), equalTo(true));
assertThat(segments.get(0).isSearch(), equalTo(true));
assertThat(segments.get(0).getNumDocs(), equalTo(1));
assertThat(segments.get(0).getDeletedDocs(), equalTo(1));
assertThat(segments.get(0).isCompound(), equalTo(true));
assertThat(segments.get(1).isCommitted(), equalTo(false));
assertThat(segments.get(1).isSearch(), equalTo(true));
assertThat(segments.get(1).getNumDocs(), equalTo(1));
assertThat(segments.get(1).getDeletedDocs(), equalTo(0));
assertThat(segments.get(1).isCompound(), equalTo(true));
assertThat(segments.get(2).isCommitted(), equalTo(false));
assertThat(segments.get(2).isSearch(), equalTo(true));
assertThat(segments.get(2).getNumDocs(), equalTo(1));
assertThat(segments.get(2).getDeletedDocs(), equalTo(0));
assertThat(segments.get(2).isCompound(), equalTo(true));
            // internal refresh - let's make sure we see those segments in the stats
ParsedDocument doc5 = testParsedDocument("5", null, testDocumentWithTextField(), B_3, null);
engine.index(indexForDoc(doc5));
engine.refresh("test", Engine.SearcherScope.INTERNAL, true);
segments = engine.segments(false);
assertThat(segments.size(), equalTo(4));
assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true));
assertThat(segments.get(0).isCommitted(), equalTo(true));
assertThat(segments.get(0).isSearch(), equalTo(true));
assertThat(segments.get(0).getNumDocs(), equalTo(1));
assertThat(segments.get(0).getDeletedDocs(), equalTo(1));
assertThat(segments.get(0).isCompound(), equalTo(true));
assertThat(segments.get(1).isCommitted(), equalTo(false));
assertThat(segments.get(1).isSearch(), equalTo(true));
assertThat(segments.get(1).getNumDocs(), equalTo(1));
assertThat(segments.get(1).getDeletedDocs(), equalTo(0));
assertThat(segments.get(1).isCompound(), equalTo(true));
assertThat(segments.get(2).isCommitted(), equalTo(false));
assertThat(segments.get(2).isSearch(), equalTo(true));
assertThat(segments.get(2).getNumDocs(), equalTo(1));
assertThat(segments.get(2).getDeletedDocs(), equalTo(0));
assertThat(segments.get(2).isCompound(), equalTo(true));
assertThat(segments.get(3).isCommitted(), equalTo(false));
assertThat(segments.get(3).isSearch(), equalTo(false));
assertThat(segments.get(3).getNumDocs(), equalTo(1));
assertThat(segments.get(3).getDeletedDocs(), equalTo(0));
assertThat(segments.get(3).isCompound(), equalTo(true));
// now refresh the external searcher and make sure it has the new segment
engine.refresh("test");
segments = engine.segments(false);
assertThat(segments.size(), equalTo(4));
assertThat(segments.get(0).getGeneration() < segments.get(1).getGeneration(), equalTo(true));
assertThat(segments.get(0).isCommitted(), equalTo(true));
assertThat(segments.get(0).isSearch(), equalTo(true));
assertThat(segments.get(0).getNumDocs(), equalTo(1));
assertThat(segments.get(0).getDeletedDocs(), equalTo(1));
assertThat(segments.get(0).isCompound(), equalTo(true));
assertThat(segments.get(1).isCommitted(), equalTo(false));
assertThat(segments.get(1).isSearch(), equalTo(true));
assertThat(segments.get(1).getNumDocs(), equalTo(1));
assertThat(segments.get(1).getDeletedDocs(), equalTo(0));
assertThat(segments.get(1).isCompound(), equalTo(true));
assertThat(segments.get(2).isCommitted(), equalTo(false));
assertThat(segments.get(2).isSearch(), equalTo(true));
assertThat(segments.get(2).getNumDocs(), equalTo(1));
assertThat(segments.get(2).getDeletedDocs(), equalTo(0));
assertThat(segments.get(2).isCompound(), equalTo(true));
assertThat(segments.get(3).isCommitted(), equalTo(false));
assertThat(segments.get(3).isSearch(), equalTo(true));
assertThat(segments.get(3).getNumDocs(), equalTo(1));
assertThat(segments.get(3).getDeletedDocs(), equalTo(0));
assertThat(segments.get(3).isCompound(), equalTo(true));
}
}
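    /**
     * segments(true) must populate the verbose ramTree for every segment.
     */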
@Test
public void testVerboseSegments() throws Exception {
try (Store store = createStore();
Engine engine = createEngine(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE)) {
List<Segment> segments = engine.segments(true);
assertThat(segments.isEmpty(), equalTo(true));
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null);
engine.index(indexForDoc(doc));
engine.refresh("test");
segments = engine.segments(true);
assertThat(segments.size(), equalTo(1));
assertThat(segments.get(0).ramTree, notNullValue());
ParsedDocument doc2 = testParsedDocument("2", null, testDocumentWithTextField(), B_2, null);
engine.index(indexForDoc(doc2));
engine.refresh("test");
ParsedDocument doc3 = testParsedDocument("3", null, testDocumentWithTextField(), B_3, null);
engine.index(indexForDoc(doc3));
engine.refresh("test");
segments = engine.segments(true);
assertThat(segments.size(), equalTo(3));
assertThat(segments.get(0).ramTree, notNullValue());
assertThat(segments.get(1).ramTree, notNullValue());
assertThat(segments.get(2).ramTree, notNullValue());
}
}
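    /**
     * After a force merge completes, no segment may still carry a merge id, and the
     * commit generation must advance accordingly.
     */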
@Test
public void testSegmentsWithMergeFlag() throws Exception {
try (Store store = createStore();
Engine engine = createEngine(defaultSettings, store, createTempDir(), new TieredMergePolicy())) {
ParsedDocument doc = testParsedDocument("1", null, testDocument(), B_1, null);
Engine.Index index = indexForDoc(doc);
engine.index(index);
engine.flush();
assertThat(engine.segments(false).size(), equalTo(1));
index = indexForDoc(testParsedDocument("2", null, testDocument(), B_1, null));
engine.index(index);
engine.flush();
List<Segment> segments = engine.segments(false);
assertThat(segments.size(), equalTo(2));
for (Segment segment : segments) {
assertThat(segment.getMergeId(), nullValue());
}
index = indexForDoc(testParsedDocument("3", null, testDocument(), B_1, null));
engine.index(index);
engine.flush();
segments = engine.segments(false);
assertThat(segments.size(), equalTo(3));
for (Segment segment : segments) {
assertThat(segment.getMergeId(), nullValue());
}
index = indexForDoc(doc);
engine.index(index);
engine.flush();
final long gen1 = store.readLastCommittedSegmentsInfo().getGeneration();
// now, optimize and wait for merges, see that we have no merge flag
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
for (Segment segment : engine.segments(false)) {
assertThat(segment.getMergeId(), nullValue());
}
// we could have multiple underlying merges, so the generation may increase more than once
assertTrue(store.readLastCommittedSegmentsInfo().getGeneration() > gen1);
final boolean flush = randomBoolean();
final long gen2 = store.readLastCommittedSegmentsInfo().getGeneration();
engine.forceMerge(flush, 1, false, false, false, UUIDs.randomBase64UUID());
for (Segment segment : engine.segments(false)) {
assertThat(segment.getMergeId(), nullValue());
}
if (flush) {
// we should have had just 1 merge, so last generation should be exact
assertEquals(gen2, store.readLastCommittedSegmentsInfo().getLastGeneration());
}
}
}
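    /**
     * With soft deletes enabled, updates and deletes against the first segment show up
     * there as deleted docs, while the new document versions and delete tombstones are
     * accounted to the second segment.
     */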
@Test
public void testSegmentsWithSoftDeletes() throws Exception {
Settings.Builder settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true);
final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
try (Store store = createStore();
InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null,
null, globalCheckpoint::get))) {
assertThat(engine.segments(false), empty());
int numDocsFirstSegment = randomIntBetween(5, 50);
Set<String> liveDocsFirstSegment = new HashSet<>();
for (int i = 0; i < numDocsFirstSegment; i++) {
String id = Integer.toString(i);
ParsedDocument doc = testParsedDocument(id, null, testDocument(), B_1, null);
engine.index(indexForDoc(doc));
liveDocsFirstSegment.add(id);
}
engine.refresh("test");
List<Segment> segments = engine.segments(randomBoolean());
assertThat(segments, hasSize(1));
assertThat(segments.get(0).getNumDocs(), equalTo(liveDocsFirstSegment.size()));
assertThat(segments.get(0).getDeletedDocs(), equalTo(0));
assertFalse(segments.get(0).committed);
int deletes = 0;
int updates = 0;
int appends = 0;
int iterations = scaledRandomIntBetween(1, 50);
for (int i = 0; i < iterations && liveDocsFirstSegment.isEmpty() == false; i++) {
String idToUpdate = randomFrom(liveDocsFirstSegment);
liveDocsFirstSegment.remove(idToUpdate);
ParsedDocument doc = testParsedDocument(idToUpdate, null, testDocument(), B_1, null);
if (randomBoolean()) {
engine.delete(new Engine.Delete(doc.id(), newUid(doc), primaryTerm.get()));
deletes++;
} else {
engine.index(indexForDoc(doc));
updates++;
}
if (randomBoolean()) {
engine.index(indexForDoc(testParsedDocument(UUIDs.randomBase64UUID(), null, testDocument(), B_1, null)));
appends++;
}
}
boolean committed = randomBoolean();
if (committed) {
engine.flush();
}
engine.refresh("test");
segments = engine.segments(randomBoolean());
assertThat(segments, hasSize(2));
assertThat(segments.get(0).getNumDocs(), equalTo(liveDocsFirstSegment.size()));
assertThat(segments.get(0).getDeletedDocs(), equalTo(updates + deletes));
assertThat(segments.get(0).committed, equalTo(committed));
assertThat(segments.get(1).getNumDocs(), equalTo(updates + appends));
assertThat(segments.get(1).getDeletedDocs(), equalTo(deletes)); // delete tombstones
assertThat(segments.get(1).committed, equalTo(committed));
}
}
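    /**
     * Commit stats must expose the local checkpoint and max seq no that were current at
     * flush time, plus a translog UUID that is stable across commits.
     */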
@Test
public void testCommitStats() throws IOException {
final AtomicLong maxSeqNo = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
final AtomicLong localCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
final AtomicLong globalCheckpoint = new AtomicLong(UNASSIGNED_SEQ_NO);
try (
Store store = createStore();
InternalEngine engine = createEngine(store, createTempDir(), (maxSeq, localCP) -> new LocalCheckpointTracker(
maxSeq,
localCP) {
@Override
public long getMaxSeqNo() {
return maxSeqNo.get();
}
@Override
public long getProcessedCheckpoint() {
return localCheckpoint.get();
}
}
)) {
CommitStats stats1 = engine.commitStats();
assertThat(stats1.getGeneration(), greaterThan(0L));
assertThat(stats1.getId(), notNullValue());
assertThat(stats1.getUserData(), hasKey(SequenceNumbers.LOCAL_CHECKPOINT_KEY));
assertThat(
Long.parseLong(stats1.getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)),
equalTo(SequenceNumbers.NO_OPS_PERFORMED));
assertThat(stats1.getUserData(), hasKey(SequenceNumbers.MAX_SEQ_NO));
assertThat(
Long.parseLong(stats1.getUserData().get(SequenceNumbers.MAX_SEQ_NO)),
equalTo(SequenceNumbers.NO_OPS_PERFORMED));
maxSeqNo.set(rarely() ? SequenceNumbers.NO_OPS_PERFORMED : randomIntBetween(0, 1024));
localCheckpoint.set(
rarely() || maxSeqNo.get() == SequenceNumbers.NO_OPS_PERFORMED ?
SequenceNumbers.NO_OPS_PERFORMED : randomIntBetween(0, 1024));
globalCheckpoint.set(rarely() || localCheckpoint.get() == SequenceNumbers.NO_OPS_PERFORMED ?
UNASSIGNED_SEQ_NO : randomIntBetween(0, (int) localCheckpoint.get()));
final Engine.CommitId commitId = engine.flush(true, true);
CommitStats stats2 = engine.commitStats();
assertThat(stats2.getRawCommitId(), equalTo(commitId));
assertThat(stats2.getGeneration(), greaterThan(stats1.getGeneration()));
assertThat(stats2.getId(), notNullValue());
assertThat(stats2.getId(), not(equalTo(stats1.getId())));
assertThat(stats2.getUserData(), hasKey(Translog.TRANSLOG_UUID_KEY));
assertThat(stats2.getUserData().get(Translog.TRANSLOG_UUID_KEY),
equalTo(stats1.getUserData().get(Translog.TRANSLOG_UUID_KEY)));
assertThat(Long.parseLong(stats2.getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)), equalTo(localCheckpoint.get()));
assertThat(stats2.getUserData(), hasKey(SequenceNumbers.MAX_SEQ_NO));
assertThat(Long.parseLong(stats2.getUserData().get(SequenceNumbers.MAX_SEQ_NO)), equalTo(maxSeqNo.get()));
}
}
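    /**
     * Flushing must be rejected between opening the engine and completing (or skipping)
     * translog recovery.
     */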
@Test
public void testFlushIsDisabledDuringTranslogRecovery() throws IOException {
engine.ensureCanFlush(); // recovered already
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null);
engine.index(indexForDoc(doc));
engine.close();
engine = new InternalEngine(engine.config());
expectThrows(IllegalStateException.class, engine::ensureCanFlush);
expectThrows(IllegalStateException.class, () -> engine.flush(true, true));
if (randomBoolean()) {
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
} else {
engine.skipTranslogRecovery();
}
engine.ensureCanFlush(); // ready
doc = testParsedDocument("2", null, testDocumentWithTextField(), SOURCE, null);
engine.index(indexForDoc(doc));
engine.flush();
}
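    /**
     * Replays a random sequence of index/delete operations on a single document from the
     * translog and checks that final visibility matches the last operation.
     */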
@Test
public void testTranslogMultipleOperationsSameDocument() throws IOException {
final int ops = randomIntBetween(1, 32);
Engine initialEngine;
final List<Engine.Operation> operations = new ArrayList<>();
try {
initialEngine = engine;
for (int i = 0; i < ops; i++) {
final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null);
if (randomBoolean()) {
final Engine.Index operation = new Engine.Index(
newUid(doc),
doc,
UNASSIGNED_SEQ_NO,
0,
i,
VersionType.EXTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
-1,
false,
UNASSIGNED_SEQ_NO,
0
);
operations.add(operation);
initialEngine.index(operation);
} else {
final Engine.Delete operation = new Engine.Delete(
"1",
newUid(doc),
UNASSIGNED_SEQ_NO,
0,
i,
VersionType.EXTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
);
operations.add(operation);
initialEngine.delete(operation);
}
}
} finally {
IOUtils.close(engine);
}
try (Engine recoveringEngine = new InternalEngine(engine.config())) {
recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
recoveringEngine.refresh("test");
try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) {
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(new MatchAllDocsQuery(), collector);
assertThat(collector.getTotalHits(), equalTo(operations.get(operations.size() - 1) instanceof Engine.Delete ? 0 : 1));
}
}
}
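    /**
     * Translog recovery must end with a commit of the index writer.
     */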
@Test
public void testTranslogRecoveryDoesNotReplayIntoTranslog() throws IOException {
final int docs = randomIntBetween(1, 32);
Engine initialEngine = null;
try {
initialEngine = engine;
for (int i = 0; i < docs; i++) {
final String id = Integer.toString(i);
final ParsedDocument doc = testParsedDocument(id, null, testDocumentWithTextField(), SOURCE, null);
initialEngine.index(indexForDoc(doc));
}
} finally {
IOUtils.close(initialEngine);
}
Engine recoveringEngine = null;
try {
final AtomicBoolean committed = new AtomicBoolean();
recoveringEngine = new InternalEngine(initialEngine.config()) {
@Override
protected void commitIndexWriter(IndexWriter writer, Translog translog, String syncId) throws IOException {
committed.set(true);
super.commitIndexWriter(writer, translog, syncId);
}
};
recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
assertTrue(committed.get());
} finally {
IOUtils.close(recoveringEngine);
}
}
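    /**
     * Indexes documents with shuffled sequence numbers across several translog
     * generations (via random rolls and flushes) and verifies that all of them are
     * recovered.
     */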
@Test
public void testTranslogRecoveryWithMultipleGenerations() throws IOException {
final int docs = randomIntBetween(1, 4096);
final List<Long> seqNos = LongStream.range(0, docs).boxed().collect(Collectors.toList());
Randomness.shuffle(seqNos);
Engine initialEngine = null;
Engine recoveringEngine = null;
Store store = createStore();
final AtomicInteger counter = new AtomicInteger();
try {
initialEngine = createEngine(
store,
createTempDir(),
LocalCheckpointTracker::new,
(engine, operation) -> seqNos.get(counter.getAndIncrement()));
for (int i = 0; i < docs; i++) {
final String id = Integer.toString(i);
final ParsedDocument doc = testParsedDocument(id, null, testDocumentWithTextField(), SOURCE, null);
initialEngine.index(indexForDoc(doc));
if (rarely()) {
getTranslog(initialEngine).rollGeneration();
} else if (rarely()) {
initialEngine.flush();
}
}
initialEngine.close();
recoveringEngine = new InternalEngine(initialEngine.config());
recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
recoveringEngine.refresh("test");
try (Engine.Searcher searcher = recoveringEngine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), docs);
assertEquals(docs, topDocs.totalHits.value);
}
} finally {
IOUtils.close(initialEngine, recoveringEngine, store);
}
}
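    /**
     * Recovery honours the upToSeqNo bound: replaying up to Long.MAX_VALUE restores
     * everything, while a smaller bound leaves the local checkpoint and max seq no at
     * that bound.
     */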
@Test
public void testRecoveryFromTranslogUpToSeqNo() throws IOException {
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
try (Store store = createStore()) {
EngineConfig config = config(defaultSettings, store, createTempDir(), newMergePolicy(),
null, null, globalCheckpoint::get);
final long maxSeqNo;
try (InternalEngine engine = createEngine(config)) {
final int docs = randomIntBetween(1, 100);
for (int i = 0; i < docs; i++) {
final String id = Integer.toString(i);
final ParsedDocument doc = testParsedDocument(id, null, testDocumentWithTextField(),
SOURCE, null);
engine.index(indexForDoc(doc));
if (rarely()) {
engine.rollTranslogGeneration();
} else if (rarely()) {
engine.flush(randomBoolean(), true);
}
}
maxSeqNo = engine.getLocalCheckpointTracker().getMaxSeqNo();
globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getProcessedLocalCheckpoint()));
engine.syncTranslog();
}
try (InternalEngine engine = new InternalEngine(config)) {
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
assertThat(engine.getProcessedLocalCheckpoint(), equalTo(maxSeqNo));
assertThat(engine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(maxSeqNo));
}
try (InternalEngine engine = new InternalEngine(config)) {
long upToSeqNo = randomLongBetween(globalCheckpoint.get(), maxSeqNo);
engine.recoverFromTranslog(translogHandler, upToSeqNo);
assertThat(engine.getProcessedLocalCheckpoint(), equalTo(upToSeqNo));
assertThat(engine.getLocalCheckpointTracker().getMaxSeqNo(), equalTo(upToSeqNo));
}
}
}
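    /**
     * A real-time get running concurrently with a flush must keep resolving the
     * document and never observe it as missing.
     */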
@Test
public void testConcurrentGetAndFlush() throws Exception {
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null);
engine.index(indexForDoc(doc));
final AtomicReference<Engine.GetResult> latestGetResult = new AtomicReference<>();
final BiFunction<String, Engine.SearcherScope, Searcher> searcherFactory = engine::acquireSearcher;
latestGetResult.set(engine.get(newGet(doc), searcherFactory));
final AtomicBoolean flushFinished = new AtomicBoolean(false);
final CyclicBarrier barrier = new CyclicBarrier(2);
Thread getThread = new Thread(() -> {
try {
barrier.await();
} catch (InterruptedException | BrokenBarrierException e) {
throw new RuntimeException(e);
}
while (flushFinished.get() == false) {
Engine.GetResult previousGetResult = latestGetResult.get();
if (previousGetResult != null) {
previousGetResult.close();
}
latestGetResult.set(engine.get(newGet(doc), searcherFactory));
if (latestGetResult.get().docIdAndVersion() == null) {
break;
}
}
});
getThread.start();
barrier.await();
engine.flush();
flushFinished.set(true);
getThread.join();
assertThat(latestGetResult.get().docIdAndVersion(), is(notNullValue()));
latestGetResult.get().close();
}
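    /**
     * Walks one document through the basic engine lifecycle: index, real-time get,
     * refresh visibility, update, delete, re-add and flush.
     */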
@Test
public void testSimpleOperations() throws Exception {
engine.refresh("warm_up");
Engine.Searcher searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
searchResult.close();
final BiFunction<String, Engine.SearcherScope, Searcher> searcherFactory = engine::acquireSearcher;
// create a document
Document document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE));
ParsedDocument doc = testParsedDocument("1", null, document, B_1, null);
engine.index(indexForDoc(doc));
        // it's not there...
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 0));
searchResult.close();
// we can get it in realtime
try (Engine.GetResult getResult = engine.get(newGet(doc), searcherFactory)) {
assertThat(getResult.docIdAndVersion(), is(notNullValue()));
}
// refresh and it should be there
engine.refresh("test");
        // now it's there...
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 1));
searchResult.close();
// now do an update
document = testDocument();
document.add(new TextField("value", "test1", Field.Store.YES));
document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_2), SourceFieldMapper.Defaults.FIELD_TYPE));
doc = testParsedDocument("1", null, document, B_2, null);
engine.index(indexForDoc(doc));
        // it's not updated yet...
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 0));
searchResult.close();
// but, we can still get it (in realtime)
try (Engine.GetResult getResult = engine.get(newGet(doc), searcherFactory)) {
assertThat(getResult.docIdAndVersion(), is(notNullValue()));
}
// refresh and it should be updated
engine.refresh("test");
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 0));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 1));
searchResult.close();
// now delete
engine.delete(new Engine.Delete(
"1",
newUid(doc),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
));
        // it's not deleted yet
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 0));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 1));
searchResult.close();
// but, get should not see it (in realtime)
try (Engine.GetResult getResult = engine.get(newGet(doc), searcherFactory)) {
assertThat(getResult.docIdAndVersion(), is(nullValue()));
}
// refresh and it should be deleted
engine.refresh("test");
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 0));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 0));
searchResult.close();
// add it back
document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE));
doc = testParsedDocument("1", null, document, B_1, null);
engine.index(new Engine.Index(
newUid(doc), doc, UNASSIGNED_SEQ_NO, primaryTerm.get(),
Versions.MATCH_DELETED, VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0));
        // it's not there...
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 0));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 0));
searchResult.close();
// refresh and it should be there
engine.refresh("test");
        // now it's there...
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 0));
searchResult.close();
// now flush
engine.flush();
// and, verify get (in real time)
try (Engine.GetResult getResult = engine.get(newGet(doc), searcherFactory)) {
assertThat(getResult.docIdAndVersion(), is(notNullValue()));
}
// make sure we can still work with the engine
// now do an update
document = testDocument();
document.add(new TextField("value", "test1", Field.Store.YES));
doc = testParsedDocument("1", null, document, B_1, null);
engine.index(indexForDoc(doc));
        // it's not updated yet...
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 0));
searchResult.close();
// refresh and it should be updated
engine.refresh("test");
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 0));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test1")), 1));
searchResult.close();
}
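    /**
     * An acquired searcher keeps its point-in-time view even after the document is
     * deleted and the engine refreshed; only newly acquired searchers see the delete.
     */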
    @Test
    public void testSearchResultRelease() throws Exception {
engine.refresh("warm_up");
Engine.Searcher searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
searchResult.close();
// create a document
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null);
engine.index(indexForDoc(doc));
        // it's not there...
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 0));
searchResult.close();
// refresh and it should be there
engine.refresh("test");
        // now it's there...
searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 1));
// don't release the search result yet...
// delete, refresh and do a new search, it should not be there
engine.delete(new Engine.Delete(
"1",
newUid(doc),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
));
engine.refresh("test");
Engine.Searcher updateSearchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(updateSearchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(0));
updateSearchResult.close();
        // the non-released search result should not see the delete yet...
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(new TermQuery(new Term("value", "test")), 1));
searchResult.close();
}
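    /**
     * Flushes only trim translog generations up to the global checkpoint; once the
     * checkpoint advances, the minimum retained generation catches up.
     */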
@Test
public void testCommitAdvancesMinTranslogForRecovery() throws IOException {
IOUtils.close(engine, store);
final Path translogPath = createTempDir();
store = createStore();
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
        final LongSupplier globalCheckpointSupplier = globalCheckpoint::get;
engine = createEngine(config(defaultSettings, store, translogPath, newMergePolicy(), null, null,
globalCheckpointSupplier));
engine.onSettingsChanged(TimeValue.MINUS_ONE, ByteSizeValue.ZERO, randomNonNegativeLong());
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null);
engine.index(indexForDoc(doc));
boolean inSync = randomBoolean();
if (inSync) {
engine.syncTranslog(); // to advance persisted local checkpoint
globalCheckpoint.set(engine.getPersistedLocalCheckpoint());
}
engine.flush();
assertThat(engine.getTranslog().currentFileGeneration(), equalTo(3L));
assertThat(engine.getTranslog().getMinFileGeneration(), equalTo(inSync ? 3L : 2L));
engine.flush();
assertThat(engine.getTranslog().currentFileGeneration(), equalTo(3L));
assertThat(engine.getTranslog().getMinFileGeneration(), equalTo(inSync ? 3L : 2L));
engine.flush(true, true);
assertThat(engine.getTranslog().currentFileGeneration(), equalTo(3L));
assertThat(engine.getTranslog().getMinFileGeneration(), equalTo(inSync ? 3L : 2L));
globalCheckpoint.set(engine.getPersistedLocalCheckpoint());
engine.flush(true, true);
assertThat(engine.getTranslog().currentFileGeneration(), equalTo(3L));
assertThat(engine.getTranslog().getMinFileGeneration(), equalTo(3L));
}
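    /**
     * Synced flush must fail on a commit id mismatch or on pending operations, and on
     * success record the sync id in the commit user data.
     */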
@Test
public void testSyncedFlush() throws IOException {
try (Store store = createStore();
Engine engine = createEngine(defaultSettings, store, createTempDir(), new LogByteSizeMergePolicy(), null)) {
final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20);
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null);
engine.index(indexForDoc(doc));
Engine.CommitId commitID = engine.flush();
assertThat(commitID, equalTo(new Engine.CommitId(store.readLastCommittedSegmentsInfo().getId())));
byte[] wrongBytes = Base64.getDecoder().decode(commitID.toString());
wrongBytes[0] = (byte) ~wrongBytes[0];
Engine.CommitId wrongId = new Engine.CommitId(wrongBytes);
assertEquals("should fail to sync flush with wrong id (but no docs)", engine.syncFlush(syncId + "1", wrongId),
Engine.SyncedFlushResult.COMMIT_MISMATCH);
engine.index(indexForDoc(doc));
assertEquals("should fail to sync flush with right id but pending doc",
engine.syncFlush(syncId + "2", commitID), Engine.SyncedFlushResult.PENDING_OPERATIONS);
commitID = engine.flush();
assertEquals("should succeed to flush commit with right id and no pending doc", engine.syncFlush(syncId, commitID),
Engine.SyncedFlushResult.SUCCESS);
assertEquals(store.readLastCommittedSegmentsInfo().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
}
}
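    /**
     * A sync commit can be renewed across merges as long as no new write arrived; any
     * subsequent index or delete invalidates the sync id.
     */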
@Test
public void testRenewSyncFlush() throws Exception {
final int iters = randomIntBetween(2, 5); // run this a couple of times to get some coverage
for (int i = 0; i < iters; i++) {
try (Store store = createStore();
InternalEngine engine =
createEngine(config(defaultSettings, store, createTempDir(), new LogDocMergePolicy(), null))) {
final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20);
Engine.Index doc1 =
indexForDoc(testParsedDocument("1", null, testDocumentWithTextField(), B_1, null));
engine.index(doc1);
assertEquals(engine.getLastWriteNanos(), doc1.startTime());
engine.flush();
Engine.Index doc2 =
indexForDoc(testParsedDocument("2", null, testDocumentWithTextField(), B_1, null));
engine.index(doc2);
assertEquals(engine.getLastWriteNanos(), doc2.startTime());
engine.flush();
final boolean forceMergeFlushes = randomBoolean();
final ParsedDocument parsedDoc3 =
testParsedDocument("3", null, testDocumentWithTextField(), B_1, null);
if (forceMergeFlushes) {
engine.index(new Engine.Index(newUid(parsedDoc3), parsedDoc3, UNASSIGNED_SEQ_NO, 0,
Versions.MATCH_ANY, VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY,
System.nanoTime() - engine.engineConfig.getFlushMergesAfter().nanos(),
-1, false, UNASSIGNED_SEQ_NO, 0));
} else {
engine.index(indexForDoc(parsedDoc3));
}
Engine.CommitId commitID = engine.flush();
assertEquals("should succeed to flush commit with right id and no pending doc", engine.syncFlush(syncId, commitID),
Engine.SyncedFlushResult.SUCCESS);
assertEquals(3, engine.segments(false).size());
engine.forceMerge(forceMergeFlushes, 1, false, false, false, UUIDs.randomBase64UUID());
if (forceMergeFlushes == false) {
engine.refresh("make all segments visible");
assertEquals(4, engine.segments(false).size());
assertEquals(store.readLastCommittedSegmentsInfo().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
assertTrue(engine.tryRenewSyncCommit());
assertEquals(1, engine.segments(false).size());
} else {
engine.refresh("test");
assertBusy(() -> assertEquals(1, engine.segments(false).size()));
}
assertEquals(store.readLastCommittedSegmentsInfo().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
if (randomBoolean()) {
Engine.Index doc4 =
indexForDoc(testParsedDocument("4", null, testDocumentWithTextField(), B_1, null));
engine.index(doc4);
assertEquals(engine.getLastWriteNanos(), doc4.startTime());
} else {
Engine.Delete delete = new Engine.Delete(
doc1.id(),
doc1.uid(),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
);
engine.delete(delete);
assertEquals(engine.getLastWriteNanos(), delete.startTime());
}
assertFalse(engine.tryRenewSyncCommit());
// we might hit a concurrent flush from a finishing merge here - just wait if ongoing...
engine.flush(false, true);
assertNull(store.readLastCommittedSegmentsInfo().getUserData().get(Engine.SYNC_COMMIT_ID));
assertNull(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID));
}
}
}
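    /**
     * The sync id written by a synced flush must survive closing and reopening the
     * engine, even when the translog is replaced with an empty one.
     */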
@Test
public void testSyncedFlushSurvivesEngineRestart() throws IOException {
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
IOUtils.close(store, engine);
store = createStore();
engine = createEngine(store, primaryTranslogDir, globalCheckpoint::get);
final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20);
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(),
new BytesArray("{}"), null);
engine.index(indexForDoc(doc));
globalCheckpoint.set(0L);
final Engine.CommitId commitID = engine.flush();
assertEquals("should succeed to flush commit with right id and no pending doc", engine.syncFlush(syncId, commitID),
Engine.SyncedFlushResult.SUCCESS);
assertEquals(store.readLastCommittedSegmentsInfo().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
EngineConfig config = engine.config();
if (randomBoolean()) {
engine.close();
} else {
engine.flushAndClose();
}
if (randomBoolean()) {
final String translogUUID = Translog.createEmptyTranslog(config.getTranslogConfig().getTranslogPath(),
UNASSIGNED_SEQ_NO, shardId, primaryTerm.get());
store.associateIndexWithNewTranslog(translogUUID);
}
engine = new InternalEngine(config);
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
}
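    /**
     * The sync id must be removed when the reopened engine has to replay operations
     * from the translog.
     */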
@Test
public void testSyncedFlushVanishesOnReplay() throws IOException {
final String syncId = randomUnicodeOfCodepointLengthBetween(10, 20);
ParsedDocument doc = testParsedDocument("1", null,
testDocumentWithTextField(), new BytesArray("{}"), null);
engine.index(indexForDoc(doc));
final Engine.CommitId commitID = engine.flush();
assertEquals("should succeed to flush commit with right id and no pending doc", engine.syncFlush(syncId, commitID),
Engine.SyncedFlushResult.SUCCESS);
assertEquals(store.readLastCommittedSegmentsInfo().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
assertEquals(engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID), syncId);
doc = testParsedDocument("2", null, testDocumentWithTextField(), new BytesArray("{}"), null);
engine.index(indexForDoc(doc));
EngineConfig config = engine.config();
engine.close();
engine = new InternalEngine(config);
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
assertNull("Sync ID must be gone since we have a document to replay",
engine.getLastCommittedSegmentInfos().getUserData().get(Engine.SYNC_COMMIT_ID));
}
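    /**
     * A create on the primary gets version 1, and replicating that result yields the
     * same version on the replica.
     */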
@Test
public void testVersioningNewCreate() throws IOException {
ParsedDocument doc = testParsedDocument("1", null, testDocument(), B_1, null);
Engine.Index create = new Engine.Index(
newUid(doc), doc, UNASSIGNED_SEQ_NO, primaryTerm.get(),
Versions.MATCH_DELETED, VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult indexResult = engine.index(create);
assertThat(indexResult.getVersion(), equalTo(1L));
create = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), create.primaryTerm(), indexResult.getVersion(),
null, REPLICA, 0, -1, false, UNASSIGNED_SEQ_NO, 0);
indexResult = replicaEngine.index(create);
assertThat(indexResult.getVersion(), equalTo(1L));
}
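    /**
     * Versioned create and update survive interleaved flushes on primary and replica,
     * leaving exactly one live copy of the document on both.
     */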
@Test
public void testReplicatedVersioningWithFlush() throws IOException {
ParsedDocument doc = testParsedDocument("1", null, testDocument(), B_1, null);
Engine.Index create = new Engine.Index(
newUid(doc), doc, UNASSIGNED_SEQ_NO, primaryTerm.get(),
Versions.MATCH_DELETED, VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult indexResult = engine.index(create);
assertThat(indexResult.getVersion(), equalTo(1L));
assertTrue(indexResult.isCreated());
create = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), create.primaryTerm(), indexResult.getVersion(),
null, REPLICA, 0, -1, false, UNASSIGNED_SEQ_NO, 0);
indexResult = replicaEngine.index(create);
assertThat(indexResult.getVersion(), equalTo(1L));
assertTrue(indexResult.isCreated());
if (randomBoolean()) {
engine.flush();
}
if (randomBoolean()) {
replicaEngine.flush();
}
Engine.Index update = new Engine.Index(
newUid(doc), doc, UNASSIGNED_SEQ_NO, primaryTerm.get(),
1, VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult updateResult = engine.index(update);
assertThat(updateResult.getVersion(), equalTo(2L));
assertFalse(updateResult.isCreated());
update = new Engine.Index(newUid(doc), doc, updateResult.getSeqNo(), update.primaryTerm(), updateResult.getVersion(),
null, REPLICA, 0, -1, false, UNASSIGNED_SEQ_NO, 0);
updateResult = replicaEngine.index(update);
assertThat(updateResult.getVersion(), equalTo(2L));
assertFalse(updateResult.isCreated());
replicaEngine.refresh("test");
try (Searcher searcher = replicaEngine.acquireSearcher("test")) {
assertEquals(1, searcher.getDirectoryReader().numDocs());
}
engine.refresh("test");
try (Searcher searcher = engine.acquireSearcher("test")) {
assertEquals(1, searcher.getDirectoryReader().numDocs());
}
}
/**
* simulates what an upsert / update API does
*/
@Test
public void testVersionedUpdate() throws IOException {
final BiFunction<String, Engine.SearcherScope, Searcher> searcherFactory = engine::acquireSearcher;
ParsedDocument doc = testParsedDocument("1", null, testDocument(), B_1, null);
Engine.Index create = new Engine.Index(
newUid(doc), doc, UNASSIGNED_SEQ_NO, primaryTerm.get(),
Versions.MATCH_DELETED, VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult indexResult = engine.index(create);
assertThat(indexResult.getVersion(), equalTo(1L));
try (Engine.GetResult get = engine.get(new Engine.Get(doc.id(), create.uid()), searcherFactory)) {
assertEquals(1, get.docIdAndVersion().version);
}
Engine.Index update_1 = new Engine.Index(
newUid(doc), doc, UNASSIGNED_SEQ_NO, primaryTerm.get(),
1, VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult update_1_result = engine.index(update_1);
assertThat(update_1_result.getVersion(), equalTo(2L));
try (Engine.GetResult get = engine.get(new Engine.Get(doc.id(), create.uid()), searcherFactory)) {
assertEquals(2, get.docIdAndVersion().version);
}
Engine.Index update_2 = new Engine.Index(
newUid(doc), doc, UNASSIGNED_SEQ_NO, primaryTerm.get(),
2, VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult update_2_result = engine.index(update_2);
assertThat(update_2_result.getVersion(), equalTo(3L));
try (Engine.GetResult get = engine.get(new Engine.Get(doc.id(), create.uid()), searcherFactory)) {
assertEquals(3, get.docIdAndVersion().version);
}
}
@Test
public void testVersioningNewIndex() throws IOException {
ParsedDocument doc = testParsedDocument("1", null, testDocument(), B_1, null);
Engine.Index index = indexForDoc(doc);
Engine.IndexResult indexResult = engine.index(index);
assertThat(indexResult.getVersion(), equalTo(1L));
index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(),
null, REPLICA, 0, -1, false, UNASSIGNED_SEQ_NO, 0);
indexResult = replicaEngine.index(index);
assertThat(indexResult.getVersion(), equalTo(1L));
}
/*
* we are testing an edge case here where we have a fully deleted segment that is retained but has all its IDs pruned away.
*/
@Test
public void testLookupVersionWithPrunedAwayIds() throws IOException {
try (Directory dir = newDirectory()) {
IndexWriterConfig indexWriterConfig = new IndexWriterConfig(Lucene.STANDARD_ANALYZER);
indexWriterConfig.setSoftDeletesField(Lucene.SOFT_DELETES_FIELD);
try (IndexWriter writer = new IndexWriter(dir,
indexWriterConfig.setMergePolicy(new SoftDeletesRetentionMergePolicy(Lucene.SOFT_DELETES_FIELD,
MatchAllDocsQuery::new, new PrunePostingsMergePolicy(indexWriterConfig.getMergePolicy(), "_id"))))) {
org.apache.lucene.document.Document doc = new org.apache.lucene.document.Document();
doc.add(new Field(IdFieldMapper.NAME, "1", IdFieldMapper.Defaults.FIELD_TYPE));
doc.add(new NumericDocValuesField(VersionFieldMapper.NAME, -1));
doc.add(new NumericDocValuesField(SeqNoFieldMapper.NAME, 1));
doc.add(new NumericDocValuesField(SeqNoFieldMapper.PRIMARY_TERM_NAME, 1));
writer.addDocument(doc);
writer.flush();
writer.softUpdateDocument(new Term(IdFieldMapper.NAME, "1"), doc, new NumericDocValuesField(Lucene.SOFT_DELETES_FIELD, 1));
writer.updateNumericDocValue(new Term(IdFieldMapper.NAME, "1"), Lucene.SOFT_DELETES_FIELD, 1);
writer.forceMerge(1);
try (DirectoryReader reader = DirectoryReader.open(writer)) {
assertEquals(1, reader.leaves().size());
assertNull(VersionsAndSeqNoResolver.loadDocIdAndVersion(reader, new Term(IdFieldMapper.NAME, "1"), false));
}
}
}
}
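/**
 * Updates every doc once so that, with soft-deletes retention enabled, segments can become
 * fully deleted yet still be retained; there are no explicit assertions here, the test
 * passes as long as no internal engine invariant trips.
 */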
@Test
public void testUpdateWithFullyDeletedSegments() throws IOException {
Settings.Builder settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
.put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), Integer.MAX_VALUE);
final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
final Set<String> liveDocs = new HashSet<>();
try (Store store = createStore();
InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), newMergePolicy(), null,
null, globalCheckpoint::get))) {
int numDocs = scaledRandomIntBetween(10, 100);
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null);
engine.index(indexForDoc(doc));
liveDocs.add(doc.id());
}
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null);
engine.index(indexForDoc(doc));
liveDocs.add(doc.id());
}
}
}
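/**
 * Force-merges with a random soft-deletes retention and verifies that every operation at or
 * above the minimum sequence number to retain survives in Lucene, then drops retention to
 * zero and checks that only the live docs remain.
 */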
@Test
public void testForceMergeWithSoftDeletesRetention() throws Exception {
final long retainedExtraOps = randomLongBetween(0, 10);
Settings.Builder settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
.put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), retainedExtraOps);
final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
final MapperService mapperService = createMapperService("test");
final Set<String> liveDocs = new HashSet<>();
try (Store store = createStore();
InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), newMergePolicy(), null,
null, globalCheckpoint::get))) {
int numDocs = scaledRandomIntBetween(10, 100);
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null);
engine.index(indexForDoc(doc));
liveDocs.add(doc.id());
}
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null);
if (randomBoolean()) {
engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get()));
liveDocs.remove(doc.id());
}
if (randomBoolean()) {
engine.index(indexForDoc(doc));
liveDocs.add(doc.id());
}
if (randomBoolean()) {
engine.flush(randomBoolean(), true);
}
}
engine.flush();
long localCheckpoint = engine.getProcessedLocalCheckpoint();
globalCheckpoint.set(randomLongBetween(0, localCheckpoint));
engine.syncTranslog();
final long safeCommitCheckpoint;
try (Engine.IndexCommitRef safeCommit = engine.acquireSafeIndexCommit()) {
safeCommitCheckpoint = Long.parseLong(safeCommit.getIndexCommit().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY));
}
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService);
Map<Long, Translog.Operation> ops = readAllOperationsInLucene(engine, mapperService)
.stream().collect(Collectors.toMap(Translog.Operation::seqNo, Function.identity()));
for (long seqno = 0; seqno <= localCheckpoint; seqno++) {
long minSeqNoToRetain = Math.min(globalCheckpoint.get() + 1 - retainedExtraOps,
safeCommitCheckpoint + 1);
String msg = "seq# [" + seqno + "], global checkpoint [" + globalCheckpoint + "], retained-ops [" +
retainedExtraOps + "]";
if (seqno < minSeqNoToRetain) {
Translog.Operation op = ops.get(seqno);
if (op != null) {
assertThat(op, instanceOf(Translog.Index.class));
assertThat(msg, ((Translog.Index) op).id(), isIn(liveDocs));
assertEquals(msg, ((Translog.Index) op).source(), B_1);
}
} else {
assertThat(msg, ops.get(seqno), notNullValue());
}
}
settings.put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), 0);
indexSettings.updateIndexMetadata(IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(
settings).build());
engine.onSettingsChanged(indexSettings.getTranslogRetentionAge(), indexSettings.getTranslogRetentionSize(),
indexSettings.getSoftDeleteRetentionOperations());
globalCheckpoint.set(localCheckpoint);
engine.syncTranslog();
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService);
assertThat(readAllOperationsInLucene(engine, mapperService), hasSize(liveDocs.size()));
}
}
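/**
 * Like the retention test above, but with some docs carrying only a recovery source;
 * verifies that _recovery_source is pruned by merges once operations fall out of retention.
 */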
@Test
public void testForceMergeWithSoftDeletesRetentionAndRecoverySource() throws Exception {
final long retainedExtraOps = randomLongBetween(0, 10);
Settings.Builder settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
.put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), retainedExtraOps);
final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
final MapperService mapperService = createMapperService("test");
final boolean omitSourceAllTheTime = randomBoolean();
final Set<String> liveDocs = new HashSet<>();
final Set<String> liveDocsWithSource = new HashSet<>();
try (Store store = createStore();
InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), newMergePolicy(), null,
null,
globalCheckpoint::get))) {
int numDocs = scaledRandomIntBetween(10, 100);
for (int i = 0; i < numDocs; i++) {
boolean useRecoverySource = randomBoolean() || omitSourceAllTheTime;
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null,
useRecoverySource);
engine.index(indexForDoc(doc));
liveDocs.add(doc.id());
if (useRecoverySource == false) {
liveDocsWithSource.add(Integer.toString(i));
}
}
for (int i = 0; i < numDocs; i++) {
boolean useRecoverySource = randomBoolean() || omitSourceAllTheTime;
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1, null,
useRecoverySource);
if (randomBoolean()) {
engine.delete(new Engine.Delete(doc.id(), newUid(doc.id()), primaryTerm.get()));
liveDocs.remove(doc.id());
liveDocsWithSource.remove(doc.id());
}
if (randomBoolean()) {
engine.index(indexForDoc(doc));
liveDocs.add(doc.id());
if (useRecoverySource == false) {
liveDocsWithSource.add(doc.id());
} else {
liveDocsWithSource.remove(doc.id());
}
}
if (randomBoolean()) {
engine.flush(randomBoolean(), true);
}
}
engine.flush();
globalCheckpoint.set(randomLongBetween(0, engine.getPersistedLocalCheckpoint()));
engine.syncTranslog();
final long minSeqNoToRetain;
try (Engine.IndexCommitRef safeCommit = engine.acquireSafeIndexCommit()) {
long safeCommitLocalCheckpoint = Long.parseLong(
safeCommit.getIndexCommit().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY));
minSeqNoToRetain = Math.min(globalCheckpoint.get() + 1 - retainedExtraOps,
safeCommitLocalCheckpoint + 1);
}
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService);
Map<Long, Translog.Operation> ops = readAllOperationsInLucene(engine, mapperService)
.stream().collect(Collectors.toMap(Translog.Operation::seqNo, Function.identity()));
for (long seqno = 0; seqno <= engine.getPersistedLocalCheckpoint(); seqno++) {
String msg = "seq# [" + seqno + "], global checkpoint [" + globalCheckpoint + "], retained-ops [" +
retainedExtraOps + "]";
if (seqno < minSeqNoToRetain) {
Translog.Operation op = ops.get(seqno);
if (op != null) {
assertThat(op, instanceOf(Translog.Index.class));
assertThat(msg, ((Translog.Index) op).id(), isIn(liveDocs));
}
} else {
Translog.Operation op = ops.get(seqno);
assertThat(msg, op, notNullValue());
if (op instanceof Translog.Index) {
assertEquals(msg, ((Translog.Index) op).source(), B_1);
}
}
}
settings.put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), 0);
indexSettings.updateIndexMetadata(IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(
settings).build());
engine.onSettingsChanged(indexSettings.getTranslogRetentionAge(), indexSettings.getTranslogRetentionSize(),
indexSettings.getSoftDeleteRetentionOperations());
// If we already merged down to 1 segment, then the next force-merge will be a noop. We need to add an extra segment to make
// merges happen so we can verify that _recovery_source are pruned. See: https://github.com/elastic/elasticsearch/issues/41628.
final int numSegments;
try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) {
numSegments = searcher.getDirectoryReader().leaves().size();
}
if (numSegments == 1) {
boolean useRecoverySource = randomBoolean() || omitSourceAllTheTime;
ParsedDocument doc = testParsedDocument("dummy", null, testDocument(), B_1, null, useRecoverySource);
engine.index(indexForDoc(doc));
if (useRecoverySource == false) {
liveDocsWithSource.add(doc.id());
}
engine.syncTranslog();
globalCheckpoint.set(engine.getPersistedLocalCheckpoint());
engine.flush(randomBoolean(), true);
} else {
globalCheckpoint.set(engine.getPersistedLocalCheckpoint());
engine.syncTranslog();
}
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService);
assertThat(readAllOperationsInLucene(engine, mapperService), hasSize(liveDocsWithSource.size()));
}
}
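/**
 * Runs force-merges concurrently with indexing and then closes the engine while the
 * background thread may still be merging; an AlreadyClosedException is expected and tolerated.
 */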
@Test
public void testForceMergeAndClose() throws IOException, InterruptedException {
int numIters = randomIntBetween(2, 10);
for (int j = 0; j < numIters; j++) {
try (Store store = createStore()) {
final InternalEngine engine = createEngine(store, createTempDir());
final CountDownLatch startGun = new CountDownLatch(1);
final CountDownLatch indexed = new CountDownLatch(1);
Thread thread = new Thread() {
@Override
public void run() {
try {
try {
startGun.await();
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
int i = 0;
while (true) {
int numDocs = randomIntBetween(1, 20);
for (int j = 0; j < numDocs; j++) {
i++;
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), B_1,
null);
Engine.Index index = indexForDoc(doc);
engine.index(index);
}
engine.refresh("test");
indexed.countDown();
try {
engine.forceMerge(
randomBoolean(),
1,
false,
randomBoolean(),
randomBoolean(),
UUIDs.randomBase64UUID()
);
} catch (IOException e) {
return;
}
}
} catch (AlreadyClosedException ex) {
// fine
} catch (IOException e) {
throw new AssertionError(e);
}
}
};
thread.start();
startGun.countDown();
int someIters = randomIntBetween(1, 10);
for (int i = 0; i < someIters; i++) {
engine.forceMerge(randomBoolean(), 1, false, randomBoolean(), randomBoolean(), UUIDs.randomBase64UUID());
}
indexed.await();
IOUtils.close(engine);
thread.join();
}
}
}
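/**
 * A second create (MATCH_DELETED) for an already existing doc must fail with a version conflict.
 */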
@Test
public void testVersioningCreateExistsException() throws IOException {
ParsedDocument doc = testParsedDocument("1", null, testDocument(), B_1, null);
Engine.Index create = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0,
Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, 0, -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult indexResult = engine.index(create);
assertThat(indexResult.getVersion(), equalTo(1L));
create = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0, Versions.MATCH_DELETED,
VersionType.INTERNAL, PRIMARY, 0, -1, false, UNASSIGNED_SEQ_NO, 0);
indexResult = engine.index(create);
assertThat(indexResult.getResultType(), equalTo(Engine.Result.Type.FAILURE));
assertThat(indexResult.getFailure(), instanceOf(VersionConflictEngineException.class));
}
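/**
 * Replays a single-doc history on the replica in arbitrary order; the replica must converge
 * to the same result regardless of the order in which operations arrive.
 */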
@Test
public void testOutOfOrderDocsOnReplica() throws IOException {
final List<Engine.Operation> ops = generateSingleDocHistory(
true, randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL, VersionType.EXTERNAL_GTE, VersionType.FORCE),
false, 2, 2, 20, "1");
assertOpsOnReplica(ops, replicaEngine, true, logger);
}
@Test
public void testConcurrentOutOfOrderDocsOnReplica() throws IOException, InterruptedException {
final List<Engine.Operation> opsDoc1 = generateSingleDocHistory(
true, randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL), false, 2, 100, 300, "1");
final Engine.Operation lastOpDoc1 = opsDoc1.get(opsDoc1.size() - 1);
final String lastFieldValueDoc1;
if (lastOpDoc1 instanceof Engine.Index) {
Engine.Index index = (Engine.Index) lastOpDoc1;
lastFieldValueDoc1 = index.docs().get(0).get("value");
} else {
// delete
lastFieldValueDoc1 = null;
}
final List<Engine.Operation> opsDoc2 =
generateSingleDocHistory(
true, randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL), false, 2, 100, 300, "2");
final Engine.Operation lastOpDoc2 = opsDoc2.get(opsDoc2.size() - 1);
final String lastFieldValueDoc2;
if (lastOpDoc2 instanceof Engine.Index) {
Engine.Index index = (Engine.Index) lastOpDoc2;
lastFieldValueDoc2 = index.docs().get(0).get("value");
} else {
// delete
lastFieldValueDoc2 = null;
}
// randomly interleave
final AtomicLong seqNoGenerator = new AtomicLong();
BiFunction<Engine.Operation, Long, Engine.Operation> seqNoUpdater = (operation, newSeqNo) -> {
if (operation instanceof Engine.Index) {
Engine.Index index = (Engine.Index) operation;
Document doc = testDocumentWithTextField(index.docs().get(0).get("value"));
ParsedDocument parsedDocument = testParsedDocument(index.id(), index.routing(), doc, index.source(), null);
return new Engine.Index(index.uid(), parsedDocument, newSeqNo, index.primaryTerm(), index.version(),
index.versionType(), index.origin(), index.startTime(), index.getAutoGeneratedIdTimestamp(), index.isRetry(),
UNASSIGNED_SEQ_NO, 0);
} else {
Engine.Delete delete = (Engine.Delete) operation;
return new Engine.Delete(
delete.id(),
delete.uid(),
newSeqNo,
delete.primaryTerm(),
delete.version(),
delete.versionType(),
delete.origin(),
delete.startTime(),
UNASSIGNED_SEQ_NO,
0
);
}
};
final List<Engine.Operation> allOps = new ArrayList<>();
Iterator<Engine.Operation> iter1 = opsDoc1.iterator();
Iterator<Engine.Operation> iter2 = opsDoc2.iterator();
while (iter1.hasNext() && iter2.hasNext()) {
final Engine.Operation next = randomBoolean() ? iter1.next() : iter2.next();
allOps.add(seqNoUpdater.apply(next, seqNoGenerator.getAndIncrement()));
}
iter1.forEachRemaining(o -> allOps.add(seqNoUpdater.apply(o, seqNoGenerator.getAndIncrement())));
iter2.forEachRemaining(o -> allOps.add(seqNoUpdater.apply(o, seqNoGenerator.getAndIncrement())));
// insert some duplicates
randomSubsetOf(allOps).forEach(op -> allOps.add(seqNoUpdater.apply(op, op.seqNo())));
shuffle(allOps, random());
concurrentlyApplyOps(allOps, engine);
engine.refresh("test");
if (lastFieldValueDoc1 != null) {
try (Searcher searcher = engine.acquireSearcher("test")) {
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(new TermQuery(new Term("value", lastFieldValueDoc1)), collector);
assertThat(collector.getTotalHits(), equalTo(1));
}
}
if (lastFieldValueDoc2 != null) {
try (Searcher searcher = engine.acquireSearcher("test")) {
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(new TermQuery(new Term("value", lastFieldValueDoc2)), collector);
assertThat(collector.getTotalHits(), equalTo(1));
}
}
int totalExpectedOps = 0;
if (lastFieldValueDoc1 != null) {
totalExpectedOps++;
}
if (lastFieldValueDoc2 != null) {
totalExpectedOps++;
}
assertVisibleCount(engine, totalExpectedOps);
}
@Test
public void testInternalVersioningOnPrimary() throws IOException {
final List<Engine.Operation> ops = generateSingleDocHistory(
false, VersionType.INTERNAL, false, 2, 2, 20, "1");
assertOpsOnPrimary(ops, Versions.NOT_FOUND, true, engine);
}
@Test
public void testVersionOnPrimaryWithConcurrentRefresh() throws Exception {
List<Engine.Operation> ops = generateSingleDocHistory(
false, VersionType.INTERNAL, false, 2, 10, 100, "1");
CountDownLatch latch = new CountDownLatch(1);
AtomicBoolean running = new AtomicBoolean(true);
Thread refreshThread = new Thread(() -> {
latch.countDown();
while (running.get()) {
engine.refresh("test");
}
});
refreshThread.start();
try {
latch.await();
assertOpsOnPrimary(ops, Versions.NOT_FOUND, true, engine);
} finally {
running.set(false);
refreshThread.join();
}
}
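/**
 * Replays the given operations on a primary engine, randomly issuing them as versioned,
 * seqno-conditioned, or plain writes and occasionally injecting deliberate version
 * conflicts, and returns the number of operations that were actually applied.
 */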
private int assertOpsOnPrimary(List<Engine.Operation> ops,
long currentOpVersion,
boolean docDeleted,
InternalEngine engine)
throws IOException {
String lastFieldValue = null;
int opsPerformed = 0;
long lastOpVersion = currentOpVersion;
long lastOpSeqNo = UNASSIGNED_SEQ_NO;
long lastOpTerm = UNASSIGNED_PRIMARY_TERM;
PrimaryTermSupplier currentTerm = (PrimaryTermSupplier) engine.engineConfig.getPrimaryTermSupplier();
BiFunction<Long, Engine.Index, Engine.Index> indexWithVersion = (version, index) -> new Engine.Index(
index.uid(),
index.parsedDoc(),
UNASSIGNED_SEQ_NO,
currentTerm.get(),
version,
index.versionType(),
index.origin(),
index.startTime(),
index.getAutoGeneratedIdTimestamp(),
index.isRetry(),
UNASSIGNED_SEQ_NO,
0);
BiFunction<Long, Engine.Delete, Engine.Delete> delWithVersion = (version, delete) -> new Engine.Delete(
delete.id(),
delete.uid(),
UNASSIGNED_SEQ_NO,
currentTerm.get(),
version,
delete.versionType(),
delete.origin(),
delete.startTime(),
UNASSIGNED_SEQ_NO,
0);
TriFunction<Long, Long, Engine.Index, Engine.Index> indexWithSeq = (seqNo, term, index) -> new Engine.Index(
index.uid(),
index.parsedDoc(),
UNASSIGNED_SEQ_NO,
currentTerm.get(),
index.version(),
index.versionType(),
index.origin(),
index.startTime(),
index.getAutoGeneratedIdTimestamp(),
index.isRetry(),
seqNo,
term);
TriFunction<Long, Long, Engine.Delete, Engine.Delete> delWithSeq = (seqNo, term, delete) -> new Engine.Delete(
delete.id(),
delete.uid(),
UNASSIGNED_SEQ_NO,
currentTerm.get(),
delete.version(),
delete.versionType(),
delete.origin(),
delete.startTime(),
seqNo,
term);
Function<Engine.Index, Engine.Index> indexWithCurrentTerm = index -> new Engine.Index(
index.uid(),
index.parsedDoc(),
UNASSIGNED_SEQ_NO,
currentTerm.get(),
index.version(),
index.versionType(),
index.origin(),
index.startTime(),
index.getAutoGeneratedIdTimestamp(),
index.isRetry(),
index.getIfSeqNo(),
index.getIfPrimaryTerm());
Function<Engine.Delete, Engine.Delete> deleteWithCurrentTerm = delete -> new Engine.Delete(
delete.id(),
delete.uid(),
UNASSIGNED_SEQ_NO,
currentTerm.get(),
delete.version(),
delete.versionType(),
delete.origin(),
delete.startTime(),
delete.getIfSeqNo(),
delete.getIfPrimaryTerm());
for (Engine.Operation op : ops) {
final boolean versionConflict = rarely();
final boolean versionedOp = versionConflict || randomBoolean();
final long conflictingVersion = docDeleted || randomBoolean() ?
lastOpVersion + (randomBoolean() ? 1 : -1) :
Versions.MATCH_DELETED;
final long conflictingSeqNo = lastOpSeqNo == UNASSIGNED_SEQ_NO || randomBoolean() ?
lastOpSeqNo + 5 : // use +5 to stay above 0 and clear of magic sequence numbers
lastOpSeqNo;
final long conflictingTerm = conflictingSeqNo == lastOpSeqNo || randomBoolean() ? lastOpTerm + 1 : lastOpTerm;
if (rarely()) {
currentTerm.set(currentTerm.get() + 1L);
engine.rollTranslogGeneration();
}
final long correctVersion = docDeleted ? Versions.MATCH_DELETED : lastOpVersion;
logger.info("performing [{}]{}{}",
op.operationType().name().charAt(0),
versionConflict ? " (conflict " + conflictingVersion + ")" : "",
versionedOp ? " (versioned " + correctVersion + ", seqNo " + lastOpSeqNo + ", term " + lastOpTerm + " )" : "");
if (op instanceof Engine.Index) {
final Engine.Index index = (Engine.Index) op;
if (versionConflict) {
// generate a conflict
final Engine.IndexResult result;
if (randomBoolean()) {
result = engine.index(indexWithSeq.apply(conflictingSeqNo, conflictingTerm, index));
} else {
result = engine.index(indexWithVersion.apply(conflictingVersion, index));
}
assertThat(result.isCreated(), equalTo(false));
assertThat(result.getVersion(), equalTo(lastOpVersion));
assertThat(result.getResultType(), equalTo(Engine.Result.Type.FAILURE));
assertThat(result.getFailure(), instanceOf(VersionConflictEngineException.class));
} else {
final Engine.IndexResult result;
if (versionedOp) {
// TODO: add support for non-existing docs
if (randomBoolean() && lastOpSeqNo != SequenceNumbers.UNASSIGNED_SEQ_NO && docDeleted == false) {
result = engine.index(indexWithSeq.apply(lastOpSeqNo, lastOpTerm, index));
} else {
result = engine.index(indexWithVersion.apply(correctVersion, index));
}
} else {
result = engine.index(indexWithCurrentTerm.apply(index));
}
assertThat(result.isCreated(), equalTo(docDeleted));
assertThat(result.getVersion(), equalTo(Math.max(lastOpVersion + 1, 1)));
assertThat(result.getResultType(), equalTo(Engine.Result.Type.SUCCESS));
assertThat(result.getFailure(), nullValue());
lastFieldValue = index.docs().get(0).get("value");
docDeleted = false;
lastOpVersion = result.getVersion();
lastOpSeqNo = result.getSeqNo();
lastOpTerm = result.getTerm();
opsPerformed++;
}
} else {
final Engine.Delete delete = (Engine.Delete) op;
if (versionConflict) {
// generate a conflict
Engine.DeleteResult result;
if (randomBoolean()) {
result = engine.delete(delWithSeq.apply(conflictingSeqNo, conflictingTerm, delete));
} else {
result = engine.delete(delWithVersion.apply(conflictingVersion, delete));
}
assertThat(result.isFound(), equalTo(docDeleted == false));
assertThat(result.getVersion(), equalTo(lastOpVersion));
assertThat(result.getResultType(), equalTo(Engine.Result.Type.FAILURE));
assertThat(result.getFailure(), instanceOf(VersionConflictEngineException.class));
} else {
final Engine.DeleteResult result;
long correctSeqNo = docDeleted ? UNASSIGNED_SEQ_NO : lastOpSeqNo;
if (versionedOp && lastOpSeqNo != UNASSIGNED_SEQ_NO && randomBoolean()) {
result = engine.delete(delWithSeq.apply(correctSeqNo, lastOpTerm, delete));
} else if (versionedOp) {
result = engine.delete(delWithVersion.apply(correctVersion, delete));
} else {
result = engine.delete(deleteWithCurrentTerm.apply(delete));
}
assertThat(result.isFound(), equalTo(docDeleted == false));
assertThat(result.getVersion(), equalTo(Math.max(lastOpVersion + 1, 1)));
assertThat(result.getResultType(), equalTo(Engine.Result.Type.SUCCESS));
assertThat(result.getFailure(), nullValue());
docDeleted = true;
lastOpVersion = result.getVersion();
lastOpSeqNo = result.getSeqNo();
lastOpTerm = result.getTerm();
opsPerformed++;
}
}
if (randomBoolean()) {
// refresh and take the chance to check everything is ok so far
assertVisibleCount(engine, docDeleted ? 0 : 1);
// even if the doc is not deleted, lastFieldValue can still be null if this is the
// first op and it failed.
if (docDeleted == false && lastFieldValue != null) {
try (Searcher searcher = engine.acquireSearcher("test")) {
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(new TermQuery(new Term("value", lastFieldValue)), collector);
assertThat(collector.getTotalHits(), equalTo(1));
}
}
}
if (randomBoolean()) {
engine.flush();
engine.refresh("test");
}
if (rarely()) {
// simulate GC deletes
engine.refresh("gc_simulation", Engine.SearcherScope.INTERNAL, true);
engine.clearDeletedTombstones();
if (docDeleted) {
lastOpVersion = Versions.NOT_FOUND;
lastOpSeqNo = UNASSIGNED_SEQ_NO;
lastOpTerm = UNASSIGNED_PRIMARY_TERM;
}
}
}
assertVisibleCount(engine, docDeleted ? 0 : 1);
if (docDeleted == false) {
try (Searcher searcher = engine.acquireSearcher("test")) {
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(new TermQuery(new Term("value", lastFieldValue)), collector);
assertThat(collector.getTotalHits(), equalTo(1));
}
}
return opsPerformed;
}
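/**
 * Exercises a random non-internal version type on the primary; only EXTERNAL supports
 * out-of-order processing, so the history is shuffled solely in that case.
 */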
@Test
public void testNonInternalVersioningOnPrimary() throws IOException {
final Set<VersionType> nonInternalVersioning = new HashSet<>(Arrays.asList(VersionType.values()));
nonInternalVersioning.remove(VersionType.INTERNAL);
final VersionType versionType = randomFrom(nonInternalVersioning);
final List<Engine.Operation> ops = generateSingleDocHistory(
false, versionType, false, 2, 2, 20, "1");
final Engine.Operation lastOp = ops.get(ops.size() - 1);
final String lastFieldValue;
if (lastOp instanceof Engine.Index) {
Engine.Index index = (Engine.Index) lastOp;
lastFieldValue = index.docs().get(0).get("value");
} else {
// delete
lastFieldValue = null;
}
// other version types don't support out of order processing.
if (versionType == VersionType.EXTERNAL) {
shuffle(ops, random());
}
long highestOpVersion = Versions.NOT_FOUND;
long seqNo = -1;
boolean docDeleted = true;
for (Engine.Operation op : ops) {
logger.info("performing [{}], v [{}], seq# [{}], term [{}]",
op.operationType().name().charAt(0), op.version(), op.seqNo(), op.primaryTerm());
if (op instanceof Engine.Index) {
final Engine.Index index = (Engine.Index) op;
Engine.IndexResult result = engine.index(index);
if (op.versionType().isVersionConflictForWrites(highestOpVersion, op.version(), docDeleted) == false) {
seqNo++;
assertThat(result.getSeqNo(), equalTo(seqNo));
assertThat(result.isCreated(), equalTo(docDeleted));
assertThat(result.getVersion(), equalTo(op.version()));
assertThat(result.getResultType(), equalTo(Engine.Result.Type.SUCCESS));
assertThat(result.getFailure(), nullValue());
docDeleted = false;
highestOpVersion = op.version();
} else {
assertThat(result.isCreated(), equalTo(false));
assertThat(result.getVersion(), equalTo(highestOpVersion));
assertThat(result.getResultType(), equalTo(Engine.Result.Type.FAILURE));
assertThat(result.getFailure(), instanceOf(VersionConflictEngineException.class));
}
} else {
final Engine.Delete delete = (Engine.Delete) op;
Engine.DeleteResult result = engine.delete(delete);
if (op.versionType().isVersionConflictForWrites(highestOpVersion, op.version(), docDeleted) == false) {
seqNo++;
assertThat(result.getSeqNo(), equalTo(seqNo));
assertThat(result.isFound(), equalTo(docDeleted == false));
assertThat(result.getVersion(), equalTo(op.version()));
assertThat(result.getResultType(), equalTo(Engine.Result.Type.SUCCESS));
assertThat(result.getFailure(), nullValue());
docDeleted = true;
highestOpVersion = op.version();
} else {
assertThat(result.isFound(), equalTo(docDeleted == false));
assertThat(result.getVersion(), equalTo(highestOpVersion));
assertThat(result.getResultType(), equalTo(Engine.Result.Type.FAILURE));
assertThat(result.getFailure(), instanceOf(VersionConflictEngineException.class));
}
}
if (randomBoolean()) {
engine.refresh("test");
}
if (randomBoolean()) {
engine.flush();
engine.refresh("test");
}
}
assertVisibleCount(engine, docDeleted ? 0 : 1);
if (docDeleted == false) {
logger.info("searching for [{}]", lastFieldValue);
try (Searcher searcher = engine.acquireSearcher("test")) {
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(new TermQuery(new Term("value", lastFieldValue)), collector);
assertThat(collector.getTotalHits(), equalTo(1));
}
}
}
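/**
 * Simulates a replica being promoted to primary: the replica ops are applied first, then
 * primary-style ops are replayed on the same engine, and the resulting sequence numbers
 * are checked for continuity.
 */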
@Test
public void testVersioningPromotedReplica() throws IOException {
final List<Engine.Operation> replicaOps = generateSingleDocHistory(
true, VersionType.INTERNAL, false, 1, 2, 20, "1");
List<Engine.Operation> primaryOps = generateSingleDocHistory(
false, VersionType.INTERNAL, false, 2, 2, 20, "1");
Engine.Operation lastReplicaOp = replicaOps.get(replicaOps.size() - 1);
final boolean deletedOnReplica = lastReplicaOp instanceof Engine.Delete;
final long finalReplicaVersion = lastReplicaOp.version();
final long finalReplicaSeqNo = lastReplicaOp.seqNo();
assertOpsOnReplica(replicaOps, replicaEngine, true, logger);
final int opsOnPrimary = assertOpsOnPrimary(primaryOps, finalReplicaVersion, deletedOnReplica, replicaEngine);
final long currentSeqNo = getSequenceID(
replicaEngine,
new Engine.Get(lastReplicaOp.uid().text(), lastReplicaOp.uid())).v1();
// search the replica engine, which is where all operations in this test were applied
try (Searcher searcher = replicaEngine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) {
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(new MatchAllDocsQuery(), collector);
if (collector.getTotalHits() > 0) {
// last op wasn't delete
assertThat(currentSeqNo, equalTo(finalReplicaSeqNo + opsOnPrimary));
}
}
}
@Test
public void testConcurrentExternalVersioningOnPrimary() throws IOException, InterruptedException {
final List<Engine.Operation> ops = generateSingleDocHistory(
false, VersionType.EXTERNAL, false, 2, 100, 300, "1");
final Engine.Operation lastOp = ops.get(ops.size() - 1);
final String lastFieldValue;
if (lastOp instanceof Engine.Index) {
Engine.Index index = (Engine.Index) lastOp;
lastFieldValue = index.docs().get(0).get("value");
} else {
// delete
lastFieldValue = null;
}
shuffle(ops, random());
concurrentlyApplyOps(ops, engine);
assertVisibleCount(engine, lastFieldValue == null ? 0 : 1);
if (lastFieldValue != null) {
try (Searcher searcher = engine.acquireSearcher("test")) {
final TotalHitCountCollector collector = new TotalHitCountCollector();
searcher.search(new TermQuery(new Term("value", lastFieldValue)), collector);
assertThat(collector.getTotalHits(), equalTo(1));
}
}
}
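/**
 * Multiple threads perform an optimistic read-modify-write loop (real-time get followed by
 * a versioned index); the recorded history must contain no duplicate versions and must
 * yield a consistent final value set.
 */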
@Test
public void testConcurrentGetAndSetOnPrimary() throws IOException, InterruptedException {
Thread[] thread = new Thread[randomIntBetween(3, 5)];
CountDownLatch startGun = new CountDownLatch(thread.length);
final int opsPerThread = randomIntBetween(10, 20);
class OpAndVersion {
final long version;
final String removed;
final String added;
OpAndVersion(long version, String removed, String added) {
this.version = version;
this.removed = removed;
this.added = added;
}
}
final AtomicInteger idGenerator = new AtomicInteger();
final Queue<OpAndVersion> history = ConcurrentCollections.newQueue();
ParsedDocument doc = testParsedDocument("1", null, testDocument(), bytesArray(""), null);
final Term uidTerm = newUid(doc);
engine.index(indexForDoc(doc));
final BiFunction<String, Engine.SearcherScope, Searcher> searcherFactory = engine::acquireSearcher;
for (int i = 0; i < thread.length; i++) {
thread[i] = new Thread(() -> {
startGun.countDown();
try {
startGun.await();
} catch (InterruptedException e) {
throw new AssertionError(e);
}
for (int op = 0; op < opsPerThread; op++) {
try (Engine.GetResult get = engine.get(new Engine.Get(doc.id(), uidTerm), searcherFactory)) {
FieldsVisitor visitor = new FieldsVisitor(true);
get.docIdAndVersion().reader.document(get.docIdAndVersion().docId, visitor);
List<String> values = new ArrayList<>(Strings.commaDelimitedListToSet(visitor.source().utf8ToString()));
String removed = op % 3 == 0 && values.size() > 0 ? values.remove(0) : null;
String added = "v_" + idGenerator.incrementAndGet();
values.add(added);
Engine.Index index = new Engine.Index(uidTerm,
testParsedDocument("1", null, testDocument(),
bytesArray(Strings.collectionToCommaDelimitedString(values)), null),
UNASSIGNED_SEQ_NO, 2,
get.docIdAndVersion().version, VersionType.INTERNAL,
PRIMARY, System.currentTimeMillis(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult indexResult = engine.index(index);
if (indexResult.getResultType() == Engine.Result.Type.SUCCESS) {
history.add(new OpAndVersion(indexResult.getVersion(), removed, added));
}
} catch (IOException e) {
throw new AssertionError(e);
}
}
});
thread[i].start();
}
for (int i = 0; i < thread.length; i++) {
thread[i].join();
}
List<OpAndVersion> sortedHistory = new ArrayList<>(history);
sortedHistory.sort(Comparator.comparing(o -> o.version));
Set<String> currentValues = new HashSet<>();
for (int i = 0; i < sortedHistory.size(); i++) {
OpAndVersion op = sortedHistory.get(i);
if (i > 0) {
assertThat("duplicate version", op.version, not(equalTo(sortedHistory.get(i - 1).version)));
}
boolean exists = op.removed == null || currentValues.remove(op.removed);
assertTrue(op.removed + " should exist", exists);
exists = currentValues.add(op.added);
assertTrue(op.added + " should not exist", exists);
}
try (Engine.GetResult get = engine.get(new Engine.Get(doc.id(), uidTerm), searcherFactory)) {
FieldsVisitor visitor = new FieldsVisitor(true);
get.docIdAndVersion().reader.document(get.docIdAndVersion().docId, visitor);
List<String> values = Arrays.asList(Strings.commaDelimitedListToStringArray(visitor.source().utf8ToString()));
assertThat(currentValues, equalTo(new HashSet<>(values)));
}
}
@Test
public void testBasicCreatedFlag() throws IOException {
ParsedDocument doc = testParsedDocument("1", null, testDocument(), B_1, null);
Engine.Index index = indexForDoc(doc);
Engine.IndexResult indexResult = engine.index(index);
assertTrue(indexResult.isCreated());
index = indexForDoc(doc);
indexResult = engine.index(index);
assertFalse(indexResult.isCreated());
engine.delete(new Engine.Delete(
"1",
newUid(doc),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
));
index = indexForDoc(doc);
indexResult = engine.index(index);
assertTrue(indexResult.isCreated());
}
private static class MockAppender extends AbstractAppender {
public boolean sawIndexWriterMessage;
public boolean sawIndexWriterIFDMessage;
MockAppender(final String name) throws IllegalAccessException {
super(name, RegexFilter.createFilter(".*(\n.*)*", new String[0],
false, null, null), null);
}
@Override
public void append(LogEvent event) {
final String formattedMessage = event.getMessage().getFormattedMessage();
if (event.getLevel() == Level.TRACE && event.getMarker().getName().contains("[index][0]")) {
if (event.getLoggerName().endsWith(".IW") &&
formattedMessage.contains("IW: now apply all deletes")) {
sawIndexWriterMessage = true;
}
if (event.getLoggerName().endsWith(".IFD")) {
sawIndexWriterIFDMessage = true;
}
}
}
}
// #5891: make sure IndexWriter's infoStream output is
// sent to lucene.iw with log level TRACE:
@Test
public void testIndexWriterInfoStream() throws IllegalAccessException, IOException {
assumeFalse("who tests the tester?", VERBOSE);
MockAppender mockAppender = new MockAppender("testIndexWriterInfoStream");
mockAppender.start();
Logger rootLogger = LogManager.getRootLogger();
Level savedLevel = rootLogger.getLevel();
Loggers.addAppender(rootLogger, mockAppender);
Loggers.setLevel(rootLogger, Level.DEBUG);
rootLogger = LogManager.getRootLogger();
try {
// First, with DEBUG, which should NOT log IndexWriter output:
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null);
engine.index(indexForDoc(doc));
engine.flush();
assertFalse(mockAppender.sawIndexWriterMessage);
// Again, with TRACE, which should log IndexWriter output:
Loggers.setLevel(rootLogger, Level.TRACE);
engine.index(indexForDoc(doc));
engine.flush();
assertTrue(mockAppender.sawIndexWriterMessage);
} finally {
Loggers.removeAppender(rootLogger, mockAppender);
mockAppender.stop();
Loggers.setLevel(rootLogger, savedLevel);
}
}
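/**
 * Drives a primary with a tracked in-sync replica through random indexing, deletes, and
 * flushes, verifying the max seq#, local and global checkpoints in the commit user data,
 * and that the invariants still hold after recovering from the translog.
 */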
@Test
public void testSeqNoAndCheckpoints() throws IOException, InterruptedException {
final int opCount = randomIntBetween(1, 256);
long primarySeqNo = SequenceNumbers.NO_OPS_PERFORMED;
final String[] ids = new String[]{"1", "2", "3"};
final Set<String> indexedIds = new HashSet<>();
long localCheckpoint = SequenceNumbers.NO_OPS_PERFORMED;
long replicaLocalCheckpoint = SequenceNumbers.NO_OPS_PERFORMED;
final long globalCheckpoint;
long maxSeqNo = SequenceNumbers.NO_OPS_PERFORMED;
IOUtils.close(store, engine);
store = createStore();
InternalEngine initialEngine = null;
try {
initialEngine = createEngine(defaultSettings, store, createTempDir(), newLogMergePolicy(), null);
final ShardRouting primary = TestShardRouting.newShardRouting("test",
shardId.id(), "node1", null, true,
ShardRoutingState.STARTED, allocationId);
final ShardRouting initializingReplica =
TestShardRouting.newShardRouting(shardId, "node2", false, ShardRoutingState.INITIALIZING);
ReplicationTracker gcpTracker = (ReplicationTracker) initialEngine.config().getGlobalCheckpointSupplier();
gcpTracker.updateFromMaster(1L, new HashSet<>(Collections.singletonList(primary.allocationId().getId())),
new IndexShardRoutingTable.Builder(shardId).addShard(primary).build());
gcpTracker.activatePrimaryMode(primarySeqNo);
if (defaultSettings.isSoftDeleteEnabled()) {
final CountDownLatch countDownLatch = new CountDownLatch(1);
gcpTracker.addPeerRecoveryRetentionLease(initializingReplica.currentNodeId(),
SequenceNumbers.NO_OPS_PERFORMED, ActionListener.wrap(countDownLatch::countDown));
countDownLatch.await(5, TimeUnit.SECONDS);
}
gcpTracker.updateFromMaster(2L, new HashSet<>(Collections.singletonList(primary.allocationId().getId())),
new IndexShardRoutingTable.Builder(shardId).addShard(primary).addShard(initializingReplica).build());
gcpTracker.initiateTracking(initializingReplica.allocationId().getId());
gcpTracker.markAllocationIdAsInSync(initializingReplica.allocationId().getId(), replicaLocalCheckpoint);
final ShardRouting replica = initializingReplica.moveToStarted();
gcpTracker.updateFromMaster(3L, new HashSet<>(Arrays.asList(primary.allocationId().getId(), replica.allocationId().getId())),
new IndexShardRoutingTable.Builder(shardId).addShard(primary).addShard(replica).build());
for (int op = 0; op < opCount; op++) {
final String id;
// mostly index, sometimes delete
if (rarely() && indexedIds.isEmpty() == false) {
// we have some docs indexed, so delete one of them
id = randomFrom(indexedIds);
final Engine.Delete delete = new Engine.Delete(
id,
newUid(id),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
rarely() ? 100 : Versions.MATCH_ANY,
VersionType.INTERNAL,
PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
);
final Engine.DeleteResult result = initialEngine.delete(delete);
if (result.getResultType() == Engine.Result.Type.SUCCESS) {
assertThat(result.getSeqNo(), equalTo(primarySeqNo + 1));
assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo + 1));
indexedIds.remove(id);
primarySeqNo++;
} else {
assertThat(result.getSeqNo(), equalTo(UNASSIGNED_SEQ_NO));
assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo));
}
} else {
// index a document
id = randomFrom(ids);
ParsedDocument doc = testParsedDocument(id, null, testDocumentWithTextField(), SOURCE, null);
final Engine.Index index = new Engine.Index(newUid(doc), doc,
UNASSIGNED_SEQ_NO, primaryTerm.get(),
rarely() ? 100 : Versions.MATCH_ANY, VersionType.INTERNAL,
PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
final Engine.IndexResult result = initialEngine.index(index);
if (result.getResultType() == Engine.Result.Type.SUCCESS) {
assertThat(result.getSeqNo(), equalTo(primarySeqNo + 1));
assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo + 1));
indexedIds.add(id);
primarySeqNo++;
} else {
assertThat(result.getSeqNo(), equalTo(UNASSIGNED_SEQ_NO));
assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo));
}
}
initialEngine.syncTranslog(); // to advance persisted local checkpoint
if (randomInt(10) < 3) {
// only update the replica checkpoint rarely, as this loop runs for every doc
replicaLocalCheckpoint = randomIntBetween(Math.toIntExact(replicaLocalCheckpoint), Math.toIntExact(primarySeqNo));
}
gcpTracker.updateLocalCheckpoint(primary.allocationId().getId(),
initialEngine.getPersistedLocalCheckpoint());
gcpTracker.updateLocalCheckpoint(initializingReplica.allocationId().getId(), replicaLocalCheckpoint);
if (rarely()) {
localCheckpoint = primarySeqNo;
maxSeqNo = primarySeqNo;
initialEngine.flush(true, true);
}
}
logger.info("localcheckpoint {}, global {}", replicaLocalCheckpoint, primarySeqNo);
globalCheckpoint = gcpTracker.getGlobalCheckpoint();
assertEquals(primarySeqNo, initialEngine.getSeqNoStats(-1).getMaxSeqNo());
assertEquals(primarySeqNo, initialEngine.getPersistedLocalCheckpoint());
assertThat(globalCheckpoint, equalTo(replicaLocalCheckpoint));
assertThat(
Long.parseLong(initialEngine.commitStats().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)),
equalTo(localCheckpoint));
initialEngine.getTranslog().sync(); // to guarantee the global checkpoint is written to the translog checkpoint
assertThat(
initialEngine.getTranslog().getLastSyncedGlobalCheckpoint(),
equalTo(globalCheckpoint));
assertThat(
Long.parseLong(initialEngine.commitStats().getUserData().get(SequenceNumbers.MAX_SEQ_NO)),
equalTo(maxSeqNo));
} finally {
IOUtils.close(initialEngine);
}
try (InternalEngine recoveringEngine = new InternalEngine(initialEngine.config())) {
recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
assertEquals(primarySeqNo, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo());
assertThat(
Long.parseLong(recoveringEngine.commitStats().getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)),
equalTo(primarySeqNo));
assertThat(
recoveringEngine.getTranslog().getLastSyncedGlobalCheckpoint(),
equalTo(globalCheckpoint));
assertThat(
Long.parseLong(recoveringEngine.commitStats().getUserData().get(SequenceNumbers.MAX_SEQ_NO)),
// after recovering from translog, all docs have been flushed to Lucene segments, so here we will assert
// that the committed max seq no is equivalent to what the current primary seq no is, as all data
// we have assigned sequence numbers to should be in the commit
equalTo(primarySeqNo));
assertThat(recoveringEngine.getProcessedLocalCheckpoint(), equalTo(primarySeqNo));
assertThat(recoveringEngine.getPersistedLocalCheckpoint(), equalTo(primarySeqNo));
assertThat(recoveringEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo(primarySeqNo));
assertThat(generateNewSeqNo(recoveringEngine), equalTo(primarySeqNo + 1));
}
}
// this test writes documents to the engine while concurrently flushing/committing
// and ensures that the commit points contain the correct sequence number data
@Test
public void testConcurrentWritesAndCommits() throws Exception {
List<Engine.IndexCommitRef> commits = new ArrayList<>();
try (Store store = createStore();
InternalEngine engine = createEngine(config(defaultSettings, store, createTempDir(), newMergePolicy(), null))) {
final int numIndexingThreads = scaledRandomIntBetween(2, 4);
final int numDocsPerThread = randomIntBetween(500, 1000);
final CyclicBarrier barrier = new CyclicBarrier(numIndexingThreads + 1);
final List<Thread> indexingThreads = new ArrayList<>();
final CountDownLatch doneLatch = new CountDownLatch(numIndexingThreads);
// create N indexing threads to index documents simultaneously
for (int threadNum = 0; threadNum < numIndexingThreads; threadNum++) {
final int threadIdx = threadNum;
Thread indexingThread = new Thread(() -> {
try {
barrier.await(); // wait for all threads to start at the same time
// index random number of docs
for (int i = 0; i < numDocsPerThread; i++) {
final String id = "thread" + threadIdx + "#" + i;
ParsedDocument doc = testParsedDocument(id, null, testDocument(), B_1, null);
engine.index(indexForDoc(doc));
}
} catch (Exception e) {
throw new RuntimeException(e);
} finally {
doneLatch.countDown();
}
});
indexingThreads.add(indexingThread);
}
// start the indexing threads
for (Thread thread : indexingThreads) {
thread.start();
}
barrier.await(); // wait for indexing threads to all be ready to start
int commitLimit = randomIntBetween(10, 20);
long sleepTime = 1;
// create random commit points
boolean doneIndexing;
do {
doneIndexing = doneLatch.await(sleepTime, TimeUnit.MILLISECONDS);
commits.add(engine.acquireLastIndexCommit(true));
if (commits.size() > commitLimit) { // don't keep on piling up too many commits
IOUtils.close(commits.remove(randomIntBetween(0, commits.size()-1)));
// we increase the wait time so that, if things are slow, we eventually wait for the indexing threads to finish.
// this reduces pressure on disks and allows the threads to make progress without piling up too many commits
sleepTime = sleepTime * 2;
}
} while (doneIndexing == false);
// now, verify all the commits have the correct docs according to the user commit data
long prevLocalCheckpoint = SequenceNumbers.NO_OPS_PERFORMED;
long prevMaxSeqNo = SequenceNumbers.NO_OPS_PERFORMED;
for (Engine.IndexCommitRef commitRef : commits) {
final IndexCommit commit = commitRef.getIndexCommit();
Map<String, String> userData = commit.getUserData();
long localCheckpoint = userData.containsKey(SequenceNumbers.LOCAL_CHECKPOINT_KEY) ?
Long.parseLong(userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY)) :
SequenceNumbers.NO_OPS_PERFORMED;
long maxSeqNo = userData.containsKey(SequenceNumbers.MAX_SEQ_NO) ?
Long.parseLong(userData.get(SequenceNumbers.MAX_SEQ_NO)) :
UNASSIGNED_SEQ_NO;
// local checkpoint and max seq no shouldn't go backwards
assertThat(localCheckpoint, greaterThanOrEqualTo(prevLocalCheckpoint));
assertThat(maxSeqNo, greaterThanOrEqualTo(prevMaxSeqNo));
try (IndexReader reader = DirectoryReader.open(commit)) {
Long highest = getHighestSeqNo(reader);
final long highestSeqNo;
if (highest != null) {
highestSeqNo = highest.longValue();
} else {
highestSeqNo = SequenceNumbers.NO_OPS_PERFORMED;
}
// make sure localCheckpoint <= highest seq no found <= maxSeqNo
assertThat(highestSeqNo, greaterThanOrEqualTo(localCheckpoint));
assertThat(highestSeqNo, lessThanOrEqualTo(maxSeqNo));
// make sure all sequence numbers up to and including the local checkpoint are in the index
FixedBitSet seqNosBitSet = getSeqNosSet(reader, highestSeqNo);
for (int i = 0; i <= localCheckpoint; i++) {
assertTrue("local checkpoint [" + localCheckpoint + "], _seq_no [" + i + "] should be indexed",
seqNosBitSet.get(i));
}
}
prevLocalCheckpoint = localCheckpoint;
prevMaxSeqNo = maxSeqNo;
}
}
}
private static Long getHighestSeqNo(final IndexReader reader) throws IOException {
final String fieldName = SeqNoFieldMapper.NAME;
long size = PointValues.size(reader, fieldName);
if (size == 0) {
return null;
}
byte[] max = PointValues.getMaxPackedValue(reader, fieldName);
return LongPoint.decodeDimension(max, 0);
}
private static FixedBitSet getSeqNosSet(final IndexReader reader, final long highestSeqNo) throws IOException {
// _seq_no are stored as doc values for the time being, so this is how we get them
// (as opposed to using an IndexSearcher or IndexReader)
final FixedBitSet bitSet = new FixedBitSet((int) highestSeqNo + 1);
final List<LeafReaderContext> leaves = reader.leaves();
if (leaves.isEmpty()) {
return bitSet;
}
for (int i = 0; i < leaves.size(); i++) {
final LeafReader leaf = leaves.get(i).reader();
final NumericDocValues values = leaf.getNumericDocValues(SeqNoFieldMapper.NAME);
if (values == null) {
continue;
}
final Bits bits = leaf.getLiveDocs();
for (int docID = 0; docID < leaf.maxDoc(); docID++) {
if (bits == null || bits.get(docID)) {
if (values.advanceExact(docID) == false) {
throw new AssertionError("Document does not have a seq number: " + docID);
}
final long seqNo = values.longValue();
assertFalse("should not have more than one document with the same seq_no[" +
seqNo + "]", bitSet.get((int) seqNo));
bitSet.set((int) seqNo);
}
}
}
return bitSet;
}
// #8603: make sure we can separately log IFD's messages
@Test
public void testIndexWriterIFDInfoStream() throws IllegalAccessException, IOException {
assumeFalse("who tests the tester?", VERBOSE);
MockAppender mockAppender = new MockAppender("testIndexWriterIFDInfoStream");
mockAppender.start();
final Logger iwIFDLogger = LogManager.getLogger("org.elasticsearch.index.engine.Engine.IFD");
Loggers.addAppender(iwIFDLogger, mockAppender);
Loggers.setLevel(iwIFDLogger, Level.DEBUG);
try {
// First, with DEBUG, which should NOT log IndexWriter output:
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null);
engine.index(indexForDoc(doc));
engine.flush();
assertFalse(mockAppender.sawIndexWriterMessage);
assertFalse(mockAppender.sawIndexWriterIFDMessage);
// Again, with TRACE, which should only log IndexWriter IFD output:
Loggers.setLevel(iwIFDLogger, Level.TRACE);
engine.index(indexForDoc(doc));
engine.flush();
assertFalse(mockAppender.sawIndexWriterMessage);
assertTrue(mockAppender.sawIndexWriterIFDMessage);
} finally {
Loggers.removeAppender(iwIFDLogger, mockAppender);
mockAppender.stop();
Loggers.setLevel(iwIFDLogger, (Level) null);
}
}
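/**
 * With GC deletes disabled, delete tombstones must be retained so that out-of-order
 * external-version operations still hit version conflicts instead of resurrecting docs.
 */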
@Test
public void testEnableGcDeletes() throws Exception {
try (Store store = createStore();
Engine engine = createEngine(config(defaultSettings, store, createTempDir(), newMergePolicy(), null))) {
engine.config().setEnableGcDeletes(false);
final BiFunction<String, Engine.SearcherScope, Searcher> searcherFactory = engine::acquireSearcher;
// Add document
Document document = testDocument();
document.add(new TextField("value", "test1", Field.Store.YES));
ParsedDocument doc = testParsedDocument("1", null, document, B_2, null);
engine.index(new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0, 1,
VersionType.EXTERNAL,
Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0));
// Delete document we just added:
engine.delete(new Engine.Delete(
"1",
newUid(doc),
UNASSIGNED_SEQ_NO,
0,
10,
VersionType.EXTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
));
// Get should not find the document
Engine.GetResult getResult = engine.get(newGet(doc), searcherFactory);
assertThat(getResult.docIdAndVersion(), is(nullValue()));
// Give the gc pruning logic a chance to kick in
Thread.sleep(1000);
if (randomBoolean()) {
engine.refresh("test");
}
// Delete non-existent document
engine.delete(new Engine.Delete(
"2",
newUid("2"),
UNASSIGNED_SEQ_NO,
0,
10,
VersionType.EXTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
));
// Get should not find the document (we never indexed uid=2):
getResult = engine.get(new Engine.Get("2", newUid("2")), searcherFactory);
assertThat(getResult.docIdAndVersion(), is(nullValue()));
// Try to index uid=1 with a too-old version, should fail:
Engine.Index index = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0, 2,
VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult indexResult = engine.index(index);
assertThat(indexResult.getResultType(), equalTo(Engine.Result.Type.FAILURE));
assertThat(indexResult.getFailure(), instanceOf(VersionConflictEngineException.class));
// Get should still not find the document
getResult = engine.get(newGet(doc), searcherFactory);
assertThat(getResult.docIdAndVersion(), is(nullValue()));
// Try to index uid=1 again with a too-old version, should fail:
Engine.Index index1 = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0, 2,
VersionType.EXTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
indexResult = engine.index(index1);
assertThat(indexResult.getResultType(), equalTo(Engine.Result.Type.FAILURE));
assertThat(indexResult.getFailure(), instanceOf(VersionConflictEngineException.class));
// Get should not find the document
getResult = engine.get(newGet(doc), searcherFactory);
assertThat(getResult.docIdAndVersion(), is(nullValue()));
}
}
@Test
public void testExtractShardId() {
try (Engine.Searcher test = this.engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) {
ShardId shardId = ShardUtils.extractShardId(test.getDirectoryReader());
assertNotNull(shardId);
assertEquals(shardId, engine.config().getShardId());
}
}
/**
* Random test that throws random exception and ensures all references are
* counted down / released and resources are closed.
*/
@Test
public void testFailStart() throws IOException {
// this test fails if any reader, searcher or directory is not closed - MDW FTW
final int iters = scaledRandomIntBetween(10, 100);
for (int i = 0; i < iters; i++) {
MockDirectoryWrapper wrapper = newMockDirectory();
wrapper.setFailOnOpenInput(randomBoolean());
wrapper.setAllowRandomFileNotFoundException(randomBoolean());
wrapper.setRandomIOExceptionRate(randomDouble());
wrapper.setRandomIOExceptionRateOnOpen(randomDouble());
final Path translogPath = createTempDir("testFailStart");
try (Store store = createStore(wrapper)) {
int refCount = store.refCount();
assertTrue("refCount: " + store.refCount(), store.refCount() > 0);
InternalEngine holder;
try {
holder = createEngine(store, translogPath);
} catch (EngineCreationFailureException | IOException ex) {
assertEquals(store.refCount(), refCount);
continue;
}
assertEquals(store.refCount(), refCount + 1);
final int numStarts = scaledRandomIntBetween(1, 5);
for (int j = 0; j < numStarts; j++) {
try {
assertEquals(store.refCount(), refCount + 1);
holder.close();
holder = createEngine(store, translogPath);
assertEquals(store.refCount(), refCount + 1);
} catch (EngineCreationFailureException ex) {
// all is fine
assertEquals(store.refCount(), refCount);
break;
}
}
holder.close();
assertEquals(store.refCount(), refCount);
}
}
}
@Test
public void testSettings() {
CodecService codecService = new CodecService(null, logger);
LiveIndexWriterConfig currentIndexWriterConfig = engine.getCurrentIndexWriterConfig();
assertEquals(engine.config().getCodec().getName(), codecService.codec(codecName).getName());
assertEquals(currentIndexWriterConfig.getCodec().getName(), codecService.codec(codecName).getName());
}
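/**
 * Verifies that the translog UUID in the last commit's user data stays in sync with the
 * engine's translog across engine creation, translog recovery, and association with a
 * freshly created translog.
 */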
@Test
public void testCurrentTranslogIDisCommitted() throws IOException {
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
try (Store store = createStore()) {
EngineConfig config = config(defaultSettings, store, createTempDir(), newMergePolicy(), null, null,
globalCheckpoint::get);
// create
{
store.createEmpty(Version.CURRENT.luceneVersion);
final String translogUUID =
Translog.createEmptyTranslog(config.getTranslogConfig().getTranslogPath(),
SequenceNumbers.NO_OPS_PERFORMED, shardId, primaryTerm.get());
store.associateIndexWithNewTranslog(translogUUID);
ParsedDocument doc = testParsedDocument(Integer.toString(0), null, testDocument(),
new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0,
Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
try (InternalEngine engine = createEngine(config)) {
engine.index(firstIndexRequest);
engine.syncTranslog(); // to advance persisted local checkpoint
assertEquals(engine.getProcessedLocalCheckpoint(), engine.getPersistedLocalCheckpoint());
globalCheckpoint.set(engine.getPersistedLocalCheckpoint());
expectThrows(IllegalStateException.class, () -> engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE));
Map<String, String> userData = engine.getLastCommittedSegmentInfos().getUserData();
assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY));
}
}
// open and recover tlog
{
for (int i = 0; i < 2; i++) {
try (InternalEngine engine = new InternalEngine(config)) {
expectThrows(IllegalStateException.class, engine::ensureCanFlush);
Map<String, String> userData = engine.getLastCommittedSegmentInfos().getUserData();
assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY));
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
userData = engine.getLastCommittedSegmentInfos().getUserData();
assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY));
}
}
}
// open index with new tlog
{
final String translogUUID =
Translog.createEmptyTranslog(config.getTranslogConfig().getTranslogPath(),
SequenceNumbers.NO_OPS_PERFORMED, shardId, primaryTerm.get());
store.associateIndexWithNewTranslog(translogUUID);
try (InternalEngine engine = new InternalEngine(config)) {
Map<String, String> userData = engine.getLastCommittedSegmentInfos().getUserData();
assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY));
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
assertEquals(2, engine.getTranslog().currentFileGeneration());
}
}
// open and recover tlog with empty tlog
{
for (int i = 0; i < 2; i++) {
try (InternalEngine engine = new InternalEngine(config)) {
Map<String, String> userData = engine.getLastCommittedSegmentInfos().getUserData();
assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY));
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
userData = engine.getLastCommittedSegmentInfos().getUserData();
assertEquals(engine.getTranslog().getTranslogUUID(), userData.get(Translog.TRANSLOG_UUID_KEY));
}
}
}
}
}
@Test
public void testMissingTranslog() throws IOException {
        // test that we can force start the engine, even if the translog is missing.
engine.close();
// fake a new translog, causing the engine to point to a missing one.
final long newPrimaryTerm = randomLongBetween(0L, primaryTerm.get());
final Translog translog = createTranslog(() -> newPrimaryTerm);
long id = translog.currentFileGeneration();
translog.close();
IOUtils.rm(translog.location().resolve(Translog.getFilename(id)));
expectThrows(EngineCreationFailureException.class, "engine shouldn't start without a valid translog id",
() -> createEngine(store, primaryTranslogDir));
        // when a new translog is created, it should be ok
final String translogUUID = Translog.createEmptyTranslog(primaryTranslogDir, UNASSIGNED_SEQ_NO, shardId, newPrimaryTerm);
store.associateIndexWithNewTranslog(translogUUID);
EngineConfig config = config(defaultSettings, store, primaryTranslogDir, newMergePolicy(), null);
engine = new InternalEngine(config);
}
@Test
public void testTranslogReplayWithFailure() throws IOException {
final MockDirectoryWrapper directory = newMockDirectory();
final Path translogPath = createTempDir("testTranslogReplayWithFailure");
try (Store store = createStore(directory)) {
final int numDocs = randomIntBetween(1, 10);
try (InternalEngine engine = createEngine(store, translogPath)) {
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0,
Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult indexResult = engine.index(firstIndexRequest);
assertThat(indexResult.getVersion(), equalTo(1L));
}
assertVisibleCount(engine, numDocs);
}
            // since we roll back the IW, we write the same segment files again after starting the IW, but MDW prevents
            // this, so we have to disable the check explicitly
final int numIters = randomIntBetween(3, 5);
for (int i = 0; i < numIters; i++) {
directory.setRandomIOExceptionRateOnOpen(randomDouble());
directory.setRandomIOExceptionRate(randomDouble());
directory.setFailOnOpenInput(randomBoolean());
directory.setAllowRandomFileNotFoundException(randomBoolean());
boolean started = false;
InternalEngine engine = null;
try {
engine = createEngine(store, translogPath);
started = true;
} catch (EngineException | IOException e) {
logger.trace("exception on open", e);
}
directory.setRandomIOExceptionRateOnOpen(0.0);
directory.setRandomIOExceptionRate(0.0);
directory.setFailOnOpenInput(false);
directory.setAllowRandomFileNotFoundException(false);
if (started) {
assertVisibleCount(engine, numDocs, false);
engine.close();
}
}
}
}
@Test
public void testTranslogCleanUpPostCommitCrash() throws Exception {
IndexSettings indexSettings = new IndexSettings(defaultSettings.getIndexMetadata(), defaultSettings.getNodeSettings(),
defaultSettings.getScopedSettings());
IndexMetadata.Builder builder = IndexMetadata.builder(indexSettings.getIndexMetadata());
builder.settings(Settings.builder().put(indexSettings.getSettings())
.put(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.getKey(), "-1")
.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), "-1")
);
indexSettings.updateIndexMetadata(builder.build());
try (Store store = createStore()) {
AtomicBoolean throwErrorOnCommit = new AtomicBoolean();
final Path translogPath = createTempDir();
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
final LongSupplier globalCheckpointSupplier = globalCheckpoint::get;
store.createEmpty(Version.CURRENT.luceneVersion);
final String translogUUID = Translog.createEmptyTranslog(translogPath, globalCheckpoint.get(), shardId, primaryTerm.get());
store.associateIndexWithNewTranslog(translogUUID);
try (InternalEngine engine =
new InternalEngine(config(indexSettings, store, translogPath, newMergePolicy(), null, null,
globalCheckpointSupplier)) {
@Override
protected void commitIndexWriter(IndexWriter writer, Translog translog, String syncId) throws IOException {
super.commitIndexWriter(writer, translog, syncId);
if (throwErrorOnCommit.get()) {
throw new RuntimeException("power's out");
}
}
}) {
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
final ParsedDocument doc1 = testParsedDocument("1", null,
testDocumentWithTextField(), SOURCE, null);
engine.index(indexForDoc(doc1));
engine.syncTranslog(); // to advance local checkpoint
assertEquals(engine.getProcessedLocalCheckpoint(), engine.getPersistedLocalCheckpoint());
globalCheckpoint.set(engine.getPersistedLocalCheckpoint());
throwErrorOnCommit.set(true);
FlushFailedEngineException e = expectThrows(FlushFailedEngineException.class, engine::flush);
assertThat(e.getCause().getMessage(), equalTo("power's out"));
}
try (InternalEngine engine =
new InternalEngine(config(indexSettings, store, translogPath, newMergePolicy(), null, null,
globalCheckpointSupplier))) {
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
assertVisibleCount(engine, 1);
final long localCheckpoint = Long.parseLong(
engine.getLastCommittedSegmentInfos().userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY));
final long committedGen = engine.getTranslog().getMinGenerationForSeqNo(localCheckpoint + 1).translogFileGeneration;
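                // every translog generation below the committed generation must have been cleaned up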
for (int gen = 1; gen < committedGen; gen++) {
final Path genFile = translogPath.resolve(Translog.getFilename(gen));
assertFalse(genFile + " wasn't cleaned up", Files.exists(genFile));
}
}
}
}
@Test
public void testSkipTranslogReplay() throws IOException {
final int numDocs = randomIntBetween(1, 10);
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0,
Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult indexResult = engine.index(firstIndexRequest);
assertThat(indexResult.getVersion(), equalTo(1L));
}
EngineConfig config = engine.config();
assertVisibleCount(engine, numDocs);
engine.close();
try (InternalEngine engine = new InternalEngine(config)) {
engine.skipTranslogRecovery();
try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), randomIntBetween(numDocs, numDocs + 10));
assertThat(topDocs.totalHits.value, equalTo(0L));
}
}
}
@Test
public void testTranslogReplay() throws IOException {
final LongSupplier inSyncGlobalCheckpointSupplier = () -> this.engine.getProcessedLocalCheckpoint();
final int numDocs = randomIntBetween(1, 10);
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0,
Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult indexResult = engine.index(firstIndexRequest);
assertThat(indexResult.getVersion(), equalTo(1L));
}
assertVisibleCount(engine, numDocs);
translogHandler = createTranslogHandler(engine.engineConfig.getIndexSettings());
engine.close();
        // we need to reuse the engine config, otherwise the parser.mappingModified won't work
engine = new InternalEngine(copy(engine.config(), inSyncGlobalCheckpointSupplier));
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
engine.refresh("warm_up");
assertVisibleCount(engine, numDocs, false);
engine.close();
translogHandler = createTranslogHandler(engine.engineConfig.getIndexSettings());
engine = createEngine(store, primaryTranslogDir, inSyncGlobalCheckpointSupplier);
engine.refresh("warm_up");
assertVisibleCount(engine, numDocs, false);
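        // index one more document with external versioning, optionally flushing before the next replay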
final boolean flush = randomBoolean();
int randomId = randomIntBetween(numDocs + 1, numDocs + 10);
ParsedDocument doc = testParsedDocument(Integer.toString(randomId), null, testDocument(), new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0, 1,
VersionType.EXTERNAL, PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult indexResult = engine.index(firstIndexRequest);
assertThat(indexResult.getVersion(), equalTo(1L));
if (flush) {
engine.flush();
engine.refresh("test");
}
doc = testParsedDocument(Integer.toString(randomId), null, testDocument(), new BytesArray("{}"), null);
Engine.Index idxRequest = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0, 2,
VersionType.EXTERNAL, PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult result = engine.index(idxRequest);
engine.refresh("test");
assertThat(result.getVersion(), equalTo(2L));
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs + 1);
assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L));
}
engine.close();
translogHandler = createTranslogHandler(engine.engineConfig.getIndexSettings());
engine = createEngine(store, primaryTranslogDir, inSyncGlobalCheckpointSupplier);
engine.refresh("warm_up");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs + 1);
assertThat(topDocs.totalHits.value, equalTo(numDocs + 1L));
}
engine.delete(new Engine.Delete(
Integer.toString(randomId),
newUid(doc),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
));
if (randomBoolean()) {
engine.close();
engine = createEngine(store, primaryTranslogDir, inSyncGlobalCheckpointSupplier);
}
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), numDocs);
assertThat(topDocs.totalHits.value, equalTo((long) numDocs));
}
}
@Test
public void testRecoverFromForeignTranslog() throws IOException {
final int numDocs = randomIntBetween(1, 10);
for (int i = 0; i < numDocs; i++) {
ParsedDocument doc = testParsedDocument(Integer.toString(i), null, testDocument(), new BytesArray("{}"), null);
Engine.Index firstIndexRequest = new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0,
Versions.MATCH_DELETED, VersionType.INTERNAL, PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult index = engine.index(firstIndexRequest);
assertThat(index.getVersion(), equalTo(1L));
}
assertVisibleCount(engine, numDocs);
Translog.TranslogGeneration generation = engine.getTranslog().getGeneration();
engine.close();
final Path badTranslogLog = createTempDir();
final String badUUID = Translog.createEmptyTranslog(badTranslogLog, SequenceNumbers.NO_OPS_PERFORMED, shardId, primaryTerm.get());
Translog translog = new Translog(
new TranslogConfig(shardId, badTranslogLog, INDEX_SETTINGS, BigArrays.NON_RECYCLING_INSTANCE),
badUUID, createTranslogDeletionPolicy(INDEX_SETTINGS), () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get, seqNo -> {});
translog.add(new Translog.Index(
"SomeBogusId",
0,
primaryTerm.get(),
"{}".getBytes(Charset.forName("UTF-8"))));
assertEquals(generation.translogFileGeneration, translog.currentFileGeneration());
translog.close();
EngineConfig config = engine.config();
        /* create a TranslogConfig that points to a translog created with a different UUID */
TranslogConfig translogConfig = new TranslogConfig(shardId, translog.location(), config.getIndexSettings(),
BigArrays.NON_RECYCLING_INSTANCE);
EngineConfig brokenConfig = new EngineConfig(
shardId,
allocationId.getId(),
threadPool,
config.getIndexSettings(),
store,
newMergePolicy(),
config.getAnalyzer(),
new CodecService(null, logger),
config.getEventListener(),
IndexSearcher.getDefaultQueryCache(),
IndexSearcher.getDefaultQueryCachingPolicy(),
translogConfig,
TimeValue.timeValueMinutes(5),
config.getExternalRefreshListener(),
config.getInternalRefreshListener(),
new NoneCircuitBreakerService(),
() -> UNASSIGNED_SEQ_NO,
() -> RetentionLeases.EMPTY,
primaryTerm::get,
tombstoneDocSupplier()
);
expectThrows(EngineCreationFailureException.class, () -> new InternalEngine(brokenConfig));
engine = createEngine(store, primaryTranslogDir); // and recover again!
assertVisibleCount(engine, numDocs, true);
}
@Test
public void testShardNotAvailableExceptionWhenEngineClosedConcurrently() throws IOException, InterruptedException {
AtomicReference<Exception> exception = new AtomicReference<>();
String operation = randomFrom("optimize", "refresh", "flush");
Thread mergeThread = new Thread() {
@Override
public void run() {
boolean stop = false;
logger.info("try with {}", operation);
while (stop == false) {
try {
switch (operation) {
case "optimize": {
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
break;
}
case "refresh": {
engine.refresh("test refresh");
break;
}
case "flush": {
engine.flush(true, false);
break;
}
}
} catch (Exception e) {
exception.set(e);
stop = true;
}
}
}
};
mergeThread.start();
engine.close();
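        // the concurrent close must surface in the background thread as a shard-not-available exception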
mergeThread.join();
logger.info("exception caught: ", exception.get());
assertTrue("expected an Exception that signals shard is not available",
TransportActions.isShardNotAvailableException(exception.get()));
}
/**
     * Tests that, when the close method returns, the engine is guaranteed to have cleaned up and that all resources are closed
*/
@Test
public void testConcurrentEngineClosed() throws BrokenBarrierException, InterruptedException {
Thread[] closingThreads = new Thread[3];
CyclicBarrier barrier = new CyclicBarrier(1 + closingThreads.length + 1);
Thread failEngine = new Thread(new AbstractRunnable() {
@Override
public void onFailure(Exception e) {
throw new AssertionError(e);
}
@Override
protected void doRun() throws Exception {
barrier.await();
engine.failEngine("test", new RuntimeException("test"));
}
});
failEngine.start();
        for (int i = 0; i < closingThreads.length; i++) {
boolean flushAndClose = randomBoolean();
closingThreads[i] = new Thread(new AbstractRunnable() {
@Override
public void onFailure(Exception e) {
throw new AssertionError(e);
}
@Override
protected void doRun() throws Exception {
barrier.await();
if (flushAndClose) {
engine.flushAndClose();
} else {
engine.close();
}
                    // try to acquire the writer lock - i.e., everything is closed; we need to synchronize
                    // to avoid races between closing threads
synchronized (closingThreads) {
try (Lock ignored = store.directory().obtainLock(IndexWriter.WRITE_LOCK_NAME)) {
// all good.
}
}
}
});
closingThreads[i].setName("closingThread_" + i);
closingThreads[i].start();
}
barrier.await();
failEngine.join();
for (Thread t : closingThreads) {
t.join();
}
}
private static class ThrowingIndexWriter extends IndexWriter {
private AtomicReference<Supplier<Exception>> failureToThrow = new AtomicReference<>();
ThrowingIndexWriter(Directory d, IndexWriterConfig conf) throws IOException {
super(d, conf);
}
@Override
public long addDocument(Iterable<? extends IndexableField> doc) throws IOException {
maybeThrowFailure();
return super.addDocument(doc);
}
private void maybeThrowFailure() throws IOException {
if (failureToThrow.get() != null) {
Exception failure = failureToThrow.get().get();
clearFailure(); // one shot
if (failure instanceof RuntimeException) {
throw (RuntimeException) failure;
} else if (failure instanceof IOException) {
throw (IOException) failure;
} else {
assert false: "unsupported failure class: " + failure.getClass().getCanonicalName();
}
}
}
@Override
public long softUpdateDocument(Term term, Iterable<? extends IndexableField> doc, Field... softDeletes) throws IOException {
maybeThrowFailure();
return super.softUpdateDocument(term, doc, softDeletes);
}
@Override
public long deleteDocuments(Term... terms) throws IOException {
maybeThrowFailure();
return super.deleteDocuments(terms);
}
public void setThrowFailure(Supplier<Exception> failureSupplier) {
failureToThrow.set(failureSupplier);
}
public void clearFailure() {
failureToThrow.set(null);
}
}
@Test
public void testHandleDocumentFailure() throws Exception {
try (Store store = createStore()) {
final ParsedDocument doc1 = testParsedDocument("1", null, testDocumentWithTextField(), B_1, null);
final ParsedDocument doc2 = testParsedDocument("2", null, testDocumentWithTextField(), B_1, null);
final ParsedDocument doc3 = testParsedDocument("3", null, testDocumentWithTextField(), B_1, null);
AtomicReference<ThrowingIndexWriter> throwingIndexWriter = new AtomicReference<>();
try (InternalEngine engine = createEngine(
defaultSettings,
store,
createTempDir(),
NoMergePolicy.INSTANCE,
(directory, iwc) -> {
throwingIndexWriter.set(new ThrowingIndexWriter(directory, iwc));
return throwingIndexWriter.get();
})
) {
// test document failure while indexing
if (randomBoolean()) {
throwingIndexWriter.get().setThrowFailure(() -> new IOException("simulated"));
} else {
throwingIndexWriter.get().setThrowFailure(() -> new IllegalArgumentException("simulated max token length"));
}
// test index with document failure
Engine.IndexResult indexResult = engine.index(indexForDoc(doc1));
assertNotNull(indexResult.getFailure());
assertThat(indexResult.getSeqNo(), equalTo(0L));
assertThat(indexResult.getVersion(), equalTo(Versions.MATCH_ANY));
assertNotNull(indexResult.getTranslogLocation());
throwingIndexWriter.get().clearFailure();
indexResult = engine.index(indexForDoc(doc1));
assertThat(indexResult.getSeqNo(), equalTo(1L));
assertThat(indexResult.getVersion(), equalTo(1L));
assertNull(indexResult.getFailure());
assertNotNull(indexResult.getTranslogLocation());
engine.index(indexForDoc(doc2));
// test non document level failure is thrown
if (randomBoolean()) {
// simulate close by corruption
throwingIndexWriter.get().setThrowFailure(null);
UncheckedIOException uncheckedIOException = expectThrows(UncheckedIOException.class, () -> {
Engine.Index index = indexForDoc(doc3);
index.parsedDoc().rootDoc().add(new StoredField("foo", "bar") {
                        // this is a hack to add a failure while storing the document, which triggers a tragic event
                        // and in turn fails the engine
@Override
public BytesRef binaryValue() {
throw new UncheckedIOException(new MockDirectoryWrapper.FakeIOException());
}
});
engine.index(index);
});
assertTrue(uncheckedIOException.getCause() instanceof MockDirectoryWrapper.FakeIOException);
} else {
// normal close
engine.close();
}
// now the engine is closed check we respond correctly
expectThrows(AlreadyClosedException.class, () -> engine.index(indexForDoc(doc1)));
expectThrows(AlreadyClosedException.class,
() -> engine.delete(new Engine.Delete(
"1",
newUid(doc1),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0)));
expectThrows(AlreadyClosedException.class,
() -> engine.noOp(
new Engine.NoOp(engine.getLocalCheckpointTracker().generateSeqNo(),
engine.config().getPrimaryTermSupplier().getAsLong(),
randomFrom(Engine.Operation.Origin.values()),
randomNonNegativeLong(),
"test")));
}
}
}
@Test
public void testDeleteWithFatalError() throws Exception {
final IllegalStateException tragicException = new IllegalStateException("fail to store tombstone");
try (Store store = createStore()) {
EngineConfig.TombstoneDocSupplier tombstoneDocSupplier = new EngineConfig.TombstoneDocSupplier() {
@Override
public ParsedDocument newDeleteTombstoneDoc(String id) {
ParsedDocument parsedDocument = tombstoneDocSupplier().newDeleteTombstoneDoc(id);
parsedDocument.rootDoc().add(new StoredField("foo", "bar") {
                    // this is a hack to add a failure while storing the document, which triggers a tragic event
                    // and in turn fails the engine
@Override
public BytesRef binaryValue() {
throw tragicException;
}
});
return parsedDocument;
}
@Override
public ParsedDocument newNoopTombstoneDoc(String reason) {
return tombstoneDocSupplier().newNoopTombstoneDoc(reason);
}
};
EngineConfig config = config(this.engine.config(), store, createTempDir(), tombstoneDocSupplier);
try (InternalEngine engine = createEngine(config)) {
final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null);
engine.index(indexForDoc(doc));
expectThrows(IllegalStateException.class, () -> engine.delete(
new Engine.Delete(
"1",
newUid(doc),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
)));
assertTrue(engine.isClosed.get());
assertSame(tragicException, engine.failedEngine.get());
}
}
}
@Test
public void testDoubleDeliveryPrimary() throws IOException {
final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
final boolean create = randomBoolean();
Engine.Index operation = appendOnlyPrimary(doc, false, 1, create);
Engine.Index retry = appendOnlyPrimary(doc, true, 1, create);
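        // deliver the original operation and its retry in random order; either way only one document may remain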
if (randomBoolean()) {
Engine.IndexResult indexResult = engine.index(operation);
assertLuceneOperations(engine, 1, 0, 0);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(0L));
assertNotNull(indexResult.getTranslogLocation());
Engine.IndexResult retryResult = engine.index(retry);
assertLuceneOperations(engine, 1, create ? 0 : 1, 0);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(1L));
if (create) {
assertNull(retryResult.getTranslogLocation());
} else {
assertNotNull(retryResult.getTranslogLocation());
}
} else {
Engine.IndexResult retryResult = engine.index(retry);
assertLuceneOperations(engine, 1, 0, 0);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(1L));
assertNotNull(retryResult.getTranslogLocation());
Engine.IndexResult indexResult = engine.index(operation);
assertLuceneOperations(engine, 1, create ? 0 : 1, 0);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(2L));
assertNotNull(retryResult.getTranslogLocation());
if (create) {
assertNull(indexResult.getTranslogLocation());
} else {
assertNotNull(indexResult.getTranslogLocation());
}
}
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
assertEquals(1, topDocs.totalHits.value);
}
operation = appendOnlyPrimary(doc, false, 1, create);
retry = appendOnlyPrimary(doc, true, 1, create);
if (randomBoolean()) {
Engine.IndexResult indexResult = engine.index(operation);
if (create) {
assertNull(indexResult.getTranslogLocation());
} else {
assertNotNull(indexResult.getTranslogLocation());
}
Engine.IndexResult retryResult = engine.index(retry);
if (create) {
assertNull(retryResult.getTranslogLocation());
} else {
assertNotNull(retryResult.getTranslogLocation());
}
} else {
Engine.IndexResult retryResult = engine.index(retry);
if (create) {
assertNull(retryResult.getTranslogLocation());
} else {
assertNotNull(retryResult.getTranslogLocation());
}
Engine.IndexResult indexResult = engine.index(operation);
if (create) {
assertNull(indexResult.getTranslogLocation());
} else {
assertNotNull(indexResult.getTranslogLocation());
}
}
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
assertEquals(1, topDocs.totalHits.value);
}
}
@Test
public void testDoubleDeliveryReplicaAppendingAndDeleteOnly() throws IOException {
final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
Engine.Index operation = appendOnlyReplica(doc, false, 1, randomIntBetween(0, 5));
Engine.Index retry = appendOnlyReplica(doc, true, 1, randomIntBetween(0, 5));
Engine.Delete delete = new Engine.Delete(
operation.id(),
operation.uid(),
Math.max(retry.seqNo(), operation.seqNo()) + 1,
operation.primaryTerm(),
operation.version() + 1,
operation.versionType(),
REPLICA,
operation.startTime() + 1,
UNASSIGNED_SEQ_NO,
0
);
        // operations with a seq# equal to or lower than the local checkpoint are not indexed into Lucene
        // and the version lookup is skipped
final boolean sameSeqNo = operation.seqNo() == retry.seqNo();
if (randomBoolean()) {
Engine.IndexResult indexResult = engine.index(operation);
assertLuceneOperations(engine, 1, 0, 0);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(0L));
assertNotNull(indexResult.getTranslogLocation());
engine.delete(delete);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(1L));
assertLuceneOperations(engine, 1, 0, 1);
Engine.IndexResult retryResult = engine.index(retry);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(sameSeqNo ? 1L : 2L));
assertNotNull(retryResult.getTranslogLocation());
assertTrue(retryResult.getTranslogLocation().compareTo(indexResult.getTranslogLocation()) > 0);
} else {
Engine.IndexResult retryResult = engine.index(retry);
assertLuceneOperations(engine, 1, 0, 0);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(0L));
assertNotNull(retryResult.getTranslogLocation());
engine.delete(delete);
assertLuceneOperations(engine, 1, 0, 1);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(1L));
Engine.IndexResult indexResult = engine.index(operation);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(sameSeqNo ? 1L : 2L));
assertNotNull(retryResult.getTranslogLocation());
assertTrue(retryResult.getTranslogLocation().compareTo(indexResult.getTranslogLocation()) < 0);
}
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
assertEquals(0, topDocs.totalHits.value);
}
}
@Test
public void testDoubleDeliveryReplicaAppendingOnly() throws IOException {
final Supplier<ParsedDocument> doc = () -> testParsedDocument("1", null, testDocumentWithTextField(),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
boolean replicaOperationIsRetry = randomBoolean();
Engine.Index operation = appendOnlyReplica(doc.get(), replicaOperationIsRetry, 1, randomIntBetween(0, 5));
Engine.IndexResult result = engine.index(operation);
assertLuceneOperations(engine, 1, 0, 0);
assertEquals(0, engine.getNumVersionLookups());
assertNotNull(result.getTranslogLocation());
// promote to primary: first do refresh
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
assertEquals(1, topDocs.totalHits.value);
}
final boolean create = randomBoolean();
operation = appendOnlyPrimary(doc.get(), false, 1, create);
Engine.Index retry = appendOnlyPrimary(doc.get(), true, 1, create);
if (randomBoolean()) {
// if the replica operation wasn't a retry, the operation arriving on the newly promoted primary must be a retry
if (replicaOperationIsRetry) {
Engine.IndexResult indexResult = engine.index(operation);
if (create) {
assertNull(indexResult.getTranslogLocation());
} else {
assertNotNull(indexResult.getTranslogLocation());
}
}
Engine.IndexResult retryResult = engine.index(retry);
if (create) {
assertNull(retryResult.getTranslogLocation());
} else {
assertNotNull(retryResult.getTranslogLocation());
}
} else {
Engine.IndexResult retryResult = engine.index(retry);
if (create) {
assertNull(retryResult.getTranslogLocation());
} else {
assertNotNull(retryResult.getTranslogLocation());
}
Engine.IndexResult indexResult = engine.index(operation);
if (create) {
assertNull(indexResult.getTranslogLocation());
} else {
assertNotNull(indexResult.getTranslogLocation());
}
}
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
assertEquals(1, topDocs.totalHits.value);
}
}
@Test
public void testDoubleDeliveryReplica() throws IOException {
final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
Engine.Index operation = replicaIndexForDoc(doc, 1, 20, false);
Engine.Index duplicate = replicaIndexForDoc(doc, 1, 20, true);
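        // both deliveries carry the same seq#, so the engine must index the document into Lucene only once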
if (randomBoolean()) {
Engine.IndexResult indexResult = engine.index(operation);
assertLuceneOperations(engine, 1, 0, 0);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(0L));
assertNotNull(indexResult.getTranslogLocation());
if (randomBoolean()) {
engine.refresh("test");
}
Engine.IndexResult retryResult = engine.index(duplicate);
assertLuceneOperations(engine, 1, 0, 0);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(0L));
assertNotNull(retryResult.getTranslogLocation());
assertTrue(retryResult.getTranslogLocation().compareTo(indexResult.getTranslogLocation()) > 0);
} else {
Engine.IndexResult retryResult = engine.index(duplicate);
assertLuceneOperations(engine, 1, 0, 0);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(0L));
assertNotNull(retryResult.getTranslogLocation());
if (randomBoolean()) {
engine.refresh("test");
}
Engine.IndexResult indexResult = engine.index(operation);
assertLuceneOperations(engine, 1, 0, 0);
assertThatIfAssertionEnabled(engine.getNumVersionLookups(), is(0L));
assertNotNull(retryResult.getTranslogLocation());
assertTrue(retryResult.getTranslogLocation().compareTo(indexResult.getTranslogLocation()) < 0);
}
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
assertEquals(1, topDocs.totalHits.value);
}
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
assertEquals(1, topDocs.totalHits.value);
}
if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) {
List<Translog.Operation> ops = readAllOperationsInLucene(engine, createMapperService("test"));
assertThat(ops.stream().map(o -> o.seqNo()).collect(Collectors.toList()), hasItem(20L));
}
}
@Test
public void testRetryWithAutogeneratedIdWorksAndNoDuplicateDocs() throws IOException {
final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
boolean isRetry = false;
long autoGeneratedIdTimestamp = 0;
Engine.Index index = new Engine.Index(
newUid(doc),
doc,
UNASSIGNED_SEQ_NO,
0,
randomBoolean() ? Versions.MATCH_DELETED : Versions.MATCH_ANY,
VersionType.INTERNAL,
PRIMARY,
System.nanoTime(),
autoGeneratedIdTimestamp,
isRetry,
UNASSIGNED_SEQ_NO,
0
);
Engine.IndexResult indexResult = engine.index(index);
assertThat(indexResult.getVersion(), equalTo(1L));
index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(),
null, REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry, UNASSIGNED_SEQ_NO, 0);
indexResult = replicaEngine.index(index);
assertThat(indexResult.getVersion(), equalTo(1L));
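        // now simulate a client retry of the same auto-generated-id request on the primary; it must not create a duplicate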
isRetry = true;
index = new Engine.Index(
newUid(doc),
doc,
UNASSIGNED_SEQ_NO,
0,
Versions.MATCH_ANY,
VersionType.INTERNAL,
PRIMARY,
System.nanoTime(),
autoGeneratedIdTimestamp,
isRetry,
UNASSIGNED_SEQ_NO,
0
);
indexResult = engine.index(index);
assertThat(indexResult.getVersion(), equalTo(1L));
assertNotEquals(indexResult.getSeqNo(), UNASSIGNED_SEQ_NO);
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
assertEquals(1, topDocs.totalHits.value);
}
index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(),
null, REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry, UNASSIGNED_SEQ_NO, 0);
indexResult = replicaEngine.index(index);
assertThat(indexResult.getResultType(), equalTo(Engine.Result.Type.SUCCESS));
replicaEngine.refresh("test");
try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
assertEquals(1, topDocs.totalHits.value);
}
}
@Test
public void testRetryWithAutogeneratedIdsAndWrongOrderWorksAndNoDuplicateDocs() throws IOException {
final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
boolean isRetry = true;
long autoGeneratedIdTimestamp = 0;
Engine.Index firstIndexRequest = new Engine.Index(
newUid(doc),
doc,
UNASSIGNED_SEQ_NO,
0,
randomBoolean() ? Versions.MATCH_DELETED : Versions.MATCH_ANY,
VersionType.INTERNAL,
PRIMARY,
System.nanoTime(),
autoGeneratedIdTimestamp,
isRetry,
UNASSIGNED_SEQ_NO,
0
);
Engine.IndexResult result = engine.index(firstIndexRequest);
assertThat(result.getVersion(), equalTo(1L));
Engine.Index firstIndexRequestReplica = new Engine.Index(newUid(doc), doc, result.getSeqNo(), firstIndexRequest.primaryTerm(),
result.getVersion(), null, REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry, UNASSIGNED_SEQ_NO, 0);
Engine.IndexResult indexReplicaResult = replicaEngine.index(firstIndexRequestReplica);
assertThat(indexReplicaResult.getVersion(), equalTo(1L));
isRetry = false;
Engine.Index secondIndexRequest = new Engine.Index(
newUid(doc),
doc,
UNASSIGNED_SEQ_NO,
0,
Versions.MATCH_DELETED,
VersionType.INTERNAL,
PRIMARY,
System.nanoTime(),
autoGeneratedIdTimestamp,
isRetry,
UNASSIGNED_SEQ_NO,
0);
Engine.IndexResult indexResult = engine.index(secondIndexRequest);
assertFalse(indexResult.isCreated());
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
assertEquals(1, topDocs.totalHits.value);
}
Engine.Index secondIndexRequestReplica = new Engine.Index(newUid(doc), doc, result.getSeqNo(), secondIndexRequest.primaryTerm(),
result.getVersion(), null, REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry, UNASSIGNED_SEQ_NO, 0);
replicaEngine.index(secondIndexRequestReplica);
replicaEngine.refresh("test");
try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) {
TopDocs topDocs = searcher.search(new MatchAllDocsQuery(), 10);
assertEquals(1, topDocs.totalHits.value);
}
}
public Engine.Index randomAppendOnly(ParsedDocument doc, boolean retry, final long autoGeneratedIdTimestamp) {
if (randomBoolean()) {
return appendOnlyPrimary(doc, retry, autoGeneratedIdTimestamp);
} else {
return appendOnlyReplica(doc, retry, autoGeneratedIdTimestamp, 0);
}
}
public Engine.Index appendOnlyPrimary(ParsedDocument doc, boolean retry, final long autoGeneratedIdTimestamp, boolean create) {
return new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 0, create ? Versions.MATCH_DELETED : Versions.MATCH_ANY,
VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(), autoGeneratedIdTimestamp, retry,
UNASSIGNED_SEQ_NO, 0);
}
public Engine.Index appendOnlyPrimary(ParsedDocument doc, boolean retry, final long autoGeneratedIdTimestamp) {
return appendOnlyPrimary(doc, retry, autoGeneratedIdTimestamp, randomBoolean());
}
public Engine.Index appendOnlyReplica(ParsedDocument doc, boolean retry, final long autoGeneratedIdTimestamp, final long seqNo) {
return new Engine.Index(newUid(doc), doc, seqNo, 2, 1, null,
Engine.Operation.Origin.REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, retry, UNASSIGNED_SEQ_NO, 0);
}
@Test
public void testAppendConcurrently() throws InterruptedException, IOException {
Thread[] thread = new Thread[randomIntBetween(3, 5)];
int numDocs = randomIntBetween(1000, 10000);
assertEquals(0, engine.getNumVersionLookups());
assertEquals(0, engine.getNumIndexVersionsLookups());
boolean primary = randomBoolean();
List<Engine.Index> docs = new ArrayList<>();
for (int i = 0; i < numDocs; i++) {
final ParsedDocument doc = testParsedDocument(Integer.toString(i), null,
testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
Engine.Index index = primary ? appendOnlyPrimary(doc, false, i) : appendOnlyReplica(doc, false, i, i);
docs.add(index);
}
Collections.shuffle(docs, random());
CountDownLatch startGun = new CountDownLatch(thread.length);
AtomicInteger offset = new AtomicInteger(-1);
for (int i = 0; i < thread.length; i++) {
thread[i] = new Thread() {
@Override
public void run() {
startGun.countDown();
try {
startGun.await();
} catch (InterruptedException e) {
throw new AssertionError(e);
}
assertThat(engine.getVersionMap().values(), empty());
int docOffset;
while ((docOffset = offset.incrementAndGet()) < docs.size()) {
try {
engine.index(docs.get(docOffset));
} catch (IOException e) {
throw new AssertionError(e);
}
}
}
};
thread[i].start();
}
try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) {
assertEquals("unexpected refresh", 0, searcher.getIndexReader().maxDoc());
}
for (int i = 0; i < thread.length; i++) {
thread[i].join();
}
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
int count = searcher.count(new MatchAllDocsQuery());
assertEquals(docs.size(), count);
}
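        // append-only operations must not trigger any version or index lookups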
assertEquals(0, engine.getNumVersionLookups());
assertEquals(0, engine.getNumIndexVersionsLookups());
assertThat(engine.getMaxSeenAutoIdTimestamp(),
equalTo(docs.stream().mapToLong(Engine.Index::getAutoGeneratedIdTimestamp).max().getAsLong()));
assertLuceneOperations(engine, numDocs, 0, 0);
}
public static long getNumVersionLookups(InternalEngine engine) { // for other tests to access this
return engine.getNumVersionLookups();
}
public static long getNumIndexVersionsLookups(InternalEngine engine) { // for other tests to access this
return engine.getNumIndexVersionsLookups();
}
@Test
public void testFailEngineOnRandomIO() throws IOException, InterruptedException {
MockDirectoryWrapper wrapper = newMockDirectory();
final Path translogPath = createTempDir("testFailEngineOnRandomIO");
try (Store store = createStore(wrapper)) {
CyclicBarrier join = new CyclicBarrier(2);
CountDownLatch start = new CountDownLatch(1);
AtomicInteger controller = new AtomicInteger(0);
EngineConfig config = config(defaultSettings, store, translogPath, newMergePolicy(), new ReferenceManager.RefreshListener() {
@Override
public void beforeRefresh() throws IOException {
}
@Override
public void afterRefresh(boolean didRefresh) throws IOException {
int i = controller.incrementAndGet();
if (i == 1) {
throw new MockDirectoryWrapper.FakeIOException();
} else if (i == 2) {
try {
start.await();
} catch (InterruptedException e) {
throw new AssertionError(e);
}
throw new ElasticsearchException("something completely different");
}
}
});
InternalEngine internalEngine = createEngine(config);
int docId = 0;
final ParsedDocument doc = testParsedDocument(Integer.toString(docId), null,
testDocumentWithTextField(), new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
Engine.Index index = randomBoolean() ? indexForDoc(doc) : randomAppendOnly(doc, false, docId);
internalEngine.index(index);
Runnable r = () -> {
try {
join.await();
} catch (Exception e) {
throw new AssertionError(e);
}
try {
internalEngine.refresh("test");
fail();
} catch (AlreadyClosedException ex) {
if (ex.getCause() != null) {
assertTrue(ex.toString(), ex.getCause() instanceof MockDirectoryWrapper.FakeIOException);
}
} catch (RefreshFailedEngineException ex) {
// fine
} finally {
start.countDown();
}
};
Thread t = new Thread(r);
Thread t1 = new Thread(r);
t.start();
t1.start();
t.join();
t1.join();
assertTrue(internalEngine.isClosed.get());
assertTrue(internalEngine.failedEngine.get() instanceof MockDirectoryWrapper.FakeIOException);
}
}
@Test
public void testSequenceIDs() throws Exception {
Tuple<Long, Long> seqID = getSequenceID(engine, new Engine.Get("type", newUid("1")));
// Non-existent doc returns no seqnum and no primary term
assertThat(seqID.v1(), equalTo(UNASSIGNED_SEQ_NO));
assertThat(seqID.v2(), equalTo(0L));
// create a document
Document document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE));
ParsedDocument doc = testParsedDocument("1", null, document, B_1, null);
engine.index(indexForDoc(doc));
engine.refresh("test");
seqID = getSequenceID(engine, newGet(doc));
logger.info("--> got seqID: {}", seqID);
assertThat(seqID.v1(), equalTo(0L));
assertThat(seqID.v2(), equalTo(primaryTerm.get()));
// Index the same document again
document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE));
doc = testParsedDocument("1", null, document, B_1, null);
engine.index(indexForDoc(doc));
engine.refresh("test");
seqID = getSequenceID(engine, newGet(doc));
logger.info("--> got seqID: {}", seqID);
assertThat(seqID.v1(), equalTo(1L));
assertThat(seqID.v2(), equalTo(primaryTerm.get()));
// Index the same document for the third time, this time changing the primary term
document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE));
doc = testParsedDocument("1", null, document, B_1, null);
engine.index(new Engine.Index(newUid(doc), doc, UNASSIGNED_SEQ_NO, 3,
Versions.MATCH_ANY, VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY,
System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0));
engine.refresh("test");
seqID = getSequenceID(engine, newGet(doc));
logger.info("--> got seqID: {}", seqID);
assertThat(seqID.v1(), equalTo(2L));
assertThat(seqID.v2(), equalTo(3L));
// we can query by the _seq_no
Engine.Searcher searchResult = engine.acquireSearcher("test");
MatcherAssert.assertThat(searchResult, EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(1));
MatcherAssert.assertThat(searchResult,
EngineSearcherTotalHitsMatcher.engineSearcherTotalHits(LongPoint.newExactQuery("_seq_no", 2), 1));
searchResult.close();
}
@Test
public void testLookupSeqNoByIdInLucene() throws Exception {
int numOps = between(10, 100);
long seqNo = 0;
List<Engine.Operation> operations = new ArrayList<>(numOps);
for (int i = 0; i < numOps; i++) {
String id = Integer.toString(between(1, 50));
boolean isIndexing = randomBoolean();
int copies = frequently() ? 1 : between(2, 4);
for (int c = 0; c < copies; c++) {
final ParsedDocument doc = EngineTestCase.createParsedDoc(id, null);
if (isIndexing) {
operations.add(new Engine.Index(EngineTestCase.newUid(doc), doc, seqNo, primaryTerm.get(),
i, null, Engine.Operation.Origin.REPLICA, threadPool.relativeTimeInMillis(), -1, true, UNASSIGNED_SEQ_NO, 0L));
} else {
operations.add(new Engine.Delete(
doc.id(),
EngineTestCase.newUid(doc),
seqNo,
primaryTerm.get(),
i,
null,
Engine.Operation.Origin.REPLICA,
threadPool.relativeTimeInMillis(),
UNASSIGNED_SEQ_NO,
0L
));
}
}
seqNo++;
if (rarely()) {
seqNo++;
}
}
Randomness.shuffle(operations);
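        // enable soft deletes so that the operation history is retained in Lucene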
Settings.Builder settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true);
final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
Map<String, Engine.Operation> latestOps = new HashMap<>(); // id -> latest seq_no
try (Store store = createStore();
InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), newMergePolicy(), null))) {
CheckedRunnable<IOException> lookupAndCheck = () -> {
try (Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) {
Map<String, Long> liveOps = latestOps.entrySet().stream()
.filter(e -> e.getValue().operationType() == Engine.Operation.TYPE.INDEX)
.collect(Collectors.toMap(e -> e.getKey(), e -> e.getValue().seqNo()));
assertThat(getDocIds(engine, true).stream().collect(Collectors.toMap(e -> e.getId(), e -> e.getSeqNo())),
equalTo(liveOps));
for (String id : latestOps.keySet()) {
String msg = "latestOps=" + latestOps + " op=" + id;
DocIdAndSeqNo docIdAndSeqNo = VersionsAndSeqNoResolver.loadDocIdAndSeqNo(searcher.getIndexReader(), newUid(id));
if (liveOps.containsKey(id) == false) {
assertNull(msg, docIdAndSeqNo);
} else {
assertNotNull(msg, docIdAndSeqNo);
assertThat(msg, docIdAndSeqNo.seqNo, equalTo(latestOps.get(id).seqNo()));
}
}
String notFoundId = randomValueOtherThanMany(liveOps::containsKey, () -> Long.toString(randomNonNegativeLong()));
assertNull(VersionsAndSeqNoResolver.loadDocIdAndSeqNo(searcher.getIndexReader(), newUid(notFoundId)));
}
};
for (Engine.Operation op : operations) {
if (op instanceof Engine.Index) {
engine.index((Engine.Index) op);
if (latestOps.containsKey(op.id()) == false || latestOps.get(op.id()).seqNo() < op.seqNo()) {
latestOps.put(op.id(), op);
}
} else if (op instanceof Engine.Delete) {
engine.delete((Engine.Delete) op);
if (latestOps.containsKey(op.id()) == false || latestOps.get(op.id()).seqNo() < op.seqNo()) {
latestOps.put(op.id(), op);
}
}
if (randomInt(100) < 10) {
engine.refresh("test");
lookupAndCheck.run();
}
if (rarely()) {
engine.flush(false, true);
lookupAndCheck.run();
}
}
engine.refresh("test");
lookupAndCheck.run();
}
}
/**
     * A sequence number generator that will generate a sequence number and, if {@code stall} is set to true, will wait on the barrier and the
     * referenced latch before returning. If the local checkpoint should advance (because {@code stall} is false), the value of
     * {@code expectedLocalCheckpoint} is set accordingly.
*
* @param latchReference to latch the thread for the purpose of stalling
* @param barrier to signal the thread has generated a new sequence number
* @param stall whether or not the thread should stall
* @param expectedLocalCheckpoint the expected local checkpoint after generating a new sequence
* number
* @return a sequence number generator
*/
private ToLongBiFunction<Engine, Engine.Operation> getStallingSeqNoGenerator(
final AtomicReference<CountDownLatch> latchReference,
final CyclicBarrier barrier,
final AtomicBoolean stall,
final AtomicLong expectedLocalCheckpoint) {
return (engine, operation) -> {
final long seqNo = generateNewSeqNo(engine);
final CountDownLatch latch = latchReference.get();
if (stall.get()) {
try {
barrier.await();
latch.await();
} catch (BrokenBarrierException | InterruptedException e) {
throw new RuntimeException(e);
}
} else {
if (expectedLocalCheckpoint.get() + 1 == seqNo) {
expectedLocalCheckpoint.set(seqNo);
}
}
return seqNo;
};
}
@Test
public void testSequenceNumberAdvancesToMaxSeqOnEngineOpenOnPrimary() throws BrokenBarrierException, InterruptedException, IOException {
engine.close();
final int docs = randomIntBetween(1, 32);
InternalEngine initialEngine = null;
try {
final AtomicReference<CountDownLatch> latchReference = new AtomicReference<>(new CountDownLatch(1));
final CyclicBarrier barrier = new CyclicBarrier(2);
final AtomicBoolean stall = new AtomicBoolean();
final AtomicLong expectedLocalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
final List<Thread> threads = new ArrayList<>();
initialEngine =
createEngine(defaultSettings, store, primaryTranslogDir,
newMergePolicy(), null, LocalCheckpointTracker::new, null,
getStallingSeqNoGenerator(latchReference, barrier, stall, expectedLocalCheckpoint));
final InternalEngine finalInitialEngine = initialEngine;
for (int i = 0; i < docs; i++) {
final String id = Integer.toString(i);
final ParsedDocument doc = testParsedDocument(id, null, testDocumentWithTextField(), SOURCE, null);
stall.set(randomBoolean());
final Thread thread = new Thread(() -> {
try {
finalInitialEngine.index(indexForDoc(doc));
} catch (IOException e) {
throw new AssertionError(e);
}
});
thread.start();
if (stall.get()) {
threads.add(thread);
barrier.await();
} else {
thread.join();
}
}
assertThat(initialEngine.getProcessedLocalCheckpoint(), equalTo(expectedLocalCheckpoint.get()));
assertThat(initialEngine.getSeqNoStats(-1).getMaxSeqNo(), equalTo((long) (docs - 1)));
initialEngine.flush(true, true);
assertEquals(initialEngine.getProcessedLocalCheckpoint(), initialEngine.getPersistedLocalCheckpoint());
latchReference.get().countDown();
for (final Thread thread : threads) {
thread.join();
}
} finally {
IOUtils.close(initialEngine);
}
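        // reopen the engine: translog recovery plus gap filling must advance the local checkpoint to at least the max seq_no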
        try (InternalEngine recoveringEngine = new InternalEngine(initialEngine.config())) {
recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
recoveringEngine.fillSeqNoGaps(2);
assertEquals(recoveringEngine.getProcessedLocalCheckpoint(), recoveringEngine.getPersistedLocalCheckpoint());
assertThat(recoveringEngine.getProcessedLocalCheckpoint(), greaterThanOrEqualTo((long) (docs - 1)));
}
}
@Test
public void testOutOfOrderSequenceNumbersWithVersionConflict() throws IOException {
final List<Engine.Operation> operations = new ArrayList<>();
final int numberOfOperations = randomIntBetween(16, 32);
final AtomicLong sequenceNumber = new AtomicLong();
final Engine.Operation.Origin origin = randomFrom(LOCAL_TRANSLOG_RECOVERY, PEER_RECOVERY, PRIMARY, REPLICA);
final LongSupplier sequenceNumberSupplier =
origin == PRIMARY ? () -> UNASSIGNED_SEQ_NO : sequenceNumber::getAndIncrement;
final Supplier<ParsedDocument> doc = () -> {
final Document document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE));
return testParsedDocument("1", null, document, B_1, null);
};
final Term uid = newUid("1");
final BiFunction<String, Engine.SearcherScope, Searcher> searcherFactory = engine::acquireSearcher;
for (int i = 0; i < numberOfOperations; i++) {
if (randomBoolean()) {
final Engine.Index index = new Engine.Index(
uid,
doc.get(),
sequenceNumberSupplier.getAsLong(),
1,
i,
origin == PRIMARY ? VersionType.EXTERNAL : null,
origin,
System.nanoTime(),
Translog.UNSET_AUTO_GENERATED_TIMESTAMP,
false, UNASSIGNED_SEQ_NO, 0);
operations.add(index);
} else {
final Engine.Delete delete = new Engine.Delete(
"1",
uid,
sequenceNumberSupplier.getAsLong(),
1,
i,
origin == PRIMARY ? VersionType.EXTERNAL : null,
origin,
System.nanoTime(), UNASSIGNED_SEQ_NO, 0);
operations.add(delete);
}
}
final boolean exists = operations.get(operations.size() - 1) instanceof Engine.Index;
Randomness.shuffle(operations);
for (final Engine.Operation operation : operations) {
if (operation instanceof Engine.Index) {
engine.index((Engine.Index) operation);
} else {
engine.delete((Engine.Delete) operation);
}
}
final long expectedLocalCheckpoint;
if (origin == PRIMARY) {
// we can only advance as far as the number of operations that did not conflict
int count = 0;
// each time the version increments as we walk the list, that counts as a successful operation
long version = -1;
for (int i = 0; i < numberOfOperations; i++) {
if (operations.get(i).version() >= version) {
count++;
version = operations.get(i).version();
}
}
// sequence numbers start at zero, so the expected local checkpoint is the number of successful operations minus one
expectedLocalCheckpoint = count - 1;
} else {
expectedLocalCheckpoint = numberOfOperations - 1;
}
assertThat(engine.getProcessedLocalCheckpoint(), equalTo(expectedLocalCheckpoint));
try (Engine.GetResult result = engine.get(new Engine.Get("2", uid), searcherFactory)) {
assertThat(result.docIdAndVersion() != null, equalTo(exists));
}
}
/**
* Test that we do not leak out information on a deleted doc due to it existing in version map. There are at least 2 cases:
* <ul>
* <li>Guessing the deleted seqNo makes the operation succeed</li>
* <li>Providing any other seqNo leaks info that the doc was deleted (and its SeqNo)</li>
* </ul>
*/
    @Test
    public void testVersionConflictIgnoreDeletedDoc() throws IOException {
ParsedDocument doc = testParsedDocument("1", null, testDocument(),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
engine.delete(new Engine.Delete("1", newUid("1"), 1));
for (long seqNo : new long[]{0, 1, randomNonNegativeLong()}) {
assertDeletedVersionConflict(engine.index(new Engine.Index(newUid("1"), doc, UNASSIGNED_SEQ_NO, 1,
Versions.MATCH_ANY, VersionType.INTERNAL,
PRIMARY, randomNonNegativeLong(), UNSET_AUTO_GENERATED_TIMESTAMP, false, seqNo, 1)),
"update: " + seqNo);
assertDeletedVersionConflict(engine.delete(new Engine.Delete("1", newUid("1"), UNASSIGNED_SEQ_NO, 1,
Versions.MATCH_ANY, VersionType.INTERNAL, PRIMARY, randomNonNegativeLong(), seqNo, 1)),
"delete: " + seqNo);
}
}
private void assertDeletedVersionConflict(Engine.Result result, String operation) {
assertNotNull("Must have failure for " + operation, result.getFailure());
assertThat(operation, result.getFailure(), Matchers.instanceOf(VersionConflictEngineException.class));
VersionConflictEngineException exception = (VersionConflictEngineException) result.getFailure();
assertThat(operation, exception.getMessage(), containsString("but no document was found"));
}
/*
     * This test verifies that a no-op does not generate a new sequence number, that no-ops can advance the local checkpoint, and that no-ops
* are correctly added to the translog.
*/
@Test
public void testNoOps() throws IOException {
engine.close();
InternalEngine noOpEngine = null;
final int maxSeqNo = randomIntBetween(0, 128);
final int localCheckpoint = randomIntBetween(0, maxSeqNo);
try {
final BiFunction<Long, Long, LocalCheckpointTracker> supplier = (ms, lcp) -> new LocalCheckpointTracker(
maxSeqNo,
localCheckpoint);
EngineConfig noopEngineConfig = copy(engine.config(), new SoftDeletesRetentionMergePolicy(Lucene.SOFT_DELETES_FIELD,
() -> new MatchAllDocsQuery(), engine.config().getMergePolicy()));
noOpEngine = new InternalEngine(noopEngineConfig, supplier) {
@Override
protected long doGenerateSeqNoForOperation(Operation operation) {
throw new UnsupportedOperationException();
}
};
noOpEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
final int gapsFilled = noOpEngine.fillSeqNoGaps(primaryTerm.get());
final String reason = "filling gaps";
noOpEngine.noOp(new Engine.NoOp(maxSeqNo + 1, primaryTerm.get(), LOCAL_TRANSLOG_RECOVERY, System.nanoTime(), reason));
assertThat(noOpEngine.getProcessedLocalCheckpoint(), equalTo((long) (maxSeqNo + 1)));
assertThat(noOpEngine.getTranslog().stats().getUncommittedOperations(), equalTo(gapsFilled));
noOpEngine.noOp(
new Engine.NoOp(maxSeqNo + 2, primaryTerm.get(),
randomFrom(PRIMARY, REPLICA, PEER_RECOVERY), System.nanoTime(), reason));
assertThat(noOpEngine.getProcessedLocalCheckpoint(), equalTo((long) (maxSeqNo + 2)));
assertThat(noOpEngine.getTranslog().stats().getUncommittedOperations(), equalTo(gapsFilled + 1));
            // scan to the last operation in the translog, which should be the no-op we just added
Translog.Operation op;
Translog.Operation last = null;
try (Translog.Snapshot snapshot = noOpEngine.getTranslog().newSnapshot()) {
while ((op = snapshot.next()) != null) {
last = op;
}
}
assertNotNull(last);
assertThat(last, instanceOf(Translog.NoOp.class));
final Translog.NoOp noOp = (Translog.NoOp) last;
assertThat(noOp.seqNo(), equalTo((long) (maxSeqNo + 2)));
assertThat(noOp.primaryTerm(), equalTo(primaryTerm.get()));
assertThat(noOp.reason(), equalTo(reason));
if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) {
MapperService mapperService = createMapperService("test");
List<Translog.Operation> operationsFromLucene = readAllOperationsInLucene(noOpEngine, mapperService);
                assertThat(operationsFromLucene, hasSize(maxSeqNo + 2 - localCheckpoint)); // gap-filling no-ops plus the 2 manual no-ops.
for (int i = 0; i < operationsFromLucene.size(); i++) {
assertThat(operationsFromLucene.get(i),
equalTo(new Translog.NoOp(localCheckpoint + 1 + i, primaryTerm.get(), "filling gaps")));
}
assertConsistentHistoryBetweenTranslogAndLuceneIndex(noOpEngine, mapperService);
}
} finally {
IOUtils.close(noOpEngine);
}
}
/**
* Verifies that a segment containing only no-ops can be used to look up _version and _seqno.
*/
@Test
public void testSegmentContainsOnlyNoOps() throws Exception {
Engine.NoOpResult noOpResult = engine.noOp(new Engine.NoOp(1, primaryTerm.get(),
randomFrom(Engine.Operation.Origin.values()), randomNonNegativeLong(), "test"));
assertThat(noOpResult.getFailure(), nullValue());
engine.refresh("test");
Engine.DeleteResult deleteResult = engine.delete(replicaDeleteForDoc("id", 1, 2, randomNonNegativeLong()));
assertThat(deleteResult.getFailure(), nullValue());
engine.refresh("test");
}
/**
     * A simple test to check that a random combination of operations can coexist in segments and be looked up.
     * This is needed as some fields may be absent in Lucene if a segment lacks certain operation types, and this code checks for that.
     * For example, a segment containing only no-ops has neither _uid nor _version.
*/
@Test
public void testRandomOperations() throws Exception {
int numOps = between(10, 100);
for (int i = 0; i < numOps; i++) {
String id = Integer.toString(randomIntBetween(1, 10));
ParsedDocument doc = createParsedDoc(id, null);
Engine.Operation.TYPE type = randomFrom(Engine.Operation.TYPE.values());
switch (type) {
case INDEX:
Engine.IndexResult index = engine.index(replicaIndexForDoc(doc, between(1, 100), i, randomBoolean()));
assertThat(index.getFailure(), nullValue());
break;
case DELETE:
Engine.DeleteResult delete = engine.delete(replicaDeleteForDoc(doc.id(), between(1, 100), i, randomNonNegativeLong()));
assertThat(delete.getFailure(), nullValue());
break;
case NO_OP:
Engine.NoOpResult noOp = engine.noOp(new Engine.NoOp(i, primaryTerm.get(),
randomFrom(Engine.Operation.Origin.values()), randomNonNegativeLong(), ""));
assertThat(noOp.getFailure(), nullValue());
break;
default:
throw new IllegalStateException("Invalid op [" + type + "]");
}
if (randomBoolean()) {
engine.refresh("test");
}
if (randomBoolean()) {
engine.flush();
}
if (randomBoolean()) {
engine.forceMerge(randomBoolean(), between(1, 10), randomBoolean(), false, false, UUIDs.randomBase64UUID());
}
}
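        // with soft deletes enabled, every operation above (indexes, deletes and no-ops) must be readable back from Lucene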
if (engine.engineConfig.getIndexSettings().isSoftDeleteEnabled()) {
List<Translog.Operation> operations = readAllOperationsInLucene(engine, createMapperService("test"));
assertThat(operations, hasSize(numOps));
}
}
@Test
public void testMinGenerationForSeqNo() throws IOException, BrokenBarrierException, InterruptedException {
engine.close();
final int numberOfTriplets = randomIntBetween(1, 32);
InternalEngine actualEngine = null;
try {
final AtomicReference<CountDownLatch> latchReference = new AtomicReference<>();
final CyclicBarrier barrier = new CyclicBarrier(2);
final AtomicBoolean stall = new AtomicBoolean();
final AtomicLong expectedLocalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
final Map<Thread, CountDownLatch> threads = new LinkedHashMap<>();
actualEngine =
createEngine(defaultSettings, store, primaryTranslogDir,
newMergePolicy(), null, LocalCheckpointTracker::new, null,
getStallingSeqNoGenerator(latchReference, barrier, stall, expectedLocalCheckpoint));
final InternalEngine finalActualEngine = actualEngine;
final Translog translog = finalActualEngine.getTranslog();
final long generation = finalActualEngine.getTranslog().currentFileGeneration();
for (int i = 0; i < numberOfTriplets; i++) {
/*
* Index three documents with the first and last landing in the same generation and the middle document being stalled until
* a later generation.
*/
stall.set(false);
index(finalActualEngine, 3 * i);
final CountDownLatch latch = new CountDownLatch(1);
latchReference.set(latch);
final int skipId = 3 * i + 1;
stall.set(true);
final Thread thread = new Thread(() -> {
try {
index(finalActualEngine, skipId);
} catch (IOException e) {
throw new AssertionError(e);
}
});
thread.start();
threads.put(thread, latch);
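                // the stalled thread holds seqNo 3 * i + 1 until its latch is counted down further below,
                // so that seqNo ends up in a later translog generation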
barrier.await();
stall.set(false);
index(finalActualEngine, 3 * i + 2);
finalActualEngine.flush();
/*
* This sequence number landed in the last generation, but the lower and upper bounds for an earlier generation straddle
* this sequence number.
*/
assertThat(translog.getMinGenerationForSeqNo(3 * i + 1).translogFileGeneration, equalTo(i + generation));
}
int i = 0;
for (final Map.Entry<Thread, CountDownLatch> entry : threads.entrySet()) {
final Map<String, String> userData = finalActualEngine.commitStats().getUserData();
assertThat(userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY), equalTo(Long.toString(3 * i)));
entry.getValue().countDown();
entry.getKey().join();
finalActualEngine.flush();
i++;
}
} finally {
IOUtils.close(actualEngine);
}
}
private void index(final InternalEngine engine, final int id) throws IOException {
final String docId = Integer.toString(id);
final ParsedDocument doc =
testParsedDocument(docId, null, testDocumentWithTextField(), SOURCE, null);
engine.index(indexForDoc(doc));
}
/**
* Return a tuple representing the sequence ID for the given {@code Get}
* operation. The first value in the tuple is the sequence number, the
* second is the primary term.
*/
private Tuple<Long, Long> getSequenceID(Engine engine, Engine.Get get) throws EngineException {
try (Searcher searcher = engine.acquireSearcher("get", Engine.SearcherScope.INTERNAL)) {
final long primaryTerm;
final long seqNo;
DocIdAndSeqNo docIdAndSeqNo = VersionsAndSeqNoResolver.loadDocIdAndSeqNo(searcher.getIndexReader(), get.uid());
if (docIdAndSeqNo == null) {
primaryTerm = 0;
seqNo = UNASSIGNED_SEQ_NO;
} else {
seqNo = docIdAndSeqNo.seqNo;
NumericDocValues primaryTerms = docIdAndSeqNo.context.reader().getNumericDocValues(SeqNoFieldMapper.PRIMARY_TERM_NAME);
if (primaryTerms == null || primaryTerms.advanceExact(docIdAndSeqNo.docId) == false) {
throw new AssertionError("document does not have primary term [" + docIdAndSeqNo.docId + "]");
}
primaryTerm = primaryTerms.longValue();
}
return new Tuple<>(seqNo, primaryTerm);
} catch (Exception e) {
throw new EngineException(shardId, "unable to retrieve sequence id", e);
}
}
@Test
public void testRestoreLocalHistoryFromTranslog() throws IOException {
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
try (Store store = createStore()) {
final ArrayList<Long> seqNos = new ArrayList<>();
final int numOps = randomIntBetween(0, 1024);
for (int i = 0; i < numOps; i++) {
if (rarely()) {
continue;
}
seqNos.add((long) i);
}
Randomness.shuffle(seqNos);
final EngineConfig engineConfig;
final SeqNoStats prevSeqNoStats;
final List<DocIdSeqNoAndSource> prevDocs;
try (InternalEngine engine = createEngine(store, createTempDir(), globalCheckpoint::get)) {
engineConfig = engine.config();
for (final long seqNo : seqNos) {
final String id = Long.toString(seqNo);
final ParsedDocument doc = testParsedDocument(id, null,
testDocumentWithTextField(), SOURCE, null);
engine.index(replicaIndexForDoc(doc, 1, seqNo, false));
if (rarely()) {
engine.rollTranslogGeneration();
}
if (rarely()) {
engine.flush();
}
}
globalCheckpoint.set(randomLongBetween(SequenceNumbers.NO_OPS_PERFORMED, engine.getPersistedLocalCheckpoint()));
engine.syncTranslog();
prevSeqNoStats = engine.getSeqNoStats(globalCheckpoint.get());
prevDocs = getDocIds(engine, true);
}
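            // reopen the engine and replay the translog: restoring the local history must reproduce the same
            // documents and sequence number stats without appending new operations to the translog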
try (InternalEngine engine = new InternalEngine(engineConfig)) {
final long currentTranslogGeneration = engine.getTranslog().currentFileGeneration();
engine.recoverFromTranslog(translogHandler, globalCheckpoint.get());
engine.restoreLocalHistoryFromTranslog(translogHandler);
assertThat(getDocIds(engine, true), equalTo(prevDocs));
SeqNoStats seqNoStats = engine.getSeqNoStats(globalCheckpoint.get());
assertThat(seqNoStats.getLocalCheckpoint(), equalTo(prevSeqNoStats.getLocalCheckpoint()));
assertThat(seqNoStats.getMaxSeqNo(), equalTo(prevSeqNoStats.getMaxSeqNo()));
assertThat("restore from local translog must not add operations to translog",
engine.getTranslog().totalOperationsByMinGen(currentTranslogGeneration), equalTo(0));
}
assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, createMapperService("test"));
}
}
@Test
public void testFillUpSequenceIdGapsOnRecovery() throws IOException {
final int docs = randomIntBetween(1, 32);
int numDocsOnReplica = 0;
long maxSeqIDOnReplica = -1;
long checkpointOnReplica;
try {
for (int i = 0; i < docs; i++) {
final String docId = Integer.toString(i);
final ParsedDocument doc =
testParsedDocument(docId, null, testDocumentWithTextField(), SOURCE, null);
Engine.Index primaryResponse = indexForDoc(doc);
Engine.IndexResult indexResult = engine.index(primaryResponse);
if (randomBoolean()) {
numDocsOnReplica++;
maxSeqIDOnReplica = indexResult.getSeqNo();
replicaEngine.index(replicaIndexForDoc(doc, 1, indexResult.getSeqNo(), false));
}
}
engine.syncTranslog(); // to advance local checkpoint
replicaEngine.syncTranslog(); // to advance local checkpoint
checkpointOnReplica = replicaEngine.getProcessedLocalCheckpoint();
} finally {
IOUtils.close(replicaEngine);
}
boolean flushed = false;
AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
InternalEngine recoveringEngine = null;
try {
assertEquals(docs - 1, engine.getSeqNoStats(-1).getMaxSeqNo());
assertEquals(docs - 1, engine.getProcessedLocalCheckpoint());
assertEquals(maxSeqIDOnReplica, replicaEngine.getSeqNoStats(-1).getMaxSeqNo());
assertEquals(checkpointOnReplica, replicaEngine.getProcessedLocalCheckpoint());
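            // recover the replica under a fresh engine: after replaying the translog, the seqNo gaps between the
            // replica's checkpoint and its max seqNo must be filled with no-ops under the bumped primary term (2)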
recoveringEngine = new InternalEngine(copy(replicaEngine.config(), globalCheckpoint::get));
assertEquals(numDocsOnReplica, getTranslog(recoveringEngine).stats().getUncommittedOperations());
recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo());
assertEquals(checkpointOnReplica, recoveringEngine.getProcessedLocalCheckpoint());
assertEquals((maxSeqIDOnReplica + 1) - numDocsOnReplica, recoveringEngine.fillSeqNoGaps(2));
// now snapshot the tlog and ensure the primary term is updated
try (Translog.Snapshot snapshot = getTranslog(recoveringEngine).newSnapshot()) {
assertTrue((maxSeqIDOnReplica + 1) - numDocsOnReplica <= snapshot.totalOperations());
Translog.Operation operation;
while ((operation = snapshot.next()) != null) {
if (operation.opType() == Translog.Operation.Type.NO_OP) {
assertEquals(2, operation.primaryTerm());
} else {
assertEquals(primaryTerm.get(), operation.primaryTerm());
}
}
assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo());
assertEquals(maxSeqIDOnReplica, recoveringEngine.getProcessedLocalCheckpoint());
if ((flushed = randomBoolean())) {
globalCheckpoint.set(recoveringEngine.getSeqNoStats(-1).getMaxSeqNo());
getTranslog(recoveringEngine).sync();
recoveringEngine.flush(true, true);
}
}
} finally {
IOUtils.close(recoveringEngine);
}
// now do it again to make sure we preserve values etc.
try {
recoveringEngine = new InternalEngine(copy(replicaEngine.config(), globalCheckpoint::get));
if (flushed) {
assertThat(recoveringEngine.getTranslogStats().getUncommittedOperations(), equalTo(0));
}
recoveringEngine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo());
assertEquals(maxSeqIDOnReplica, recoveringEngine.getProcessedLocalCheckpoint());
assertEquals(0, recoveringEngine.fillSeqNoGaps(3));
assertEquals(maxSeqIDOnReplica, recoveringEngine.getSeqNoStats(-1).getMaxSeqNo());
assertEquals(maxSeqIDOnReplica, recoveringEngine.getProcessedLocalCheckpoint());
} finally {
IOUtils.close(recoveringEngine);
}
}
public void assertSameReader(Searcher left, Searcher right) {
List<LeafReaderContext> leftLeaves = ElasticsearchDirectoryReader.unwrap(left.getDirectoryReader()).leaves();
List<LeafReaderContext> rightLeaves = ElasticsearchDirectoryReader.unwrap(right.getDirectoryReader()).leaves();
assertEquals(rightLeaves.size(), leftLeaves.size());
for (int i = 0; i < leftLeaves.size(); i++) {
assertSame(leftLeaves.get(i).reader(), rightLeaves.get(i).reader());
}
}
public void assertNotSameReader(Searcher left, Searcher right) {
List<LeafReaderContext> leftLeaves = ElasticsearchDirectoryReader.unwrap(left.getDirectoryReader()).leaves();
List<LeafReaderContext> rightLeaves = ElasticsearchDirectoryReader.unwrap(right.getDirectoryReader()).leaves();
if (rightLeaves.size() == leftLeaves.size()) {
for (int i = 0; i < leftLeaves.size(); i++) {
if (leftLeaves.get(i).reader() != rightLeaves.get(i).reader()) {
return; // all is well
}
}
fail("readers are same");
}
}
@Test
public void testRefreshScopedSearcher() throws IOException {
try (Store store = createStore();
InternalEngine engine =
// disable merges to make sure that the reader doesn't change unexpectedly during the test
createEngine(defaultSettings, store, createTempDir(), NoMergePolicy.INSTANCE)) {
engine.refresh("warm_up");
try (Searcher getSearcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL);
Searcher searchSearcher = engine.acquireSearcher("test", Engine.SearcherScope.EXTERNAL)) {
assertSameReader(getSearcher, searchSearcher);
}
for (int i = 0; i < 10; i++) {
final String docId = Integer.toString(i);
final ParsedDocument doc =
testParsedDocument(docId, null, testDocumentWithTextField(), SOURCE, null);
Engine.Index primaryResponse = indexForDoc(doc);
engine.index(primaryResponse);
}
assertTrue(engine.refreshNeeded());
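            // an internal-scope refresh makes the new docs visible to internal searchers only; the external reader stays unchanged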
engine.refresh("test", Engine.SearcherScope.INTERNAL, true);
try (Searcher getSearcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL);
Searcher searchSearcher = engine.acquireSearcher("test", Engine.SearcherScope.EXTERNAL)) {
assertEquals(10, getSearcher.getIndexReader().numDocs());
assertEquals(0, searchSearcher.getIndexReader().numDocs());
assertNotSameReader(getSearcher, searchSearcher);
}
engine.refresh("test", Engine.SearcherScope.EXTERNAL, true);
try (Searcher getSearcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL);
Searcher searchSearcher = engine.acquireSearcher("test", Engine.SearcherScope.EXTERNAL)) {
assertEquals(10, getSearcher.getIndexReader().numDocs());
assertEquals(10, searchSearcher.getIndexReader().numDocs());
assertSameReader(getSearcher, searchSearcher);
}
// now ensure external refreshes are reflected on the internal reader
final String docId = Integer.toString(10);
final ParsedDocument doc =
testParsedDocument(docId, null, testDocumentWithTextField(), SOURCE, null);
Engine.Index primaryResponse = indexForDoc(doc);
engine.index(primaryResponse);
engine.refresh("test", Engine.SearcherScope.EXTERNAL, true);
try (Searcher getSearcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL);
Searcher searchSearcher = engine.acquireSearcher("test", Engine.SearcherScope.EXTERNAL)) {
assertEquals(11, getSearcher.getIndexReader().numDocs());
assertEquals(11, searchSearcher.getIndexReader().numDocs());
assertSameReader(getSearcher, searchSearcher);
}
try (Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) {
engine.refresh("test", Engine.SearcherScope.INTERNAL, true);
try (Searcher nextSearcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) {
assertSame(searcher.getIndexReader(), nextSearcher.getIndexReader());
}
}
try (Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.EXTERNAL)) {
engine.refresh("test", Engine.SearcherScope.EXTERNAL, true);
try (Searcher nextSearcher = engine.acquireSearcher("test", Engine.SearcherScope.EXTERNAL)) {
assertSame(searcher.getIndexReader(), nextSearcher.getIndexReader());
}
}
}
}
@Test
public void testSeqNoGenerator() throws IOException {
engine.close();
final long seqNo = randomIntBetween(Math.toIntExact(SequenceNumbers.NO_OPS_PERFORMED), Integer.MAX_VALUE);
final BiFunction<Long, Long, LocalCheckpointTracker> localCheckpointTrackerSupplier = (ms, lcp) -> new LocalCheckpointTracker(
SequenceNumbers.NO_OPS_PERFORMED,
SequenceNumbers.NO_OPS_PERFORMED);
final AtomicLong seqNoGenerator = new AtomicLong(seqNo);
try (Engine e = createEngine(defaultSettings, store, primaryTranslogDir,
newMergePolicy(), null, localCheckpointTrackerSupplier,
null, (engine, operation) -> seqNoGenerator.getAndIncrement())) {
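            // both the index and the delete below must draw their seqNos from the custom generator above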
final String id = "id";
final Field uidField = new Field("_id", id, IdFieldMapper.Defaults.FIELD_TYPE);
final Field versionField = new NumericDocValuesField("_version", 0);
final SeqNoFieldMapper.SequenceIDFields seqID = SeqNoFieldMapper.SequenceIDFields.emptySeqID();
final ParseContext.Document document = new ParseContext.Document();
document.add(uidField);
document.add(versionField);
document.add(seqID.seqNo);
document.add(seqID.seqNoDocValue);
document.add(seqID.primaryTerm);
final BytesReference source = new BytesArray(new byte[]{1});
final ParsedDocument parsedDocument = new ParsedDocument(
versionField,
seqID,
id,
"routing",
Collections.singletonList(document),
source,
null);
final Engine.Index index = new Engine.Index(
new Term("_id", parsedDocument.id()),
parsedDocument,
UNASSIGNED_SEQ_NO,
randomIntBetween(1, 8),
Versions.NOT_FOUND,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
-1,
randomBoolean(),
UNASSIGNED_SEQ_NO,
0);
final Engine.IndexResult indexResult = e.index(index);
assertThat(indexResult.getSeqNo(), equalTo(seqNo));
assertThat(seqNoGenerator.get(), equalTo(seqNo + 1));
final Engine.Delete delete = new Engine.Delete(
id,
new Term("_id", parsedDocument.id()),
UNASSIGNED_SEQ_NO,
randomIntBetween(1, 8),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0);
final Engine.DeleteResult deleteResult = e.delete(delete);
assertThat(deleteResult.getSeqNo(), equalTo(seqNo + 1));
assertThat(seqNoGenerator.get(), equalTo(seqNo + 2));
}
}
@Test
public void testKeepTranslogAfterGlobalCheckpoint() throws Exception {
IOUtils.close(engine, store);
final IndexSettings indexSettings = new IndexSettings(defaultSettings.getIndexMetadata(), defaultSettings.getNodeSettings(),
defaultSettings.getScopedSettings());
IndexMetadata.Builder builder = IndexMetadata.builder(indexSettings.getIndexMetadata())
.settings(Settings.builder().put(indexSettings.getSettings())
.put(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.getKey(), randomFrom("-1", "100micros", "30m"))
.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), randomFrom("-1", "512b", "1gb")));
indexSettings.updateIndexMetadata(builder.build());
final Path translogPath = createTempDir();
store = createStore();
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
store.createEmpty(Version.CURRENT.luceneVersion);
final String translogUUID = Translog.createEmptyTranslog(translogPath, globalCheckpoint.get(), shardId, primaryTerm.get());
store.associateIndexWithNewTranslog(translogUUID);
final EngineConfig engineConfig = config(indexSettings, store, translogPath,
NoMergePolicy.INSTANCE, null, null, () -> globalCheckpoint.get());
final AtomicLong lastSyncedGlobalCheckpointBeforeCommit = new AtomicLong(Translog.readGlobalCheckpoint(translogPath, translogUUID));
try (InternalEngine engine = new InternalEngine(engineConfig) {
@Override
protected void commitIndexWriter(IndexWriter writer, Translog translog, String syncId) throws IOException {
lastSyncedGlobalCheckpointBeforeCommit.set(Translog.readGlobalCheckpoint(translogPath, translogUUID));
                // Advance the global checkpoint during the flush to create a lag between a persisted global checkpoint in the translog
                // (this value is visible to the deletion policy) and an in-memory global checkpoint in the SequenceNumbersService.
if (rarely()) {
globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), getPersistedLocalCheckpoint()));
}
super.commitIndexWriter(writer, translog, syncId);
}
}) {
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
int numDocs = scaledRandomIntBetween(10, 100);
for (int docId = 0; docId < numDocs; docId++) {
ParseContext.Document document = testDocumentWithTextField();
document.add(new Field(SourceFieldMapper.NAME, BytesReference.toBytes(B_1), SourceFieldMapper.Defaults.FIELD_TYPE));
engine.index(indexForDoc(testParsedDocument(Integer.toString(docId), null, document, B_1, null)));
if (frequently()) {
globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getPersistedLocalCheckpoint()));
engine.syncTranslog();
}
if (frequently()) {
engine.flush(randomBoolean(), true);
final List<IndexCommit> commits = DirectoryReader.listCommits(store.directory());
// Keep only one safe commit as the oldest commit.
final IndexCommit safeCommit = commits.get(0);
if (lastSyncedGlobalCheckpointBeforeCommit.get() == UNASSIGNED_SEQ_NO) {
                        // If the global checkpoint is still unassigned, we keep an empty (e.g. initial) commit as a safe commit.
assertThat(Long.parseLong(safeCommit.getUserData().get(SequenceNumbers.MAX_SEQ_NO)),
equalTo(SequenceNumbers.NO_OPS_PERFORMED));
} else {
assertThat(Long.parseLong(safeCommit.getUserData().get(SequenceNumbers.MAX_SEQ_NO)),
lessThanOrEqualTo(lastSyncedGlobalCheckpointBeforeCommit.get()));
}
for (int i = 1; i < commits.size(); i++) {
assertThat(Long.parseLong(commits.get(i).getUserData().get(SequenceNumbers.MAX_SEQ_NO)),
greaterThan(lastSyncedGlobalCheckpointBeforeCommit.get()));
}
// Make sure we keep all translog operations after the local checkpoint of the safe commit.
long localCheckpointFromSafeCommit = Long.parseLong(safeCommit.getUserData().get(SequenceNumbers.LOCAL_CHECKPOINT_KEY));
try (Translog.Snapshot snapshot = getTranslog(engine).newSnapshot()) {
assertThat(snapshot, SnapshotMatchers.containsSeqNoRange(localCheckpointFromSafeCommit + 1, docId));
}
}
}
}
}
@Test
public void testConcurrentAppendUpdateAndRefresh() throws InterruptedException, IOException {
int numDocs = scaledRandomIntBetween(100, 1000);
CountDownLatch latch = new CountDownLatch(2);
AtomicBoolean done = new AtomicBoolean(false);
AtomicInteger numDeletes = new AtomicInteger();
Thread thread = new Thread(() -> {
try {
latch.countDown();
latch.await();
for (int j = 0; j < numDocs; j++) {
String docID = Integer.toString(j);
ParsedDocument doc = testParsedDocument(docID, null, testDocumentWithTextField(),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
Engine.Index operation = appendOnlyPrimary(doc, false, 1);
engine.index(operation);
if (rarely()) {
engine.delete(new Engine.Delete(
operation.id(),
operation.uid(),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
));
numDeletes.incrementAndGet();
} else {
doc = testParsedDocument(docID, null, testDocumentWithTextField("updated"),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
Engine.Index update = indexForDoc(doc);
engine.index(update);
}
}
} catch (Exception e) {
throw new AssertionError(e);
} finally {
done.set(true);
}
});
thread.start();
latch.countDown();
latch.await();
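        // race internal refreshes against the indexing/deleting thread until it finishes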
while (done.get() == false) {
engine.refresh("test", Engine.SearcherScope.INTERNAL, true);
}
thread.join();
engine.refresh("test", Engine.SearcherScope.INTERNAL, true);
try (Engine.Searcher searcher = engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL)) {
TopDocs search = searcher.search(new MatchAllDocsQuery(), searcher.getIndexReader().numDocs());
for (int i = 0; i < search.scoreDocs.length; i++) {
org.apache.lucene.document.Document luceneDoc = searcher.doc(search.scoreDocs[i].doc);
assertEquals("updated", luceneDoc.get("value"));
}
int totalNumDocs = numDocs - numDeletes.get();
assertEquals(totalNumDocs, searcher.getIndexReader().numDocs());
}
}
@Test
public void testAcquireIndexCommit() throws Exception {
IOUtils.close(engine, store);
store = createStore();
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
try (InternalEngine engine = createEngine(store, createTempDir(), globalCheckpoint::get)) {
int numDocs = between(1, 20);
for (int i = 0; i < numDocs; i++) {
index(engine, i);
}
if (randomBoolean()) {
globalCheckpoint.set(numDocs - 1);
}
final boolean flushFirst = randomBoolean();
final boolean safeCommit = randomBoolean();
final Engine.IndexCommitRef snapshot;
if (safeCommit) {
snapshot = engine.acquireSafeIndexCommit();
} else {
snapshot = engine.acquireLastIndexCommit(flushFirst);
}
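            // index more docs and flush: the commit captured above must stay readable even though newer commits exist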
int moreDocs = between(1, 20);
for (int i = 0; i < moreDocs; i++) {
index(engine, numDocs + i);
}
globalCheckpoint.set(numDocs + moreDocs - 1);
engine.flush();
// check that we can still read the commit that we captured
try (IndexReader reader = DirectoryReader.open(snapshot.getIndexCommit())) {
assertThat(reader.numDocs(), equalTo(flushFirst && safeCommit == false ? numDocs : 0));
}
assertThat(DirectoryReader.listCommits(engine.store.directory()), hasSize(2));
snapshot.close();
            // check it's cleaned up
engine.flush(true, true);
assertThat(DirectoryReader.listCommits(engine.store.directory()), hasSize(1));
}
}
@Test
public void testCleanUpCommitsWhenGlobalCheckpointAdvanced() throws Exception {
IOUtils.close(engine, store);
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test",
Settings.builder().put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_TRANSLOG_RETENTION_SIZE_SETTING.getKey(), -1)
.put(IndexSettings.INDEX_TRANSLOG_RETENTION_AGE_SETTING.getKey(), -1).build());
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
try (Store store = createStore();
InternalEngine engine =
createEngine(config(indexSettings, store, createTempDir(), newMergePolicy(),
null, null, globalCheckpoint::get))) {
final int numDocs = scaledRandomIntBetween(10, 100);
for (int docId = 0; docId < numDocs; docId++) {
index(engine, docId);
if (rarely()) {
engine.flush(randomBoolean(), true);
}
}
engine.flush(false, randomBoolean());
globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getPersistedLocalCheckpoint()));
engine.syncTranslog();
List<IndexCommit> commits = DirectoryReader.listCommits(store.directory());
assertThat(Long.parseLong(commits.get(0).getUserData().get(SequenceNumbers.MAX_SEQ_NO)),
lessThanOrEqualTo(globalCheckpoint.get()));
for (int i = 1; i < commits.size(); i++) {
assertThat(Long.parseLong(commits.get(i).getUserData().get(SequenceNumbers.MAX_SEQ_NO)),
greaterThan(globalCheckpoint.get()));
}
// Global checkpoint advanced enough - only the last commit is kept.
globalCheckpoint.set(randomLongBetween(engine.getPersistedLocalCheckpoint(), Long.MAX_VALUE));
engine.syncTranslog();
assertThat(DirectoryReader.listCommits(store.directory()), contains(commits.get(commits.size() - 1)));
assertThat(engine.getTranslog().totalOperations(), equalTo(0));
}
}
@Test
public void testCleanupCommitsWhenReleaseSnapshot() throws Exception {
IOUtils.close(engine, store);
store = createStore();
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
try (InternalEngine engine = createEngine(store, createTempDir(), globalCheckpoint::get)) {
final int numDocs = scaledRandomIntBetween(10, 100);
for (int docId = 0; docId < numDocs; docId++) {
index(engine, docId);
if (frequently()) {
engine.flush(randomBoolean(), true);
}
}
engine.flush(false, randomBoolean());
int numSnapshots = between(1, 10);
final List<Engine.IndexCommitRef> snapshots = new ArrayList<>();
for (int i = 0; i < numSnapshots; i++) {
snapshots.add(engine.acquireSafeIndexCommit()); // taking snapshots from the safe commit.
}
globalCheckpoint.set(engine.getPersistedLocalCheckpoint());
engine.syncTranslog();
final List<IndexCommit> commits = DirectoryReader.listCommits(store.directory());
for (int i = 0; i < numSnapshots - 1; i++) {
snapshots.get(i).close();
// pending snapshots - should not release any commit.
assertThat(DirectoryReader.listCommits(store.directory()), equalTo(commits));
}
snapshots.get(numSnapshots - 1).close(); // release the last snapshot - delete all except the last commit
assertThat(DirectoryReader.listCommits(store.directory()), hasSize(1));
}
}
@Test
public void testShouldPeriodicallyFlush() throws Exception {
assertThat("Empty engine does not need flushing", engine.shouldPeriodicallyFlush(), equalTo(false));
        // A new engine may have more than one empty translog file - the test should account for this extra size.
final Translog translog = engine.getTranslog();
final IntSupplier uncommittedTranslogOperationsSinceLastCommit = () -> {
long localCheckpoint = Long.parseLong(engine.getLastCommittedSegmentInfos().userData.get(SequenceNumbers.LOCAL_CHECKPOINT_KEY));
return translog.totalOperationsByMinGen(translog.getMinGenerationForSeqNo(localCheckpoint + 1).translogFileGeneration);
};
final long extraTranslogSizeInNewEngine =
engine.getTranslog().stats().getUncommittedSizeInBytes() - Translog.DEFAULT_HEADER_SIZE_IN_BYTES;
int numDocs = between(10, 100);
for (int id = 0; id < numDocs; id++) {
final ParsedDocument doc =
testParsedDocument(Integer.toString(id), null, testDocumentWithTextField(), SOURCE, null);
engine.index(indexForDoc(doc));
}
assertThat("Not exceeded translog flush threshold yet", engine.shouldPeriodicallyFlush(), equalTo(false));
        long flushThreshold = RandomNumbers.randomLongBetween(random(), 120,
            engine.getTranslog().stats().getUncommittedSizeInBytes() - extraTranslogSizeInNewEngine);
final IndexSettings indexSettings = engine.config().getIndexSettings();
final IndexMetadata indexMetadata = IndexMetadata.builder(indexSettings.getIndexMetadata())
.settings(Settings.builder().put(indexSettings.getSettings())
.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), flushThreshold + "b")).build();
indexSettings.updateIndexMetadata(indexMetadata);
engine.onSettingsChanged(indexSettings.getTranslogRetentionAge(), indexSettings.getTranslogRetentionSize(),
indexSettings.getSoftDeleteRetentionOperations());
assertThat(engine.getTranslog().stats().getUncommittedOperations(), equalTo(numDocs));
assertThat(engine.shouldPeriodicallyFlush(), equalTo(true));
engine.flush();
assertThat(uncommittedTranslogOperationsSinceLastCommit.getAsInt(), equalTo(0));
// Stale operations skipped by Lucene but added to translog - still able to flush
for (int id = 0; id < numDocs; id++) {
final ParsedDocument doc =
testParsedDocument(Integer.toString(id), null, testDocumentWithTextField(), SOURCE, null);
final Engine.IndexResult result = engine.index(replicaIndexForDoc(doc, 1L, id, false));
assertThat(result.isCreated(), equalTo(false));
}
SegmentInfos lastCommitInfo = engine.getLastCommittedSegmentInfos();
assertThat(uncommittedTranslogOperationsSinceLastCommit.getAsInt(), equalTo(numDocs));
assertThat(engine.shouldPeriodicallyFlush(), equalTo(true));
engine.flush(false, false);
assertThat(engine.getLastCommittedSegmentInfos(), not(sameInstance(lastCommitInfo)));
assertThat(uncommittedTranslogOperationsSinceLastCommit.getAsInt(), equalTo(0));
// If the new index commit still points to the same translog generation as the current index commit,
// we should not enable the periodically flush condition; otherwise we can get into an infinite loop of flushes.
generateNewSeqNo(engine); // create a gap here
for (int id = 0; id < numDocs; id++) {
if (randomBoolean()) {
translog.rollGeneration();
}
final ParsedDocument doc =
testParsedDocument("new" + id, null, testDocumentWithTextField(), SOURCE, null);
engine.index(replicaIndexForDoc(doc, 2L, generateNewSeqNo(engine), false));
if (engine.shouldPeriodicallyFlush()) {
engine.flush();
assertThat(engine.getLastCommittedSegmentInfos(), not(sameInstance(lastCommitInfo)));
assertThat(engine.shouldPeriodicallyFlush(), equalTo(false));
}
}
}
@Test
public void testShouldPeriodicallyFlushAfterMerge() throws Exception {
assertThat("Empty engine does not need flushing", engine.shouldPeriodicallyFlush(), equalTo(false));
ParsedDocument doc =
testParsedDocument(Integer.toString(0), null, testDocumentWithTextField(), SOURCE, null);
engine.index(indexForDoc(doc));
engine.refresh("test");
assertThat("Not exceeded translog flush threshold yet", engine.shouldPeriodicallyFlush(), equalTo(false));
final IndexSettings indexSettings = engine.config().getIndexSettings();
final IndexMetadata indexMetadata = IndexMetadata.builder(indexSettings.getIndexMetadata())
.settings(Settings.builder().put(indexSettings.getSettings())
.put(IndexSettings.INDEX_FLUSH_AFTER_MERGE_THRESHOLD_SIZE_SETTING.getKey(), "0b")).build();
indexSettings.updateIndexMetadata(indexMetadata);
engine.onSettingsChanged(indexSettings.getTranslogRetentionAge(), indexSettings.getTranslogRetentionSize(),
indexSettings.getSoftDeleteRetentionOperations());
assertThat(engine.getTranslog().stats().getUncommittedOperations(), equalTo(1));
assertThat(engine.shouldPeriodicallyFlush(), equalTo(false));
doc = testParsedDocument(Integer.toString(1), null, testDocumentWithTextField(), SOURCE, null);
engine.index(indexForDoc(doc));
assertThat(engine.getTranslog().stats().getUncommittedOperations(), equalTo(2));
engine.refresh("test");
engine.forceMerge(false, 1, false, false, false, UUIDs.randomBase64UUID());
assertBusy(() -> {
            // the merge listener runs concurrently after the force merge has returned
assertThat(engine.shouldPeriodicallyFlush(), equalTo(true));
});
engine.flush();
assertThat(engine.shouldPeriodicallyFlush(), equalTo(false));
}
@Test
public void testStressShouldPeriodicallyFlush() throws Exception {
final long flushThreshold = randomLongBetween(120, 5000);
final long generationThreshold = randomLongBetween(1000, 5000);
final IndexSettings indexSettings = engine.config().getIndexSettings();
final IndexMetadata indexMetadata = IndexMetadata.builder(indexSettings.getIndexMetadata())
.settings(Settings.builder().put(indexSettings.getSettings())
.put(IndexSettings.INDEX_TRANSLOG_GENERATION_THRESHOLD_SIZE_SETTING.getKey(), generationThreshold + "b")
.put(IndexSettings.INDEX_TRANSLOG_FLUSH_THRESHOLD_SIZE_SETTING.getKey(), flushThreshold + "b")).build();
indexSettings.updateIndexMetadata(indexMetadata);
engine.onSettingsChanged(indexSettings.getTranslogRetentionAge(), indexSettings.getTranslogRetentionSize(),
indexSettings.getSoftDeleteRetentionOperations());
final int numOps = scaledRandomIntBetween(100, 10_000);
for (int i = 0; i < numOps; i++) {
final long localCheckPoint = engine.getProcessedLocalCheckpoint();
final long seqno = randomLongBetween(Math.max(0, localCheckPoint), localCheckPoint + 5);
final ParsedDocument doc =
testParsedDocument(Long.toString(seqno), null, testDocumentWithTextField(), SOURCE, null);
engine.index(replicaIndexForDoc(doc, 1L, seqno, false));
if (rarely() && engine.getTranslog().shouldRollGeneration()) {
engine.rollTranslogGeneration();
}
if (rarely() || engine.shouldPeriodicallyFlush()) {
engine.flush();
assertThat(engine.shouldPeriodicallyFlush(), equalTo(false));
}
}
}
@Test
public void testStressUpdateSameDocWhileGettingIt() throws IOException, InterruptedException {
final int iters = randomIntBetween(1, 15);
for (int i = 0; i < iters; i++) {
// this is a reproduction of https://github.com/elastic/elasticsearch/issues/28714
try (Store store = createStore(); InternalEngine engine = createEngine(store, createTempDir())) {
final IndexSettings indexSettings = engine.config().getIndexSettings();
final IndexMetadata indexMetadata = IndexMetadata.builder(indexSettings.getIndexMetadata())
.settings(Settings.builder().put(indexSettings.getSettings())
.put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), TimeValue.timeValueMillis(1))).build();
engine.engineConfig.getIndexSettings().updateIndexMetadata(indexMetadata);
engine.onSettingsChanged(indexSettings.getTranslogRetentionAge(), indexSettings.getTranslogRetentionSize(),
indexSettings.getSoftDeleteRetentionOperations());
ParsedDocument document = testParsedDocument(Integer.toString(0), null, testDocumentWithTextField(), SOURCE, null);
final Engine.Index doc = new Engine.Index(newUid(document), document, UNASSIGNED_SEQ_NO, 0,
Versions.MATCH_ANY, VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(),
-1, false, UNASSIGNED_SEQ_NO, 0);
                // first index an append-only document and then delete it, so that we have it in the tombstones
engine.index(doc);
engine.delete(new Engine.Delete(
doc.id(),
doc.uid(),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
));
                // now index more append-only docs and refresh so we re-enable the optimization for the unsafe version map
ParsedDocument document1 = testParsedDocument(Integer.toString(1), null, testDocumentWithTextField(), SOURCE, null);
engine.index(new Engine.Index(newUid(document1), document1, UNASSIGNED_SEQ_NO, 0, Versions.MATCH_ANY, VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false,
UNASSIGNED_SEQ_NO, 0));
engine.refresh("test");
ParsedDocument document2 = testParsedDocument(Integer.toString(2), null, testDocumentWithTextField(), SOURCE, null);
engine.index(new Engine.Index(newUid(document2), document2, UNASSIGNED_SEQ_NO, 0, Versions.MATCH_ANY, VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false,
UNASSIGNED_SEQ_NO, 0));
engine.refresh("test");
ParsedDocument document3 = testParsedDocument(Integer.toString(3), null, testDocumentWithTextField(), SOURCE, null);
final Engine.Index doc3 = new Engine.Index(newUid(document3), document3, UNASSIGNED_SEQ_NO, 0,
Versions.MATCH_ANY, VersionType.INTERNAL, Engine.Operation.Origin.PRIMARY, System.nanoTime(),
-1, false, UNASSIGNED_SEQ_NO, 0);
engine.index(doc3);
engine.engineConfig.setEnableGcDeletes(true);
                // once we are here the version map is unsafe again and we need to refresh inside the get calls to ensure we
                // de-optimize. We also enabled GC deletes, which now causes tombstones to be pruned inside the refresh that is done
                // internally to ensure we de-optimize. One get call will prune while the other tries to lock the version map
                // concurrently while holding the lock that pruneTombstones needs - previously this resulted in a deadlock
CountDownLatch awaitStarted = new CountDownLatch(1);
Thread thread = new Thread(() -> {
awaitStarted.countDown();
try (Engine.GetResult getResult = engine.get(new Engine.Get(
doc3.id(), doc3.uid()), engine::acquireSearcher)) {
assertThat(getResult.docIdAndVersion(), is(notNullValue()));
}
});
thread.start();
awaitStarted.await();
try (Engine.GetResult getResult = engine.get(
new Engine.Get(doc.id(), doc.uid()),
engine::acquireSearcher)) {
assertThat(getResult.docIdAndVersion(), is(nullValue()));
}
thread.join();
}
}
}
@Test
public void testPruneOnlyDeletesAtMostLocalCheckpoint() throws Exception {
final AtomicLong clock = new AtomicLong(0);
threadPool = spy(threadPool);
when(threadPool.relativeTimeInMillis()).thenAnswer(invocation -> clock.get());
final long gcInterval = randomIntBetween(0, 10);
final IndexSettings indexSettings = engine.config().getIndexSettings();
final IndexMetadata indexMetadata = IndexMetadata.builder(indexSettings.getIndexMetadata())
.settings(Settings.builder().put(indexSettings.getSettings())
.put(IndexSettings.INDEX_GC_DELETES_SETTING.getKey(), TimeValue.timeValueMillis(gcInterval).getStringRep())).build();
indexSettings.updateIndexMetadata(indexMetadata);
try (Store store = createStore();
InternalEngine engine = createEngine(store, createTempDir())) {
engine.config().setEnableGcDeletes(false);
for (int i = 0, docs = scaledRandomIntBetween(0, 10); i < docs; i++) {
index(engine, i);
}
final long deleteBatch = between(10, 20);
final long gapSeqNo = randomLongBetween(
engine.getSeqNoStats(-1).getMaxSeqNo() + 1, engine.getSeqNoStats(-1).getMaxSeqNo() + deleteBatch);
for (int i = 0; i < deleteBatch; i++) {
final long seqno = generateNewSeqNo(engine);
if (seqno != gapSeqNo) {
if (randomBoolean()) {
clock.incrementAndGet();
}
engine.delete(replicaDeleteForDoc(UUIDs.randomBase64UUID(), 1, seqno, threadPool.relativeTimeInMillis()));
}
}
List<DeleteVersionValue> tombstones = new ArrayList<>(tombstonesInVersionMap(engine).values());
engine.config().setEnableGcDeletes(true);
// Prune tombstones whose seqno < gap_seqno and timestamp < clock-gcInterval.
clock.set(randomLongBetween(gcInterval, deleteBatch + gcInterval));
engine.refresh("test");
tombstones.removeIf(v -> v.seqNo < gapSeqNo && v.time < clock.get() - gcInterval);
assertThat(tombstonesInVersionMap(engine).values(), containsInAnyOrder(tombstones.toArray()));
            // Prune tombstones whose seqno is at most the local checkpoint (i.e. seqno < gap_seqno).
clock.set(randomLongBetween(deleteBatch + gcInterval * 4/3, 100)); // Need a margin for gcInterval/4.
engine.refresh("test");
tombstones.removeIf(v -> v.seqNo < gapSeqNo);
assertThat(tombstonesInVersionMap(engine).values(), containsInAnyOrder(tombstones.toArray()));
// Fill the seqno gap - should prune all tombstones.
clock.set(between(0, 100));
if (randomBoolean()) {
engine.index(replicaIndexForDoc(testParsedDocument("d", null, testDocumentWithTextField(),
SOURCE, null), 1, gapSeqNo, false));
} else {
engine.delete(replicaDeleteForDoc(UUIDs.randomBase64UUID(), Versions.MATCH_ANY,
gapSeqNo, threadPool.relativeTimeInMillis()));
}
clock.set(randomLongBetween(100 + gcInterval * 4/3, Long.MAX_VALUE)); // Need a margin for gcInterval/4.
engine.refresh("test");
assertThat(tombstonesInVersionMap(engine).values(), empty());
}
}
@Test
public void testTrimUnsafeCommits() throws Exception {
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
final int maxSeqNo = 40;
final List<Long> seqNos = LongStream.rangeClosed(0, maxSeqNo).boxed().collect(Collectors.toList());
Collections.shuffle(seqNos, random());
try (Store store = createStore()) {
EngineConfig config = config(defaultSettings, store, createTempDir(), newMergePolicy(),
null, null, globalCheckpoint::get);
final List<Long> commitMaxSeqNo = new ArrayList<>();
final long minTranslogGen;
try (InternalEngine engine = createEngine(config)) {
for (int i = 0; i < seqNos.size(); i++) {
ParsedDocument doc = testParsedDocument(Long.toString(seqNos.get(i)), null, testDocument(),
new BytesArray("{}"), null);
Engine.Index index = new Engine.Index(newUid(doc), doc, seqNos.get(i), 0,
1, null, REPLICA, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0);
engine.index(index);
if (randomBoolean()) {
engine.flush();
final Long maxSeqNoInCommit = seqNos.subList(0, i + 1).stream().max(Long::compareTo).orElse(-1L);
commitMaxSeqNo.add(maxSeqNoInCommit);
}
}
globalCheckpoint.set(randomInt(maxSeqNo));
engine.syncTranslog();
minTranslogGen = engine.getTranslog().getMinFileGeneration();
}
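            // trimming must discard every commit whose max seqNo is above the global checkpoint, keeping only the last safe commit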
            store.trimUnsafeCommits(globalCheckpoint.get(), minTranslogGen, config.getIndexSettings().getIndexVersionCreated());
long safeMaxSeqNo =
commitMaxSeqNo.stream().filter(s -> s <= globalCheckpoint.get())
.reduce((s1, s2) -> s2) // get the last one.
.orElse(SequenceNumbers.NO_OPS_PERFORMED);
final List<IndexCommit> commits = DirectoryReader.listCommits(store.directory());
assertThat(commits, hasSize(1));
assertThat(commits.get(0).getUserData().get(SequenceNumbers.MAX_SEQ_NO), equalTo(Long.toString(safeMaxSeqNo)));
try (IndexReader reader = DirectoryReader.open(commits.get(0))) {
for (LeafReaderContext context: reader.leaves()) {
final NumericDocValues values = context.reader().getNumericDocValues(SeqNoFieldMapper.NAME);
if (values != null) {
for (int docID = 0; docID < context.reader().maxDoc(); docID++) {
if (values.advanceExact(docID) == false) {
throw new AssertionError("Document does not have a seq number: " + docID);
}
assertThat(values.longValue(), lessThanOrEqualTo(globalCheckpoint.get()));
}
}
}
}
}
}
@Test
public void testLuceneHistoryOnPrimary() throws Exception {
final List<Engine.Operation> operations = generateSingleDocHistory(
false, randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL), false, 2, 10, 300, "1");
assertOperationHistoryInLucene(operations);
}
@Test
public void testLuceneHistoryOnReplica() throws Exception {
final List<Engine.Operation> operations = generateSingleDocHistory(
true, randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL), false, 2, 10, 300, "2");
Randomness.shuffle(operations);
assertOperationHistoryInLucene(operations);
}
private void assertOperationHistoryInLucene(List<Engine.Operation> operations) throws IOException {
final MergePolicy keepSoftDeleteDocsMP = new SoftDeletesRetentionMergePolicy(
Lucene.SOFT_DELETES_FIELD, MatchAllDocsQuery::new, engine.config().getMergePolicy());
Settings.Builder settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
.put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), randomLongBetween(0, 10));
final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
Set<Long> expectedSeqNos = new HashSet<>();
try (Store store = createStore();
Engine engine = createEngine(config(indexSettings, store, createTempDir(), keepSoftDeleteDocsMP, null))) {
for (Engine.Operation op : operations) {
if (op instanceof Engine.Index) {
Engine.IndexResult indexResult = engine.index((Engine.Index) op);
assertThat(indexResult.getFailure(), nullValue());
expectedSeqNos.add(indexResult.getSeqNo());
} else {
Engine.DeleteResult deleteResult = engine.delete((Engine.Delete) op);
assertThat(deleteResult.getFailure(), nullValue());
expectedSeqNos.add(deleteResult.getSeqNo());
}
if (rarely()) {
engine.refresh("test");
}
if (rarely()) {
engine.flush();
}
if (rarely()) {
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
}
}
MapperService mapperService = createMapperService("test");
List<Translog.Operation> actualOps = readAllOperationsInLucene(engine, mapperService);
assertThat(actualOps.stream().map(o -> o.seqNo()).collect(Collectors.toList()), containsInAnyOrder(expectedSeqNos.toArray()));
assertConsistentHistoryBetweenTranslogAndLuceneIndex(engine, mapperService);
}
}
@Test
public void testKeepMinRetainedSeqNoByMergePolicy() throws IOException {
IOUtils.close(engine, store);
Settings.Builder settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)
.put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), randomLongBetween(0, 10));
final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
final long primaryTerm = randomLongBetween(1, Long.MAX_VALUE);
final AtomicLong retentionLeasesVersion = new AtomicLong();
final AtomicReference<RetentionLeases> retentionLeasesHolder = new AtomicReference<>(
new RetentionLeases(primaryTerm, retentionLeasesVersion.get(), Collections.emptyList()));
final List<Engine.Operation> operations = generateSingleDocHistory(
true, randomFrom(VersionType.INTERNAL, VersionType.EXTERNAL), false, 2, 10, 300, "2");
Randomness.shuffle(operations);
Set<Long> existingSeqNos = new HashSet<>();
store = createStore();
engine = createEngine(config(
indexSettings,
store,
createTempDir(),
newMergePolicy(),
null,
null,
globalCheckpoint::get,
retentionLeasesHolder::get
));
assertThat(engine.getMinRetainedSeqNo(), equalTo(0L));
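        // the minimum retained seqNo starts at 0 and may only advance as the global checkpoint, the retention
        // leases and the soft-delete retention setting move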
long lastMinRetainedSeqNo = engine.getMinRetainedSeqNo();
for (Engine.Operation op : operations) {
final Engine.Result result;
if (op instanceof Engine.Index) {
result = engine.index((Engine.Index) op);
} else {
result = engine.delete((Engine.Delete) op);
}
existingSeqNos.add(result.getSeqNo());
if (randomBoolean()) {
engine.syncTranslog(); // advance persisted local checkpoint
assertEquals(engine.getProcessedLocalCheckpoint(), engine.getPersistedLocalCheckpoint());
globalCheckpoint.set(
randomLongBetween(globalCheckpoint.get(), engine.getLocalCheckpointTracker().getPersistedCheckpoint()));
}
if (randomBoolean()) {
retentionLeasesVersion.incrementAndGet();
final int length = randomIntBetween(0, 8);
final List<RetentionLease> leases = new ArrayList<>(length);
for (int i = 0; i < length; i++) {
final String id = randomAlphaOfLength(8);
final long retainingSequenceNumber = randomLongBetween(0, Math.max(0, globalCheckpoint.get()));
final long timestamp = randomLongBetween(0L, Long.MAX_VALUE);
final String source = randomAlphaOfLength(8);
leases.add(new RetentionLease(id, retainingSequenceNumber, timestamp, source));
}
retentionLeasesHolder.set(new RetentionLeases(primaryTerm, retentionLeasesVersion.get(), leases));
}
if (rarely()) {
settings.put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), randomLongBetween(0, 10));
indexSettings.updateIndexMetadata(IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build());
engine.onSettingsChanged(indexSettings.getTranslogRetentionAge(), indexSettings.getTranslogRetentionSize(),
indexSettings.getSoftDeleteRetentionOperations());
}
if (rarely()) {
engine.refresh("test");
}
if (rarely()) {
engine.flush(true, true);
assertThat(Long.parseLong(engine.getLastCommittedSegmentInfos().userData.get(Engine.MIN_RETAINED_SEQNO)),
equalTo(engine.getMinRetainedSeqNo()));
}
if (rarely()) {
engine.forceMerge(randomBoolean(), 1, false, false, false, UUIDs.randomBase64UUID());
}
try (Closeable ignored = engine.acquireHistoryRetentionLock(Engine.HistorySource.INDEX)) {
long minRetainSeqNos = engine.getMinRetainedSeqNo();
assertThat(minRetainSeqNos, lessThanOrEqualTo(globalCheckpoint.get() + 1));
Long[] expectedOps = existingSeqNos.stream().filter(seqno -> seqno >= minRetainSeqNos).toArray(Long[]::new);
Set<Long> actualOps = readAllOperationsInLucene(engine, createMapperService("test")).stream()
.map(Translog.Operation::seqNo).collect(Collectors.toSet());
assertThat(actualOps, containsInAnyOrder(expectedOps));
}
try (Engine.IndexCommitRef commitRef = engine.acquireSafeIndexCommit()) {
IndexCommit safeCommit = commitRef.getIndexCommit();
if (safeCommit.getUserData().containsKey(Engine.MIN_RETAINED_SEQNO)) {
lastMinRetainedSeqNo = Long.parseLong(safeCommit.getUserData().get(Engine.MIN_RETAINED_SEQNO));
}
}
}
if (randomBoolean()) {
engine.close();
} else {
engine.flushAndClose();
}
try (InternalEngine recoveringEngine = new InternalEngine(engine.config())) {
assertThat(recoveringEngine.getMinRetainedSeqNo(), equalTo(lastMinRetainedSeqNo));
}
}
@Test
public void testLastRefreshCheckpoint() throws Exception {
AtomicBoolean done = new AtomicBoolean();
Thread[] refreshThreads = new Thread[between(1, 8)];
CountDownLatch latch = new CountDownLatch(refreshThreads.length);
for (int i = 0; i < refreshThreads.length; i++) {
latch.countDown();
refreshThreads[i] = new Thread(() -> {
while (done.get() == false) {
long checkPointBeforeRefresh = engine.getProcessedLocalCheckpoint();
engine.refresh("test", randomFrom(Engine.SearcherScope.values()), true);
assertThat(engine.lastRefreshedCheckpoint(), greaterThanOrEqualTo(checkPointBeforeRefresh));
}
});
refreshThreads[i].start();
}
latch.await();
List<Engine.Operation> ops = generateSingleDocHistory(
true, VersionType.EXTERNAL, false, 1, 10, 1000, "1");
concurrentlyApplyOps(ops, engine);
done.set(true);
for (Thread thread : refreshThreads) {
thread.join();
}
engine.refresh("test");
assertThat(engine.lastRefreshedCheckpoint(), equalTo(engine.getProcessedLocalCheckpoint()));
}
@Test
public void testLuceneSnapshotRefreshesOnlyOnce() throws Exception {
final MapperService mapperService = createMapperService("test");
final long maxSeqNo = randomLongBetween(10, 50);
final AtomicLong refreshCounter = new AtomicLong();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(Settings.builder().
put(defaultSettings.getSettings()).put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true)).build());
try (Store store = createStore();
InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), newMergePolicy(),
null,
new ReferenceManager.RefreshListener() {
@Override
public void beforeRefresh() {
refreshCounter.incrementAndGet();
}
@Override
public void afterRefresh(boolean didRefresh) {
}
}, () -> SequenceNumbers.NO_OPS_PERFORMED))) {
for (long seqNo = 0; seqNo <= maxSeqNo; seqNo++) {
final ParsedDocument doc = testParsedDocument("id_" + seqNo, null, testDocumentWithTextField("test"),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
engine.index(replicaIndexForDoc(doc, 1, seqNo, randomBoolean()));
}
final long initialRefreshCount = refreshCounter.get();
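            // concurrent changes-snapshot requests below should coalesce into a single refresh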
final Thread[] snapshotThreads = new Thread[between(1, 3)];
CountDownLatch latch = new CountDownLatch(1);
for (int i = 0; i < snapshotThreads.length; i++) {
final long min = randomLongBetween(0, maxSeqNo - 5);
final long max = randomLongBetween(min, maxSeqNo);
snapshotThreads[i] = new Thread(new AbstractRunnable() {
@Override
public void onFailure(Exception e) {
throw new AssertionError(e);
}
@Override
protected void doRun() throws Exception {
latch.await();
Translog.Snapshot changes = engine.newChangesSnapshot("test", mapperService, min, max, true);
changes.close();
}
});
snapshotThreads[i].start();
}
latch.countDown();
for (Thread thread : snapshotThreads) {
thread.join();
}
assertThat(refreshCounter.get(), equalTo(initialRefreshCount + 1L));
assertThat(engine.lastRefreshedCheckpoint(), equalTo(maxSeqNo));
}
}
@Test
public void testAcquireSearcherOnClosingEngine() throws Exception {
engine.close();
expectThrows(AlreadyClosedException.class, () -> engine.acquireSearcher("test", Engine.SearcherScope.INTERNAL));
}
@Test
public void testNoOpOnClosingEngine() throws Exception {
engine.close();
Settings settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true).build();
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build());
assertTrue(indexSettings.isSoftDeleteEnabled());
try (Store store = createStore();
InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null))) {
engine.close();
expectThrows(AlreadyClosedException.class, () -> engine.noOp(
new Engine.NoOp(2, primaryTerm.get(), LOCAL_TRANSLOG_RECOVERY, System.nanoTime(), "reason")));
}
}
@Test
public void testSoftDeleteOnClosingEngine() throws Exception {
engine.close();
Settings settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true).build();
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build());
assertTrue(indexSettings.isSoftDeleteEnabled());
try (Store store = createStore();
InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null))) {
engine.close();
expectThrows(AlreadyClosedException.class, () -> engine.delete(replicaDeleteForDoc("test", 42, 7, System.nanoTime())));
}
}
@Test
public void testTrackMaxSeqNoOfUpdatesOrDeletesOnPrimary() throws Exception {
engine.close();
Set<String> liveDocIds = new HashSet<>();
engine = new InternalEngine(engine.config());
assertThat(engine.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(-1L));
int numOps = between(1, 500);
for (int i = 0; i < numOps; i++) {
long currentMaxSeqNoOfUpdates = engine.getMaxSeqNoOfUpdatesOrDeletes();
ParsedDocument doc = createParsedDoc(Integer.toString(between(1, 100)), null);
if (randomBoolean()) {
Engine.IndexResult result = engine.index(indexForDoc(doc));
if (liveDocIds.add(doc.id()) == false) {
assertThat("update operations on primary must advance max_seq_no_of_updates",
engine.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(Math.max(currentMaxSeqNoOfUpdates, result.getSeqNo())));
} else {
assertThat("append operations should not advance max_seq_no_of_updates",
engine.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(currentMaxSeqNoOfUpdates));
}
} else {
Engine.DeleteResult result = engine.delete(new Engine.Delete(
doc.id(),
newUid(doc.id()),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
));
liveDocIds.remove(doc.id());
assertThat("delete operations on primary must advance max_seq_no_of_updates",
engine.getMaxSeqNoOfUpdatesOrDeletes(), equalTo(Math.max(currentMaxSeqNoOfUpdates, result.getSeqNo())));
}
}
}
@Test
public void testRebuildLocalCheckpointTrackerAndVersionMap() throws Exception {
Settings.Builder settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_RETENTION_OPERATIONS_SETTING.getKey(), 10000)
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true);
final IndexMetadata indexMetadata = IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(indexMetadata);
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
Path translogPath = createTempDir();
List<Engine.Operation> operations = generateHistoryOnReplica(between(1, 500), randomBoolean(), randomBoolean());
List<List<Engine.Operation>> commits = new ArrayList<>();
commits.add(new ArrayList<>());
try (Store store = createStore()) {
EngineConfig config = config(indexSettings, store, translogPath, NoMergePolicy.INSTANCE, null, null, globalCheckpoint::get);
final List<DocIdSeqNoAndSource> docs;
try (InternalEngine engine = createEngine(config)) {
List<Engine.Operation> flushedOperations = new ArrayList<>();
for (Engine.Operation op : operations) {
flushedOperations.add(op);
applyOperation(engine, op);
if (randomBoolean()) {
engine.syncTranslog();
globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getPersistedLocalCheckpoint()));
}
if (randomInt(100) < 10) {
engine.refresh("test");
}
if (randomInt(100) < 5) {
engine.flush(true, true);
flushedOperations.sort(Comparator.comparing(Engine.Operation::seqNo));
commits.add(new ArrayList<>(flushedOperations));
}
}
docs = getDocIds(engine, true);
}
List<Engine.Operation> operationsInSafeCommit = null;
for (int i = commits.size() - 1; i >= 0; i--) {
if (commits.get(i).stream().allMatch(op -> op.seqNo() <= globalCheckpoint.get())) {
operationsInSafeCommit = commits.get(i);
break;
}
}
assertThat(operationsInSafeCommit, notNullValue());
try (InternalEngine engine = new InternalEngine(config)) { // do not recover from translog
final Map<BytesRef, Engine.Operation> deletesAfterCheckpoint = new HashMap<>();
for (Engine.Operation op : operationsInSafeCommit) {
if (op instanceof Engine.NoOp == false && op.seqNo() > engine.getPersistedLocalCheckpoint()) {
deletesAfterCheckpoint.put(new Term(IdFieldMapper.NAME, Uid.encodeId(op.id())).bytes(), op);
}
}
deletesAfterCheckpoint.values().removeIf(o -> o instanceof Engine.Delete == false);
final Map<BytesRef, VersionValue> versionMap = engine.getVersionMap();
for (BytesRef uid : deletesAfterCheckpoint.keySet()) {
final VersionValue versionValue = versionMap.get(uid);
final Engine.Operation op = deletesAfterCheckpoint.get(uid);
final String msg = versionValue + " vs " +
"op[" + op.operationType() + "id=" + op.id() + " seqno=" + op.seqNo() + " term=" + op.primaryTerm() + "]";
assertThat(versionValue, instanceOf(DeleteVersionValue.class));
assertThat(msg, versionValue.seqNo, equalTo(op.seqNo()));
assertThat(msg, versionValue.term, equalTo(op.primaryTerm()));
assertThat(msg, versionValue.version, equalTo(op.version()));
}
assertThat(versionMap.keySet(), equalTo(deletesAfterCheckpoint.keySet()));
final LocalCheckpointTracker tracker = engine.getLocalCheckpointTracker();
final Set<Long> seqNosInSafeCommit = operationsInSafeCommit.stream().map(op -> op.seqNo()).collect(Collectors.toSet());
for (Engine.Operation op : operations) {
assertThat(
"seq_no=" + op.seqNo() + " max_seq_no=" + tracker.getMaxSeqNo() + " checkpoint=" + tracker.getProcessedCheckpoint(),
tracker.hasProcessed(op.seqNo()), equalTo(seqNosInSafeCommit.contains(op.seqNo())));
}
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
assertThat(getDocIds(engine, true), equalTo(docs));
}
}
}
@Test
public void testRequireSoftDeletesWhenAccessingChangesSnapshot() throws Exception {
try (Store store = createStore()) {
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(Settings.builder().
put(defaultSettings.getSettings()).put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), false)).build());
try (InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), newMergePolicy(), null))) {
AssertionError error = expectThrows(AssertionError.class,
() -> engine.newChangesSnapshot("test", createMapperService("test"), 0, randomNonNegativeLong(), randomBoolean()));
assertThat(error.getMessage(), containsString("does not have soft-deletes enabled"));
}
}
}
private void assertLuceneOperations(InternalEngine engine,
long expectedAppends,
long expectedUpdates,
long expectedDeletes) {
String message = "Lucene operations mismatched;" +
" appends [actual:" + engine.getNumDocAppends() + ", expected:" + expectedAppends + "]," +
" updates [actual:" + engine.getNumDocUpdates() + ", expected:" + expectedUpdates + "]," +
" deletes [actual:" + engine.getNumDocDeletes() + ", expected:" + expectedDeletes + "]";
assertThat(message, engine.getNumDocAppends(), equalTo(expectedAppends));
assertThat(message, engine.getNumDocUpdates(), equalTo(expectedUpdates));
assertThat(message, engine.getNumDocDeletes(), equalTo(expectedDeletes));
}
@Test
public void testStoreHonorsLuceneVersion() throws IOException {
for (Version createdVersion : Arrays.asList(
Version.CURRENT, VersionUtils.getPreviousMinorVersion(), VersionUtils.getFirstVersion())) {
Settings settings = Settings.builder()
.put(indexSettings())
.put(IndexMetadata.SETTING_VERSION_CREATED, createdVersion).build();
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings("test", settings);
try (Store store = createStore();
InternalEngine engine = createEngine(config(indexSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null))) {
ParsedDocument doc = testParsedDocument("1", null, new Document(),
new BytesArray("{}".getBytes("UTF-8")), null);
engine.index(appendOnlyPrimary(doc, false, 1));
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
LeafReader leafReader = getOnlyLeafReader(searcher.getIndexReader());
assertEquals(createdVersion.luceneVersion.major, leafReader.getMetaData().getCreatedVersionMajor());
}
}
}
}
@Test
public void testMaxSeqNoInCommitUserData() throws Exception {
AtomicBoolean running = new AtomicBoolean(true);
Thread rollTranslog = new Thread(() -> {
while (running.get() && engine.getTranslog().currentFileGeneration() < 500) {
engine.rollTranslogGeneration(); // make adding operations to translog slower
}
});
rollTranslog.start();
Thread indexing = new Thread(() -> {
long seqNo = 0;
while (running.get() && seqNo <= 1000) {
try {
String id = Long.toString(between(1, 50));
if (randomBoolean()) {
ParsedDocument doc = testParsedDocument(id, null, testDocumentWithTextField(), SOURCE, null);
engine.index(replicaIndexForDoc(doc, 1L, seqNo, false));
} else {
engine.delete(replicaDeleteForDoc(id, 1L, seqNo, 0L));
}
seqNo++;
} catch (IOException e) {
throw new AssertionError(e);
}
}
});
indexing.start();
int numCommits = between(5, 20);
for (int i = 0; i < numCommits; i++) {
engine.flush(false, true);
}
running.set(false);
indexing.join();
rollTranslog.join();
assertMaxSeqNoInCommitUserData(engine);
}
@Test
public void testPruneAwayDeletedButRetainedIds() throws Exception {
IOUtils.close(engine, store);
Settings settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true).build();
IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build());
store = createStore(indexSettings, newDirectory());
LogDocMergePolicy policy = new LogDocMergePolicy();
policy.setMinMergeDocs(10000);
try (InternalEngine engine = createEngine(indexSettings, store, createTempDir(), policy)) {
int numDocs = between(1, 20);
logger.info("" + numDocs);
for (int i = 0; i < numDocs; i++) {
index(engine, i);
}
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
engine.delete(new Engine.Delete("0", newUid("0"), primaryTerm.get()));
engine.refresh("test");
// now we have 2 segments, since we added a tombstone in addition to the old segment containing the delete
try (Searcher searcher = engine.acquireSearcher("test")) {
IndexReader reader = searcher.getIndexReader();
assertEquals(2, reader.leaves().size());
LeafReaderContext leafReaderContext = reader.leaves().get(0);
LeafReader leafReader = leafReaderContext.reader();
assertEquals("the delete and the tombstone", 1, leafReader.numDeletedDocs());
assertEquals(numDocs, leafReader.maxDoc());
Terms id = leafReader.terms("_id");
assertNotNull(id);
assertEquals("deleted IDs are NOT YET pruned away", reader.numDocs() + 1, id.size());
TermsEnum iterator = id.iterator();
assertTrue(iterator.seekExact(Uid.encodeId("0")));
}
// let's force merge the tombstone and the original segment and make sure the doc is still there but the ID term is gone
engine.forceMerge(true, 1, false, false, false, UUIDs.randomBase64UUID());
engine.refresh("test");
try (Searcher searcher = engine.acquireSearcher("test")) {
IndexReader reader = searcher.getIndexReader();
assertEquals(1, reader.leaves().size());
LeafReaderContext leafReaderContext = reader.leaves().get(0);
LeafReader leafReader = leafReaderContext.reader();
assertEquals("the delete and the tombstone", 2, leafReader.numDeletedDocs());
assertEquals(numDocs + 1, leafReader.maxDoc());
Terms id = leafReader.terms("_id");
if (numDocs == 1) {
assertNull(id); // everything is pruned away
assertEquals(0, leafReader.numDocs());
} else {
assertNotNull(id);
assertEquals("deleted IDs are pruned away", reader.numDocs(), id.size());
TermsEnum iterator = id.iterator();
assertFalse(iterator.seekExact(Uid.encodeId("0")));
}
}
}
}
@Test
public void testRecoverFromLocalTranslog() throws Exception {
final AtomicLong globalCheckpoint = new AtomicLong(SequenceNumbers.NO_OPS_PERFORMED);
Path translogPath = createTempDir();
List<Engine.Operation> operations = generateHistoryOnReplica(between(1, 500), randomBoolean(), randomBoolean());
try (Store store = createStore()) {
EngineConfig config = config(defaultSettings, store, translogPath, newMergePolicy(), null, null, globalCheckpoint::get);
final List<DocIdSeqNoAndSource> docs;
try (InternalEngine engine = createEngine(config)) {
for (Engine.Operation op : operations) {
applyOperation(engine, op);
if (randomBoolean()) {
engine.syncTranslog();
globalCheckpoint.set(randomLongBetween(globalCheckpoint.get(), engine.getPersistedLocalCheckpoint()));
}
if (randomInt(100) < 10) {
engine.refresh("test");
}
if (randomInt(100) < 5) {
engine.flush();
}
if (randomInt(100) < 5) {
engine.forceMerge(randomBoolean(), 1, false, false, false, UUIDs.randomBase64UUID());
}
}
if (randomBoolean()) {
// engine is flushed properly before shutting down.
engine.syncTranslog();
globalCheckpoint.set(engine.getPersistedLocalCheckpoint());
engine.flush();
}
docs = getDocIds(engine, true);
}
try (InternalEngine engine = new InternalEngine(config)) {
engine.onSettingsChanged(TimeValue.MINUS_ONE, ByteSizeValue.ZERO, 0);
engine.recoverFromTranslog(translogHandler, Long.MAX_VALUE);
assertThat(getDocIds(engine, randomBoolean()), equalTo(docs));
if (engine.getSeqNoStats(globalCheckpoint.get()).getMaxSeqNo() == globalCheckpoint.get()) {
assertThat("engine should trim all unreferenced translog after recovery",
engine.getTranslog().getMinFileGeneration(), equalTo(engine.getTranslog().currentFileGeneration()));
}
}
}
}
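// Helper: collects only the delete tombstones currently held in the engine's live version map.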
private Map<BytesRef, DeleteVersionValue> tombstonesInVersionMap(InternalEngine engine) {
return engine.getVersionMap().entrySet().stream()
.filter(e -> e.getValue() instanceof DeleteVersionValue)
.collect(Collectors.toMap(e -> e.getKey(), e -> (DeleteVersionValue) e.getValue()));
}
@Test
public void testTreatDocumentFailureAsFatalError() throws Exception {
AtomicReference<IOException> addDocException = new AtomicReference<>();
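// IndexWriter stub whose addDocument throws the queued exception exactly once (getAndSet(null)), then delegates normally.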
IndexWriterFactory indexWriterFactory = (dir, iwc) -> new IndexWriter(dir, iwc) {
@Override
public long addDocument(Iterable<? extends IndexableField> doc) throws IOException {
final IOException ex = addDocException.getAndSet(null);
if (ex != null) {
throw ex;
}
return super.addDocument(doc);
}
};
try (Store store = createStore();
InternalEngine engine = createEngine(defaultSettings,
store,
createTempDir(),
NoMergePolicy.INSTANCE,
indexWriterFactory)) {
final ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField(), SOURCE, null);
Engine.Operation.Origin origin = randomFrom(REPLICA, LOCAL_RESET, PEER_RECOVERY);
Engine.Index index = new Engine.Index(
newUid(doc),
doc,
randomNonNegativeLong(),
primaryTerm.get(),
randomNonNegativeLong(),
null,
origin,
System.nanoTime(),
-1,
false,
UNASSIGNED_SEQ_NO,
UNASSIGNED_PRIMARY_TERM);
addDocException.set(new IOException("simulated"));
expectThrows(IOException.class, () -> engine.index(index));
assertTrue(engine.isClosed.get());
assertNotNull(engine.failedEngine.get());
}
}
/**
* We can trim the translog on primary promotion and peer recovery based on the fact that we add operations with either
* REPLICA or PEER_RECOVERY origin to the translog although they already exist in the engine (i.e. hasProcessed() == true).
* If we decide not to add those already-processed operations to the translog, we need to carefully study the consequences
* of the translog trimming in these two places.
*/
@Test
public void testAlwaysRecordReplicaOrPeerRecoveryOperationsToTranslog() throws Exception {
List<Engine.Operation> operations = generateHistoryOnReplica(between(1, 100), randomBoolean(), randomBoolean());
applyOperations(engine, operations);
Set<Long> seqNos = operations.stream().map(Engine.Operation::seqNo).collect(Collectors.toSet());
try (Translog.Snapshot snapshot = getTranslog(engine).newSnapshot()) {
assertThat(snapshot.totalOperations(), equalTo(operations.size()));
assertThat(TestTranslog.drainSnapshot(snapshot, false).stream().map(Translog.Operation::seqNo).collect(Collectors.toSet()),
equalTo(seqNos));
}
primaryTerm.set(randomLongBetween(primaryTerm.get(), Long.MAX_VALUE));
engine.rollTranslogGeneration();
engine.trimOperationsFromTranslog(primaryTerm.get(), NO_OPS_PERFORMED); // trim everything in translog
try (Translog.Snapshot snapshot = getTranslog(engine).newSnapshot()) {
assertThat(snapshot.totalOperations(), equalTo(0));
assertNull(snapshot.next());
}
applyOperations(engine, operations);
try (Translog.Snapshot snapshot = getTranslog(engine).newSnapshot()) {
assertThat(snapshot.totalOperations(), equalTo(operations.size()));
assertThat(TestTranslog.drainSnapshot(snapshot, false).stream().map(Translog.Operation::seqNo).collect(Collectors.toSet()),
equalTo(seqNos));
}
}
@Test
public void testNoOpFailure() throws IOException {
engine.close();
final Settings settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), true).build();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build());
try (Store store = createStore();
Engine engine = createEngine((dir, iwc) -> new IndexWriter(dir, iwc) {
@Override
public long addDocument(Iterable<? extends IndexableField> doc) {
throw new IllegalArgumentException("fatal");
}
}, null, null, config(indexSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null))) {
final Engine.NoOp op = new Engine.NoOp(0, 0, PRIMARY, System.currentTimeMillis(), "test");
final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> engine.noOp(op));
assertThat(e.getMessage(), equalTo("fatal"));
assertTrue(engine.isClosed.get());
assertThat(engine.failedEngine.get(), not(nullValue()));
assertThat(engine.failedEngine.get(), instanceOf(IllegalArgumentException.class));
assertThat(engine.failedEngine.get().getMessage(), equalTo("fatal"));
}
}
@Test
public void testDeleteFailureSoftDeletesEnabledDocAlreadyDeleted() throws IOException {
runTestDeleteFailure(true, InternalEngine::delete);
}
@Test
public void testDeleteFailureSoftDeletesEnabled() throws IOException {
runTestDeleteFailure(true, (engine, op) -> {});
}
@Test
public void testDeleteFailureSoftDeletesDisabled() throws IOException {
runTestDeleteFailure(false, (engine, op) -> {});
}
private void runTestDeleteFailure(
final boolean softDeletesEnabled,
final CheckedBiConsumer<InternalEngine, Engine.Delete, IOException> consumer) throws IOException {
engine.close();
final Settings settings = Settings.builder()
.put(defaultSettings.getSettings())
.put(IndexSettings.INDEX_SOFT_DELETES_SETTING.getKey(), softDeletesEnabled).build();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build());
final AtomicReference<ThrowingIndexWriter> iw = new AtomicReference<>();
try (Store store = createStore();
InternalEngine engine = createEngine(
(dir, iwc) -> {
iw.set(new ThrowingIndexWriter(dir, iwc));
return iw.get();
},
null,
null,
config(indexSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null)
)) {
engine.index(new Engine.Index(newUid("0"), primaryTerm.get(), InternalEngineTests.createParsedDoc("0", null)));
final Engine.Delete op = new Engine.Delete("0", newUid("0"), primaryTerm.get());
consumer.accept(engine, op);
iw.get().setThrowFailure(() -> new IllegalArgumentException("fatal"));
final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> engine.delete(op));
assertThat(e.getMessage(), equalTo("fatal"));
assertTrue(engine.isClosed.get());
assertThat(engine.failedEngine.get(), not(nullValue()));
assertThat(engine.failedEngine.get(), instanceOf(IllegalArgumentException.class));
assertThat(engine.failedEngine.get().getMessage(), equalTo("fatal"));
}
}
@Test
public void testRealtimeGetOnlyRefreshIfNeeded() throws Exception {
final AtomicInteger refreshCount = new AtomicInteger();
final ReferenceManager.RefreshListener refreshListener = new ReferenceManager.RefreshListener() {
@Override
public void beforeRefresh() {
}
@Override
public void afterRefresh(boolean didRefresh) {
if (didRefresh) {
refreshCount.incrementAndGet();
}
}
};
try (Store store = createStore()) {
final EngineConfig config = config(
defaultSettings,
store,
createTempDir(),
newMergePolicy(),
null,
refreshListener,
null,
null
);
try (InternalEngine engine = createEngine(config)) {
int numDocs = randomIntBetween(10, 100);
Set<String> ids = new HashSet<>();
for (int i = 0; i < numDocs; i++) {
String id = Integer.toString(i);
engine.index(indexForDoc(createParsedDoc(id, null)));
ids.add(id);
}
final int refreshCountBeforeGet = refreshCount.get();
Thread[] getters = new Thread[randomIntBetween(1, 4)];
Phaser phaser = new Phaser(getters.length + 1);
for (int t = 0; t < getters.length; t++) {
getters[t] = new Thread(() -> {
phaser.arriveAndAwaitAdvance();
int iters = randomIntBetween(1, 10);
for (int i = 0; i < iters; i++) {
ParsedDocument doc = createParsedDoc(randomFrom(ids), null);
try (Engine.GetResult getResult = engine.get(newGet(doc), engine::acquireSearcher)) {
assertThat(getResult.docIdAndVersion(), notNullValue());
}
}
});
getters[t].start();
}
phaser.arriveAndAwaitAdvance();
for (int i = 0; i < numDocs; i++) {
engine.index(indexForDoc(createParsedDoc("more-" + i, null)));
}
for (Thread getter : getters) {
getter.join();
}
assertThat(refreshCount.get(), lessThanOrEqualTo(refreshCountBeforeGet + 1));
}
}
}
@Test
public void testRefreshDoesNotBlockClosing() throws Exception {
final CountDownLatch refreshStarted = new CountDownLatch(1);
final CountDownLatch engineClosed = new CountDownLatch(1);
final ReferenceManager.RefreshListener refreshListener = new ReferenceManager.RefreshListener() {
@Override
public void beforeRefresh() {
refreshStarted.countDown();
try {
engineClosed.await();
} catch (InterruptedException e) {
throw new AssertionError(e);
}
}
@Override
public void afterRefresh(boolean didRefresh) {
assertFalse(didRefresh);
}
};
try (Store store = createStore()) {
final EngineConfig config = config(
defaultSettings,
store,
createTempDir(),
newMergePolicy(),
null,
refreshListener,
null,
null
);
try (InternalEngine engine = createEngine(config)) {
if (randomBoolean()) {
engine.index(indexForDoc(createParsedDoc("id", null)));
}
threadPool.executor(ThreadPool.Names.REFRESH).execute(() ->
expectThrows(AlreadyClosedException.class,
() -> engine.refresh("test", randomFrom(Engine.SearcherScope.values()), true)));
refreshStarted.await();
engine.close();
engineClosed.countDown();
}
}
}
@Test
public void testDeleteDocumentFailuresShouldFailEngine() throws IOException {
engine.close();
final Settings settings = Settings.builder()
.put(defaultSettings.getSettings())
.build();
final IndexSettings indexSettings = IndexSettingsModule.newIndexSettings(
IndexMetadata.builder(defaultSettings.getIndexMetadata()).settings(settings).build());
final AtomicReference<ThrowingIndexWriter> iw = new AtomicReference<>();
try (Store store = createStore();
InternalEngine engine = createEngine(
(dir, iwc) -> {
iw.set(new ThrowingIndexWriter(dir, iwc));
return iw.get();
},
null,
null,
config(indexSettings, store, createTempDir(), NoMergePolicy.INSTANCE, null))) {
engine.index(new Engine.Index(
newUid("0"), InternalEngineTests.createParsedDoc("0", null), UNASSIGNED_SEQ_NO, primaryTerm.get(),
Versions.MATCH_DELETED, VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY, System.nanoTime(), -1, false, UNASSIGNED_SEQ_NO, 0));
Engine.Delete op = new Engine.Delete(
"0",
newUid("0"),
UNASSIGNED_SEQ_NO,
primaryTerm.get(),
Versions.MATCH_ANY,
VersionType.INTERNAL,
Engine.Operation.Origin.PRIMARY,
System.nanoTime(),
UNASSIGNED_SEQ_NO,
0
);
iw.get().setThrowFailure(() -> new IllegalArgumentException("fatal"));
final IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> engine.delete(op));
assertThat(e.getMessage(), equalTo("fatal"));
assertThat(engine.isClosed.get(), is(true));
assertThat(engine.failedEngine.get(), not(nullValue()));
assertThat(engine.failedEngine.get(), instanceOf(IllegalArgumentException.class));
assertThat(engine.failedEngine.get().getMessage(), equalTo("fatal"));
}
}
public static <T> void assertThatIfAssertionEnabled(T actual, Matcher<? super T> matcher) {
if (InternalEngineTests.class.desiredAssertionStatus()) {
assertThat(actual, matcher);
}
}
@Test
public void testProducesStoredFieldsReader() throws Exception {
// Make sure that the engine produces a SequentialStoredFieldsLeafReader.
// This is required for optimizations on SourceLookup to work, which is in turn useful for runtime fields.
ParsedDocument doc = testParsedDocument("1", null, testDocumentWithTextField("test"),
new BytesArray("{}".getBytes(Charset.defaultCharset())), null);
Engine.Index operation = randomBoolean() ?
appendOnlyPrimary(doc, false, 1)
: appendOnlyReplica(doc, false, 1, randomIntBetween(0, 5));
engine.index(operation);
engine.refresh("test");
try (Engine.Searcher searcher = engine.acquireSearcher("test")) {
IndexReader reader = searcher.getIndexReader();
assertThat(reader.leaves().size(), Matchers.greaterThanOrEqualTo(1));
for (LeafReaderContext context: reader.leaves()) {
assertThat(context.reader(), Matchers.instanceOf(SequentialStoredFieldsLeafReader.class));
SequentialStoredFieldsLeafReader lf = (SequentialStoredFieldsLeafReader) context.reader();
assertNotNull(lf.getSequentialStoredFieldsReader());
}
}
}
}
| Fix flaky testTranslogReplayWithFailure
Closes https://github.com/crate/crate/issues/12175
See https://github.com/elastic/elasticsearch/commit/02d1fbd5a786ffbbe6fe0b48649f2a5fb3707393
| server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java | Fix flaky testTranslogReplayWithFailure |
|
Java | apache-2.0 | b88aa1d18d43d37c33232b3973f92f36f06c0469 | 0 | Praveen2112/presto,erichwang/presto,dain/presto,losipiuk/presto,ebyhr/presto,11xor6/presto,ebyhr/presto,ebyhr/presto,electrum/presto,Praveen2112/presto,dain/presto,dain/presto,electrum/presto,erichwang/presto,treasure-data/presto,smartnews/presto,treasure-data/presto,electrum/presto,ebyhr/presto,dain/presto,Praveen2112/presto,smartnews/presto,dain/presto,ebyhr/presto,smartnews/presto,treasure-data/presto,erichwang/presto,losipiuk/presto,smartnews/presto,11xor6/presto,Praveen2112/presto,Praveen2112/presto,treasure-data/presto,erichwang/presto,losipiuk/presto,losipiuk/presto,electrum/presto,electrum/presto,treasure-data/presto,11xor6/presto,treasure-data/presto,smartnews/presto,erichwang/presto,11xor6/presto,11xor6/presto,losipiuk/presto | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.testng.services;
import com.google.common.annotations.VisibleForTesting;
import org.testng.IClassListener;
import org.testng.ITestClass;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Stream;
import static com.google.common.base.Throwables.getStackTraceAsString;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static io.prestosql.testng.services.Listeners.reportListenerFailure;
import static java.util.stream.Collectors.joining;
public class ReportUnannotatedMethods
implements IClassListener
{
@Override
public void onBeforeClass(ITestClass testClass)
{
try {
reportUnannotatedTestMethods(testClass);
}
catch (RuntimeException | Error e) {
reportListenerFailure(
ReportUnannotatedMethods.class,
"Failed to process %s: \n%s",
testClass,
getStackTraceAsString(e));
}
}
private void reportUnannotatedTestMethods(ITestClass testClass)
{
Class<?> realClass = testClass.getRealClass();
if (realClass.getSuperclass() != null &&
"io.prestosql.tempto.internal.convention.ConventionBasedTestProxyGenerator$ConventionBasedTestProxy".equals(realClass.getSuperclass().getName())) {
// Ignore tempto generated convention tests.
return;
}
List<Method> unannotatedTestMethods = findUnannotatedTestMethods(realClass);
if (!unannotatedTestMethods.isEmpty()) {
reportListenerFailure(
ReportUnannotatedMethods.class,
"Test class %s has methods which are public but not explicitly annotated. Are they missing @Test?%s",
realClass.getName(),
unannotatedTestMethods.stream()
.map(Method::toString)
.collect(joining("\n\t\t", "\n\t\t", "")));
}
}
@VisibleForTesting
static List<Method> findUnannotatedTestMethods(Class<?> realClass)
{
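// Public, non-static, non-bridge methods (excluding Object's) that carry no recognized test or tempto annotation.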
return Arrays.stream(realClass.getMethods())
.filter(method -> method.getDeclaringClass() != Object.class)
.filter(method -> !Modifier.isStatic(method.getModifiers()))
.filter(method -> !method.isBridge())
.filter(method -> !isTestMethod(method))
.filter(method -> !isTemptoSpiMethod(method))
.collect(toImmutableList());
}
@Override
public void onAfterClass(ITestClass testClass) {}
/**
* Is explicitly annotated as @Test, @BeforeMethod, @DataProvider, etc.
*/
private static boolean isTestMethod(Method method)
{
if (isTestAnnotated(method)) {
return true;
}
if (method.getDeclaringClass() == Object.class) {
return true;
}
Class<?> superclass = method.getDeclaringClass().getSuperclass();
Method overridden;
try {
// Simplistic override detection
overridden = superclass.getMethod(method.getName(), method.getParameterTypes());
}
catch (NoSuchMethodException ignored) {
return false;
}
return isTestMethod(overridden);
}
private static boolean isTestAnnotated(Method method)
{
return Arrays.stream(method.getAnnotations())
.map(Annotation::annotationType)
.anyMatch(annotationClass -> {
if ("org.openjdk.jmh.annotations.Benchmark".equals(annotationClass.getName())) {
return true;
}
if (org.testng.annotations.Test.class.getPackage().equals(annotationClass.getPackage())) {
// testng annotation (@Test, @Before*, @DataProvider, etc.)
return true;
}
if (isTemptoClass(annotationClass)) {
// tempto annotation (@BeforeTestWithContext, @AfterTestWithContext)
return true;
}
return false;
});
}
private static boolean isTemptoSpiMethod(Method method)
{
return Stream.of(method.getDeclaringClass().getInterfaces())
.filter(ReportUnannotatedMethods::isTemptoClass)
.map(Class::getMethods)
.flatMap(Stream::of)
.anyMatch(actualMethod -> overrides(method, actualMethod));
}
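// Simplistic "overrides" check: compares only method name and parameter type names, ignoring return type, generics and declaring class.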
private static boolean overrides(Method first, Method second)
{
if (!first.getName().equals(second.getName())) {
return false;
}
if (first.getParameterTypes().length != second.getParameterTypes().length) {
return false;
}
for (int i = 0; i < first.getParameterTypes().length; i++) {
if (!first.getParameterTypes()[i].getName().equals(second.getParameterTypes()[i].getName())) {
return false;
}
}
return true;
}
public static boolean isTemptoClass(Class<?> aClass)
{
return "io.prestosql.tempto".equals(aClass.getPackage().getName());
}
}
| presto-testng-services/src/main/java/io/prestosql/testng/services/ReportUnannotatedMethods.java | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.testng.services;
import com.google.common.annotations.VisibleForTesting;
import org.testng.IClassListener;
import org.testng.ITestClass;
import java.lang.annotation.Annotation;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Stream;
import static com.google.common.base.Throwables.getStackTraceAsString;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static io.prestosql.testng.services.Listeners.reportListenerFailure;
import static java.util.stream.Collectors.joining;
public class ReportUnannotatedMethods
implements IClassListener
{
@Override
public void onBeforeClass(ITestClass testClass)
{
try {
reportUnannotatedTestMethods(testClass);
}
catch (RuntimeException | Error e) {
reportListenerFailure(
ReportUnannotatedMethods.class,
"Failed to process %s: \n%s",
testClass,
getStackTraceAsString(e));
}
}
private void reportUnannotatedTestMethods(ITestClass testClass)
{
Class<?> realClass = testClass.getRealClass();
List<Method> unannotatedTestMethods = findUnannotatedTestMethods(realClass);
if (!unannotatedTestMethods.isEmpty()) {
reportListenerFailure(
ReportUnannotatedMethods.class,
"Test class %s has methods which are public but not explicitly annotated. Are they missing @Test?%s",
realClass.getName(),
unannotatedTestMethods.stream()
.map(Method::toString)
.collect(joining("\n\t\t", "\n\t\t", "")));
}
}
@VisibleForTesting
static List<Method> findUnannotatedTestMethods(Class<?> realClass)
{
return Arrays.stream(realClass.getMethods())
.filter(method -> method.getDeclaringClass() != Object.class)
.filter(method -> !Modifier.isStatic(method.getModifiers()))
.filter(method -> !method.isBridge())
.filter(method -> !isTestMethod(method))
.filter(method -> !isTemptoSpiMethod(method))
.collect(toImmutableList());
}
@Override
public void onAfterClass(ITestClass testClass) {}
/**
* Is explicitly annotated as @Test, @BeforeMethod, @DataProvider, etc.
*/
private static boolean isTestMethod(Method method)
{
if (isTestAnnotated(method)) {
return true;
}
if (method.getDeclaringClass() == Object.class) {
return true;
}
Class<?> superclass = method.getDeclaringClass().getSuperclass();
Method overridden;
try {
// Simplistic override detection
overridden = superclass.getMethod(method.getName(), method.getParameterTypes());
}
catch (NoSuchMethodException ignored) {
return false;
}
return isTestMethod(overridden);
}
private static boolean isTestAnnotated(Method method)
{
return Arrays.stream(method.getAnnotations())
.map(Annotation::annotationType)
.anyMatch(annotationClass -> {
if ("org.openjdk.jmh.annotations.Benchmark".equals(annotationClass.getName())) {
return true;
}
if (org.testng.annotations.Test.class.getPackage().equals(annotationClass.getPackage())) {
// testng annotation (@Test, @Before*, @DataProvider, etc.)
return true;
}
if (isTemptoClass(annotationClass)) {
// tempto annotation (@BeforeTestWithContext, @AfterTestWithContext)
return true;
}
return false;
});
}
private static boolean isTemptoSpiMethod(Method method)
{
return Stream.of(method.getDeclaringClass().getInterfaces())
.filter(ReportUnannotatedMethods::isTemptoClass)
.map(Class::getMethods)
.flatMap(Stream::of)
.anyMatch(actualMethod -> overrides(method, actualMethod));
}
private static boolean overrides(Method first, Method second)
{
if (!first.getName().equals(second.getName())) {
return false;
}
if (first.getParameterTypes().length != second.getParameterTypes().length) {
return false;
}
for (int i = 0; i < first.getParameterTypes().length; i++) {
if (!first.getParameterTypes()[i].getName().equals(second.getParameterTypes()[i].getName())) {
return false;
}
}
return true;
}
public static boolean isTemptoClass(Class<?> aClass)
{
return "io.prestosql.tempto".equals(aClass.getPackage().getName());
}
}
| Ignore tempto generated convention tests
| presto-testng-services/src/main/java/io/prestosql/testng/services/ReportUnannotatedMethods.java | Ignore tempto generated convention tests |
|
Java | apache-2.0 | ded347448aa3063a6a5b3f3cbcf3c3a5e4815a6f | 0 | Nanopublication/nanopub-store-api,Nanopublication/nanopub-store-api | package nl.lumc.nanopub.store.dao;
import org.openrdf.model.URI;
import java.util.List;
import ch.tkuhn.nanopub.Nanopub;
/**
*
* @author Eelke, Mark, Reinout, Rajaram
* @since 30-10-2013
* @version 0.1
*/
public interface NanopubDao {
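/**
 * Stores the given nanopublication and returns the URI under which it can later be retrieved.
 */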
public URI storeNanopub(Nanopub nanopub) throws NanopubDaoException;
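/**
 * Retrieves the nanopublication previously stored under the given URI.
 */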
public Nanopub retrieveNanopub(URI uri) throws NanopubDaoException;
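/**
 * Lists the URIs of all stored nanopublications.
 */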
public List<URI> listNanopubs() throws NanopubDaoException;
}
| src/main/java/nl/lumc/nanopub/store/dao/NanopubDao.java | package nl.lumc.nanopub.store.dao;
import java.net.URI;
import java.util.List;
import ch.tkuhn.nanopub.Nanopub;
/**
*
* @author Eelke, Mark, Reinout, Rajaram
* @since 30-10-2013
* @version 0.1
*/
public interface NanopubDao {
public URI storeNanopub(Nanopub nanopub);
public Nanopub retrieveNanopub(URI uri);
public List<URI> listNanopubs();
}
| Added custom exceptions to the Nanopub DAO. | src/main/java/nl/lumc/nanopub/store/dao/NanopubDao.java | Added custom exceptions to the Nanopub DAO. |
|
Java | apache-2.0 | 683c9904a80c36ed25b1653f0c84dc2e7d797017 | 0 | arnost-starosta/midpoint,rpudil/midpoint,rpudil/midpoint,arnost-starosta/midpoint,Pardus-Engerek/engerek,arnost-starosta/midpoint,Pardus-Engerek/engerek,PetrGasparik/midpoint,Pardus-Engerek/engerek,arnost-starosta/midpoint,rpudil/midpoint,PetrGasparik/midpoint,PetrGasparik/midpoint,PetrGasparik/midpoint,Pardus-Engerek/engerek,arnost-starosta/midpoint,rpudil/midpoint | /*
* Copyright (c) 2010-2013 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.web.page.admin.configuration;
import com.evolveum.midpoint.model.api.ModelExecuteOptions;
import com.evolveum.midpoint.model.api.ModelPublicConstants;
import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.prism.PrismProperty;
import com.evolveum.midpoint.prism.PrismPropertyDefinition;
import com.evolveum.midpoint.prism.match.PolyStringNormMatchingRule;
import com.evolveum.midpoint.prism.path.ItemPath;
import com.evolveum.midpoint.prism.polystring.PolyStringNormalizer;
import com.evolveum.midpoint.prism.query.*;
import com.evolveum.midpoint.schema.GetOperationOptions;
import com.evolveum.midpoint.schema.SelectorOptions;
import com.evolveum.midpoint.schema.constants.ObjectTypes;
import com.evolveum.midpoint.schema.constants.SchemaConstants;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.security.api.AuthorizationConstants;
import com.evolveum.midpoint.task.api.Task;
import com.evolveum.midpoint.task.api.TaskManager;
import com.evolveum.midpoint.util.DOMUtil;
import com.evolveum.midpoint.util.exception.ObjectAlreadyExistsException;
import com.evolveum.midpoint.util.exception.ObjectNotFoundException;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.util.logging.LoggingUtils;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.web.application.AuthorizationAction;
import com.evolveum.midpoint.web.application.PageDescriptor;
import com.evolveum.midpoint.web.component.BasicSearchPanel;
import com.evolveum.midpoint.web.component.data.BoxedTablePanel;
import com.evolveum.midpoint.web.component.data.RepositoryObjectDataProvider;
import com.evolveum.midpoint.web.component.data.Table;
import com.evolveum.midpoint.web.component.data.column.CheckBoxHeaderColumn;
import com.evolveum.midpoint.web.component.data.column.InlineMenuHeaderColumn;
import com.evolveum.midpoint.web.component.data.column.InlineMenuable;
import com.evolveum.midpoint.web.component.data.column.LinkColumn;
import com.evolveum.midpoint.web.component.data.column.LinkPanel;
import com.evolveum.midpoint.web.component.data.column.TwoValueLinkPanel;
import com.evolveum.midpoint.web.component.dialog.ConfirmationDialog;
import com.evolveum.midpoint.web.component.dialog.DeleteAllDialog;
import com.evolveum.midpoint.web.component.dialog.DeleteAllDto;
import com.evolveum.midpoint.web.component.menu.cog.InlineMenuItem;
import com.evolveum.midpoint.web.component.util.LoadableModel;
import com.evolveum.midpoint.web.component.util.VisibleEnableBehaviour;
import com.evolveum.midpoint.web.page.admin.configuration.component.DebugButtonPanel;
import com.evolveum.midpoint.web.page.admin.configuration.component.HeaderMenuAction;
import com.evolveum.midpoint.web.page.admin.configuration.component.PageDebugDownloadBehaviour;
import com.evolveum.midpoint.web.page.admin.configuration.dto.DebugConfDialogDto;
import com.evolveum.midpoint.web.page.admin.configuration.dto.DebugObjectItem;
import com.evolveum.midpoint.web.page.admin.configuration.dto.DebugSearchDto;
import com.evolveum.midpoint.web.page.admin.dto.ObjectViewDto;
import com.evolveum.midpoint.web.session.ConfigurationStorage;
import com.evolveum.midpoint.web.session.UserProfileStorage;
import com.evolveum.midpoint.web.util.ObjectTypeGuiDescriptor;
import com.evolveum.midpoint.web.util.WebMiscUtil;
import com.evolveum.midpoint.web.util.WebModelUtils;
import com.evolveum.midpoint.xml.ns._public.common.common_3.*;
import com.evolveum.prism.xml.ns._public.query_3.QueryType;
import org.apache.commons.lang.StringUtils;
import org.apache.wicket.Component;
import org.apache.wicket.MarkupContainer;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.form.AjaxFormComponentUpdatingBehavior;
import org.apache.wicket.ajax.form.OnChangeAjaxBehavior;
import org.apache.wicket.ajax.markup.html.form.AjaxCheckBox;
import org.apache.wicket.behavior.Behavior;
import org.apache.wicket.extensions.ajax.markup.html.modal.ModalWindow;
import org.apache.wicket.extensions.markup.html.repeater.data.grid.ICellPopulator;
import org.apache.wicket.extensions.markup.html.repeater.data.table.AbstractColumn;
import org.apache.wicket.extensions.markup.html.repeater.data.table.DataTable;
import org.apache.wicket.extensions.markup.html.repeater.data.table.IColumn;
import org.apache.wicket.extensions.markup.html.repeater.data.table.PropertyColumn;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.DropDownChoice;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.form.IChoiceRenderer;
import org.apache.wicket.markup.html.list.ListItem;
import org.apache.wicket.markup.html.list.PropertyListView;
import org.apache.wicket.markup.html.panel.Fragment;
import org.apache.wicket.markup.repeater.Item;
import org.apache.wicket.model.*;
import org.apache.wicket.model.util.ListModel;
import org.apache.wicket.request.mapper.parameter.PageParameters;
import javax.xml.namespace.QName;
import java.util.*;
/**
* @author lazyman
*/
@PageDescriptor(url = "/admin/config/debugs", action = {
@AuthorizationAction(actionUri = PageAdminConfiguration.AUTH_CONFIGURATION_ALL, label = PageAdminConfiguration.AUTH_CONFIGURATION_ALL_LABEL, description = PageAdminConfiguration.AUTH_CONFIGURATION_ALL_DESCRIPTION),
@AuthorizationAction(actionUri = AuthorizationConstants.AUTZ_UI_CONFIGURATION_DEBUGS_URL, label = "PageDebugList.auth.debugs.label", description = "PageDebugList.auth.debugs.description") })
public class PageDebugList extends PageAdminConfiguration {
private static final Trace LOGGER = TraceManager.getTrace(PageDebugList.class);
private static final String DOT_CLASS = PageDebugList.class.getName() + ".";
private static final String OPERATION_DELETE_OBJECTS = DOT_CLASS + "deleteObjects";
private static final String OPERATION_LAXATIVE_DELETE = DOT_CLASS + "laxativeDelete";
private static final String OPERATION_LOAD_RESOURCES = DOT_CLASS + "loadResources";
private static final String OPERATION_DELETE_SHADOWS = DOT_CLASS + "deleteShadows";
private static final String ID_CONFIRM_DELETE_POPUP = "confirmDeletePopup";
private static final String ID_MAIN_FORM = "mainForm";
private static final String ID_ZIP_CHECK = "zipCheck";
private static final String ID_TABLE = "table";
private static final String ID_CHOICE = "choice";
private static final String ID_EXPORT = "export";
private static final String ID_EXPORT_ALL = "exportAll";
private static final String ID_SEARCH_FORM = "searchForm";
private static final String ID_BASIC_SEARCH = "basicSearch";
private static final String ID_DELETE_ALL_DIALOG = "confirmDeleteAll";
private static final String ID_RESOURCE = "resource";
private static final String ID_TABLE_HEADER = "tableHeader";
private static final Integer DELETE_LOG_INTERVAL = 50;
// search form model
private IModel<DebugSearchDto> searchModel;
// confirmation dialog model
private IModel<DebugConfDialogDto> confDialogModel;
private IModel<List<ObjectViewDto>> resourcesModel;
public PageDebugList() {
this(true);
}
public PageDebugList(boolean clearPagingInSession) {
searchModel = new LoadableModel<DebugSearchDto>(false) {
@Override
protected DebugSearchDto load() {
ConfigurationStorage storage = getSessionStorage().getConfiguration();
return storage.getDebugSearchDto();
}
};
confDialogModel = new LoadableModel<DebugConfDialogDto>() {
@Override
protected DebugConfDialogDto load() {
return new DebugConfDialogDto();
}
};
resourcesModel = new LoadableModel<List<ObjectViewDto>>() {
@Override
protected List<ObjectViewDto> load() {
return loadResources();
}
};
getSessionStorage().clearPagingInSession(clearPagingInSession);
initLayout();
}
private List<ObjectViewDto> loadResources() {
List<ObjectViewDto> objects = new ArrayList<>();
try {
OperationResult result = new OperationResult(OPERATION_LOAD_RESOURCES);
List<PrismObject<ResourceType>> list = WebModelUtils.searchObjects(ResourceType.class, null,
SelectorOptions.createCollection(GetOperationOptions.createRaw()), result, this, null);
for (PrismObject obj : list) {
ObjectViewDto dto = new ObjectViewDto(obj.getOid(), WebMiscUtil.getName(obj));
objects.add(dto);
}
} catch (Exception ex) {
// todo implement error handling
}
Collections.sort(objects, new Comparator<ObjectViewDto>() {
@Override
public int compare(ObjectViewDto o1, ObjectViewDto o2) {
return String.CASE_INSENSITIVE_ORDER.compare(o1.getName(), o2.getName());
}
});
return objects;
}
private void initLayout() {
DeleteAllDialog deleteAllDialog = new DeleteAllDialog(ID_DELETE_ALL_DIALOG,
createStringResource("pageDebugList.dialog.title.deleteAll")) {
@Override
public void yesPerformed(AjaxRequestTarget target) {
close(target);
deleteAllIdentitiesConfirmed(target, getModel().getObject());
}
};
add(deleteAllDialog);
ConfirmationDialog deleteConfirm = new ConfirmationDialog(ID_CONFIRM_DELETE_POPUP,
createStringResource("pageDebugList.dialog.title.confirmDelete"),
createDeleteConfirmString()) {
@Override
public void yesPerformed(AjaxRequestTarget target) {
close(target);
DebugConfDialogDto dto = confDialogModel.getObject();
switch (dto.getOperation()) {
case DELETE_ALL_TYPE:
deleteAllTypeConfirmed(target);
break;
case DELETE_SELECTED:
deleteSelectedConfirmed(target, dto.getObjects());
break;
case DELETE_RESOURCE_SHADOWS:
deleteAllShadowsOnResourceConfirmed(target);
break;
}
}
@Override
public boolean getLabelEscapeModelStrings() {
return false;
}
};
add(deleteConfirm);
Form main = new Form(ID_MAIN_FORM);
add(main);
DebugSearchDto dto = searchModel.getObject();
Class type = dto.getType().getClassDefinition();
addOrReplaceTable(new RepositoryObjectDataProvider(this, type) {
@Override
protected void saveProviderPaging(ObjectQuery query, ObjectPaging paging) {
ConfigurationStorage storage = getSessionStorage().getConfiguration();
storage.setDebugSearchPaging(paging);
}
});
PageDebugDownloadBehaviour ajaxDownloadBehavior = new PageDebugDownloadBehaviour();
main.add(ajaxDownloadBehavior);
}
private void initDownload(AjaxRequestTarget target, Class<? extends ObjectType> type, ObjectQuery query) {
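// Configures the page's single PageDebugDownloadBehaviour (attached to the main form in initLayout) and starts the download.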
List<PageDebugDownloadBehaviour> list = get(ID_MAIN_FORM)
.getBehaviors(PageDebugDownloadBehaviour.class);
PageDebugDownloadBehaviour downloadBehaviour = list.get(0);
downloadBehaviour.setType(type);
downloadBehaviour.setQuery(query);
downloadBehaviour.setUseZip(hasToZip());
downloadBehaviour.initiate(target);
}
private void addOrReplaceTable(RepositoryObjectDataProvider provider) {
provider.setQuery(createQuery());
Form mainForm = (Form) get(ID_MAIN_FORM);
BoxedTablePanel table = new BoxedTablePanel(ID_TABLE, provider, initColumns(provider.getType()),
UserProfileStorage.TableId.CONF_DEBUG_LIST_PANEL,
(int) getItemsPerPage(UserProfileStorage.TableId.CONF_DEBUG_LIST_PANEL)) {
@Override
protected WebMarkupContainer createHeader(String headerId) {
return new SearchFragment(headerId, ID_TABLE_HEADER, PageDebugList.this, searchModel,
resourcesModel);
}
};
table.setOutputMarkupId(true);
ConfigurationStorage storage = getSessionStorage().getConfiguration();
table.setCurrentPage(storage.getDebugSearchPaging());
mainForm.addOrReplace(table);
}
private List<IColumn> initColumns(final Class<? extends ObjectType> type) {
List<IColumn> columns = new ArrayList<>();
IColumn column = new CheckBoxHeaderColumn<ObjectType>();
columns.add(column);
column = new LinkColumn<DebugObjectItem>(createStringResource("pageDebugList.name"),
DebugObjectItem.F_NAME, DebugObjectItem.F_NAME) {
@Override
public void populateItem(Item<ICellPopulator<DebugObjectItem>> cellItem, String componentId,
final IModel<DebugObjectItem> rowModel) {
TwoValueLinkPanel panel = new TwoValueLinkPanel(componentId,
new PropertyModel<String>(rowModel, DebugObjectItem.F_NAME),
new PropertyModel<String>(rowModel, DebugObjectItem.F_OID)){
@Override
public void onClick(AjaxRequestTarget target) {
DebugObjectItem object = rowModel.getObject();
objectEditPerformed(target, object.getOid(), type);
}
};
cellItem.add(panel);
}
};
columns.add(column);
columns.add(new PropertyColumn(createStringResource("pageDebugList.description"),
DebugObjectItem.F_DESCRIPTION));
if (ShadowType.class.isAssignableFrom(type)) {
columns.add(new PropertyColumn(createStringResource("pageDebugList.resourceName"),
DebugObjectItem.F_RESOURCE_NAME));
columns.add(new PropertyColumn(createStringResource("pageDebugList.resourceType"),
DebugObjectItem.F_RESOURCE_TYPE));
}
column = new AbstractColumn<DebugObjectItem, String>(new Model(), null) {
@Override
public String getCssClass() {
return "debug-list-buttons";
}
@Override
public void populateItem(Item<ICellPopulator<DebugObjectItem>> cellItem, String componentId,
IModel<DebugObjectItem> rowModel) {
cellItem.add(new DebugButtonPanel<DebugObjectItem>(componentId, rowModel) {
@Override
public void deletePerformed(AjaxRequestTarget target, IModel<DebugObjectItem> model) {
deleteSelected(target, model.getObject());
}
@Override
public void exportPerformed(AjaxRequestTarget target, IModel<DebugObjectItem> model) {
exportSelected(target, model.getObject());
}
});
}
};
columns.add(column);
column = new InlineMenuHeaderColumn<InlineMenuable>(initInlineMenu()) {
@Override
public void populateItem(Item<ICellPopulator<InlineMenuable>> cellItem, String componentId,
IModel<InlineMenuable> rowModel) {
// we don't need row inline menu
cellItem.add(new Label(componentId));
}
};
columns.add(column);
return columns;
}
private List<InlineMenuItem> initInlineMenu() {
List<InlineMenuItem> headerMenuItems = new ArrayList<>();
headerMenuItems.add(new InlineMenuItem(createStringResource("pageDebugList.menu.exportSelected"),
true, new HeaderMenuAction(this) {
@Override
public void onSubmit(AjaxRequestTarget target, Form<?> form) {
exportSelected(target, null);
}
}));
headerMenuItems
.add(new InlineMenuItem(createStringResource("pageDebugList.menu.exportAllSelectedType"),
true, new HeaderMenuAction(this) {
@Override
public void onSubmit(AjaxRequestTarget target, Form<?> form) {
exportAllType(target);
}
}));
headerMenuItems.add(new InlineMenuItem(createStringResource("pageDebugList.menu.exportAll"), true,
new HeaderMenuAction(this) {
@Override
public void onSubmit(AjaxRequestTarget target, Form<?> form) {
exportAll(target);
}
}));
headerMenuItems.add(new InlineMenuItem());
headerMenuItems.add(new InlineMenuItem(createStringResource("pageDebugList.menu.deleteSelected"),
true, new HeaderMenuAction(this) {
@Override
public void onSubmit(AjaxRequestTarget target, Form<?> form) {
deleteSelected(target, null);
}
}));
headerMenuItems.add(new InlineMenuItem(createStringResource("pageDebugList.menu.deleteAllType"), true,
new HeaderMenuAction(this) {
@Override
public void onSubmit(AjaxRequestTarget target, Form<?> form) {
deleteAllType(target);
}
}));
headerMenuItems
.add(new InlineMenuItem(createStringResource("pageDebugList.menu.deleteShadowsOnResource"),
new Model(true), new AbstractReadOnlyModel<Boolean>() {
@Override
public Boolean getObject() {
DebugSearchDto dto = searchModel.getObject();
return ObjectTypes.SHADOW.equals(dto.getType());
}
}, false, new HeaderMenuAction(this) {
@Override
public void onClick(AjaxRequestTarget target) {
deleteAllShadowsOnResource(target);
}
}));
headerMenuItems.add(new InlineMenuItem());
headerMenuItems.add(new InlineMenuItem(createStringResource("pageDebugList.menu.deleteAllIdentities"),
true, new HeaderMenuAction(this) {
@Override
public void onSubmit(AjaxRequestTarget target, Form<?> form) {
deleteAllIdentities(target);
}
}));
return headerMenuItems;
}
private boolean hasToZip() {
BoxedTablePanel table = (BoxedTablePanel) getListTable();
SearchFragment header = (SearchFragment) table.getHeader();
AjaxCheckBox zipCheck = header.getZipCheck();
return zipCheck.getModelObject();
}
private Table getListTable() {
return (Table) get(createComponentPath(ID_MAIN_FORM, ID_TABLE));
}
private void listObjectsPerformed(AjaxRequestTarget target) {
DebugSearchDto dto = searchModel.getObject();
ObjectTypes selected = dto.getType();
RepositoryObjectDataProvider provider = getTableDataProvider();
provider.setQuery(createQuery());
if (selected != null) {
provider.setType(selected.getClassDefinition());
addOrReplaceTable(provider);
}
// save object type category to session storage, used by back button
ConfigurationStorage storage = getSessionStorage().getConfiguration();
storage.setDebugSearchDto(dto);
Table table = getListTable();
target.add((Component) table);
}
private ObjectQuery createQuery() {
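// Builds the repository query from the search dto: an optional resourceRef filter (shadow searches only)
// combined with a normalized name substring filter; returns null when no criteria are set.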
DebugSearchDto dto = searchModel.getObject();
List<ObjectFilter> filters = new ArrayList<>();
if (ObjectTypes.SHADOW.equals(dto.getType()) && dto.getResource() != null) {
String oid = dto.getResource().getOid();
RefFilter ref = RefFilter.createReferenceEqual(ShadowType.F_RESOURCE_REF, ShadowType.class,
getPrismContext(), oid);
filters.add(ref);
}
if (StringUtils.isNotEmpty(dto.getText())) {
String nameText = dto.getText();
PolyStringNormalizer normalizer = getPrismContext().getDefaultPolyStringNormalizer();
String normalizedString = normalizer.normalize(nameText);
ObjectFilter substring = SubstringFilter.createSubstring(ObjectType.F_NAME, ObjectType.class,
getPrismContext(), PolyStringNormMatchingRule.NAME, normalizedString);
filters.add(substring);
}
if (filters.isEmpty()) {
return null;
}
ObjectFilter filter = filters.size() > 1 ? AndFilter.createAnd(filters) : filters.get(0);
ObjectQuery query = new ObjectQuery();
query.setFilter(filter);
return query;
}
private void objectEditPerformed(AjaxRequestTarget target, String oid, Class<? extends ObjectType> type) {
PageParameters parameters = new PageParameters();
parameters.add(PageDebugView.PARAM_OBJECT_ID, oid);
parameters.add(PageDebugView.PARAM_OBJECT_TYPE, type.getSimpleName());
setResponsePage(PageDebugView.class, parameters);
}
private RepositoryObjectDataProvider getTableDataProvider() {
Table tablePanel = getListTable();
DataTable table = tablePanel.getDataTable();
return (RepositoryObjectDataProvider) table.getDataProvider();
}
private IModel<String> createDeleteConfirmString() {
return new AbstractReadOnlyModel<String>() {
@Override
public String getObject() {
DebugConfDialogDto dto = confDialogModel.getObject();
switch (dto.getOperation()) {
case DELETE_ALL_TYPE:
String key = ObjectTypeGuiDescriptor.getDescriptor(dto.getType())
.getLocalizationKey();
String type = createStringResource(key).getString();
return createStringResource("pageDebugList.message.deleteAllType", type).getString();
case DELETE_SELECTED:
List<DebugObjectItem> selectedList = dto.getObjects();
if (selectedList.size() > 1) {
return createStringResource("pageDebugList.message.deleteSelectedConfirm",
selectedList.size()).getString();
}
DebugObjectItem selectedItem = selectedList.get(0);
return createStringResource("pageDebugList.message.deleteObjectConfirm",
selectedItem.getName()).getString();
case DELETE_RESOURCE_SHADOWS:
DebugSearchDto search = searchModel.getObject();
return createStringResource("pageDebugList.messsage.deleteAllResourceShadows",
search.getResource().getName()).getString();
}
return "";
}
};
}
private void deleteAllIdentitiesConfirmed(AjaxRequestTarget target, DeleteAllDto dto) {
Collection<SelectorOptions<GetOperationOptions>> options = new ArrayList<>();
GetOperationOptions opt = GetOperationOptions.createRaw();
options.add(SelectorOptions.create(ItemPath.EMPTY_PATH, opt));
OperationResult result = new OperationResult(OPERATION_LAXATIVE_DELETE);
try {
if (dto.getDeleteUsers()) {
ObjectQuery query = createDeleteAllUsersQuery();
deleteObjectsAsync(UserType.COMPLEX_TYPE, query, true, "Delete all users", result);
}
if (dto.getDeleteOrgs()) {
deleteObjectsAsync(OrgType.COMPLEX_TYPE, null, true, "Delete all orgs", result);
}
if (dto.getDeleteAccountShadow()) {
deleteAllShadowsConfirmed(result, true);
}
if (dto.getDeleteNonAccountShadow()) {
deleteAllShadowsConfirmed(result, false);
}
} catch (Exception ex) {
result.computeStatus(getString("pageDebugList.message.laxativeProblem"));
LoggingUtils.logException(LOGGER, getString("pageDebugList.message.laxativeProblem"), ex);
}
target.add(getFeedbackPanel());
result.recomputeStatus();
showResult(result);
}
private ObjectQuery createDeleteAllUsersQuery() {
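// Matches every user except the built-in administrator (negated in-oid filter),
// so a "delete all users" run keeps the administrator account intact.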
InOidFilter inOid = InOidFilter.createInOid(SystemObjectsType.USER_ADMINISTRATOR.value());
NotFilter not = new NotFilter(inOid);
return ObjectQuery.createObjectQuery(not);
}
private void deleteAllShadowsConfirmed(OperationResult result, boolean deleteAccountShadows)
throws ObjectAlreadyExistsException, ObjectNotFoundException, SchemaException {
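        // kind == ACCOUNT selects account shadows; its negation (below) selects every other shadow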
ObjectFilter kind = EqualFilter.createEqual(ShadowType.F_KIND, ShadowType.class, getPrismContext(),
null, ShadowKindType.ACCOUNT);
String taskName;
ObjectQuery query;
if (deleteAccountShadows) {
taskName = "Delete all account shadows";
query = ObjectQuery.createObjectQuery(kind);
} else {
taskName = "Delete all non-account shadows";
query = ObjectQuery.createObjectQuery(NotFilter.createNot(kind));
}
deleteObjectsAsync(ShadowType.COMPLEX_TYPE, query, true, taskName, result);
}
private void exportSelected(AjaxRequestTarget target, DebugObjectItem item) {
List<DebugObjectItem> selected = getSelectedData(target, item);
if (selected.isEmpty()) {
return;
}
List<String> oids = new ArrayList<>();
for (DebugObjectItem dItem : selected) {
oids.add(dItem.getOid());
}
ObjectFilter filter = InOidFilter.createInOid(oids);
DebugSearchDto searchDto = searchModel.getObject();
initDownload(target, searchDto.getType().getClassDefinition(), ObjectQuery.createObjectQuery(filter));
}
private void exportAllType(AjaxRequestTarget target) {
DebugSearchDto searchDto = searchModel.getObject();
initDownload(target, searchDto.getType().getClassDefinition(), null);
}
private void exportAll(AjaxRequestTarget target) {
initDownload(target, ObjectType.class, null);
}
private void deleteAllType(AjaxRequestTarget target) {
DebugSearchDto searchDto = searchModel.getObject();
DebugConfDialogDto dto = new DebugConfDialogDto(DebugConfDialogDto.Operation.DELETE_ALL_TYPE, null,
searchDto.getType().getClassDefinition());
confDialogModel.setObject(dto);
ModalWindow dialog = (ModalWindow) get(ID_CONFIRM_DELETE_POPUP);
dialog.show(target);
}
private List<DebugObjectItem> getSelectedData(AjaxRequestTarget target, DebugObjectItem item) {
List<DebugObjectItem> items;
if (item != null) {
items = new ArrayList<>();
items.add(item);
return items;
}
items = WebMiscUtil.getSelectedData(getListTable());
if (items.isEmpty()) {
warn(getString("pageDebugList.message.nothingSelected"));
target.add(getFeedbackPanel());
}
return items;
}
private void deleteSelected(AjaxRequestTarget target, DebugObjectItem item) {
List<DebugObjectItem> selected = getSelectedData(target, item);
if (selected.isEmpty()) {
return;
}
DebugSearchDto searchDto = searchModel.getObject();
DebugConfDialogDto dto = new DebugConfDialogDto(DebugConfDialogDto.Operation.DELETE_SELECTED,
selected, searchDto.getType().getClassDefinition());
confDialogModel.setObject(dto);
ModalWindow dialog = (ModalWindow) get(ID_CONFIRM_DELETE_POPUP);
dialog.show(target);
}
private void deleteAllIdentities(AjaxRequestTarget target) {
DeleteAllDialog dialog = (DeleteAllDialog) get(ID_DELETE_ALL_DIALOG);
dialog.show(target);
}
private void deleteAllTypeConfirmed(AjaxRequestTarget target) {
DebugSearchDto dto = searchModel.getObject();
LOGGER.debug("Deleting all of type {}", dto.getType());
OperationResult result = new OperationResult(OPERATION_DELETE_OBJECTS);
try {
ObjectQuery query = null;
if (ObjectTypes.USER.equals(dto.getType())) {
query = createDeleteAllUsersQuery();
}
QName type = dto.getType().getTypeQName();
deleteObjectsAsync(type, query, true, "Delete all of type " + type.getLocalPart(), result);
info(getString("pageDebugList.messsage.deleteAllOfType", dto.getType()));
} catch (Exception ex) {
result.recomputeStatus();
result.recordFatalError("Couldn't delete objects of type " + dto.getType(), ex);
LoggingUtils.logException(LOGGER, "Couldn't delete objects of type " + dto.getType(), ex);
}
showResult(result);
target.add(getFeedbackPanel());
}
private void deleteSelectedConfirmed(AjaxRequestTarget target, List<DebugObjectItem> items) {
DebugConfDialogDto dto = confDialogModel.getObject();
OperationResult result = new OperationResult(OPERATION_DELETE_OBJECTS);
for (DebugObjectItem bean : items) {
WebModelUtils.deleteObject(dto.getType(), bean.getOid(), ModelExecuteOptions.createRaw(), result,
this);
}
result.computeStatusIfUnknown();
RepositoryObjectDataProvider provider = getTableDataProvider();
provider.clearCache();
showResult(result);
target.add((Component) getListTable());
target.add(getFeedbackPanel());
}
private void clearSearchPerformed(AjaxRequestTarget target) {
DebugSearchDto dto = searchModel.getObject();
dto.setText(null);
listObjectsPerformed(target);
}
private void deleteAllShadowsOnResource(AjaxRequestTarget target) {
DebugSearchDto dto = searchModel.getObject();
if (dto.getResource() == null) {
error(getString("pageDebugList.message.resourceNotSelected"));
target.add(getFeedbackPanel());
return;
}
LOGGER.debug("Displaying delete all shadows on resource {} confirmation dialog",
dto.getResource().getName());
DebugConfDialogDto dialogDto = new DebugConfDialogDto(
DebugConfDialogDto.Operation.DELETE_RESOURCE_SHADOWS, null, null);
confDialogModel.setObject(dialogDto);
ModalWindow dialog = (ModalWindow) get(ID_CONFIRM_DELETE_POPUP);
dialog.show(target);
}
private void deleteAllShadowsOnResourceConfirmed(AjaxRequestTarget target) {
DebugSearchDto dto = searchModel.getObject();
String resourceOid = dto.getResource().getOid();
LOGGER.debug("Deleting shadows on resource {}", resourceOid);
OperationResult result = new OperationResult(OPERATION_DELETE_SHADOWS);
try {
            RefFilter ref = RefFilter.createReferenceEqual(ShadowType.F_RESOURCE_REF, ShadowType.class,
                    getPrismContext(), resourceOid);
ObjectQuery objectQuery = ObjectQuery.createObjectQuery(ref);
QName type = ShadowType.COMPLEX_TYPE;
deleteObjectsAsync(type, objectQuery, true, "Delete shadows on " + dto.getResource().getName(),
result);
info(getString("pageDebugList.messsage.deleteAllShadowsStarted", dto.getResource().getName()));
} catch (Exception ex) {
result.recomputeStatus();
result.recordFatalError("Couldn't delete shadows.", ex);
LoggingUtils.logException(LOGGER, "Couldn't delete shadows", ex);
}
showResult(result);
target.add(getFeedbackPanel());
}
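    /**
     * Schedules object deletion as a background task: the model's delete task handler
     * receives the serialized query, the object type and the raw flag through the task
     * extension properties set in this method.
     */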
private void deleteObjectsAsync(QName type, ObjectQuery objectQuery, boolean raw, String taskName,
OperationResult result)
throws SchemaException, ObjectAlreadyExistsException, ObjectNotFoundException {
Task task = createSimpleTask(result.getOperation());
task.setHandlerUri(ModelPublicConstants.DELETE_TASK_HANDLER_URI);
if (objectQuery == null) {
objectQuery = new ObjectQuery();
}
QueryType query = QueryJaxbConvertor.createQueryType(objectQuery, getPrismContext());
PrismPropertyDefinition queryDef = new PrismPropertyDefinition(
SchemaConstants.MODEL_EXTENSION_OBJECT_QUERY, QueryType.COMPLEX_TYPE, getPrismContext());
PrismProperty<QueryType> queryProp = queryDef.instantiate();
queryProp.setRealValue(query);
task.setExtensionProperty(queryProp);
PrismPropertyDefinition typeDef = new PrismPropertyDefinition(
SchemaConstants.MODEL_EXTENSION_OBJECT_TYPE, DOMUtil.XSD_QNAME, getPrismContext());
PrismProperty<QName> typeProp = typeDef.instantiate();
typeProp.setRealValue(type);
task.setExtensionProperty(typeProp);
PrismPropertyDefinition rawDef = new PrismPropertyDefinition(
SchemaConstants.MODEL_EXTENSION_OPTION_RAW, DOMUtil.XSD_BOOLEAN, getPrismContext());
        PrismProperty<Boolean> rawProp = rawDef.instantiate();
rawProp.setRealValue(raw);
task.setExtensionProperty(rawProp);
task.setName(taskName);
task.savePendingModifications(result);
TaskManager taskManager = getTaskManager();
taskManager.switchToBackground(task, result);
}
private static class SearchFragment extends Fragment {
public SearchFragment(String id, String markupId, MarkupContainer markupProvider,
IModel<DebugSearchDto> model, IModel<List<ObjectViewDto>> resourcesModel) {
super(id, markupId, markupProvider, model);
initLayout(resourcesModel);
}
private void initLayout(IModel<List<ObjectViewDto>> resourcesModel) {
final Form searchForm = new Form(ID_SEARCH_FORM);
add(searchForm);
searchForm.setOutputMarkupId(true);
final IModel<DebugSearchDto> model = (IModel) getDefaultModel();
BasicSearchPanel<DebugSearchDto> basicSearch = new BasicSearchPanel<DebugSearchDto>(
ID_BASIC_SEARCH, model) {
@Override
protected IModel<String> createSearchTextModel() {
return new PropertyModel<>(model, DebugSearchDto.F_TEXT);
}
@Override
protected void searchPerformed(AjaxRequestTarget target) {
PageDebugList page = (PageDebugList) getPage();
page.listObjectsPerformed(target);
}
@Override
protected void clearSearchPerformed(AjaxRequestTarget target) {
PageDebugList page = (PageDebugList) getPage();
page.clearSearchPerformed(target);
}
};
searchForm.add(basicSearch);
IChoiceRenderer<ObjectTypes> renderer = new IChoiceRenderer<ObjectTypes>() {
@Override
public Object getDisplayValue(ObjectTypes object) {
ObjectTypeGuiDescriptor descr = ObjectTypeGuiDescriptor.getDescriptor(object);
String key = descr != null ? descr.getLocalizationKey()
: ObjectTypeGuiDescriptor.ERROR_LOCALIZATION_KEY;
return new StringResourceModel(key, getPage(), null).getString();
}
@Override
public String getIdValue(ObjectTypes object, int index) {
return object.getClassDefinition().getSimpleName();
}
};
DropDownChoice choice = new DropDownChoice(ID_CHOICE,
new PropertyModel(model, DebugSearchDto.F_TYPE), createChoiceModel(renderer), renderer);
searchForm.add(choice);
choice.add(new OnChangeAjaxBehavior() {
@Override
protected void onUpdate(AjaxRequestTarget target) {
PageDebugList page = (PageDebugList) getPage();
page.listObjectsPerformed(target);
}
});
DropDownChoice resource = new DropDownChoice(ID_RESOURCE,
new PropertyModel(model, DebugSearchDto.F_RESOURCE_OID), resourcesModel,
createResourceRenderer());
resource.setNullValid(true);
resource.add(new AjaxFormComponentUpdatingBehavior("onblur") {
@Override
protected void onUpdate(AjaxRequestTarget target) {
// nothing to do, it's here just to update model
}
});
resource.add(new VisibleEnableBehaviour() {
@Override
public boolean isVisible() {
DebugSearchDto dto = model.getObject();
return ObjectTypes.SHADOW.equals(dto.getType());
}
});
searchForm.add(resource);
AjaxCheckBox zipCheck = new AjaxCheckBox(ID_ZIP_CHECK, new Model<>(false)) {
@Override
protected void onUpdate(AjaxRequestTarget target) {
}
};
add(zipCheck);
}
public AjaxCheckBox getZipCheck() {
return (AjaxCheckBox) get(ID_ZIP_CHECK);
}
private IModel<List<ObjectTypes>> createChoiceModel(final IChoiceRenderer<ObjectTypes> renderer) {
return new LoadableModel<List<ObjectTypes>>(false) {
@Override
protected List<ObjectTypes> load() {
List<ObjectTypes> choices = new ArrayList<>();
Collections.addAll(choices, ObjectTypes.values());
choices.remove(ObjectTypes.OBJECT);
Collections.sort(choices, new Comparator<ObjectTypes>() {
@Override
public int compare(ObjectTypes o1, ObjectTypes o2) {
String str1 = (String) renderer.getDisplayValue(o1);
String str2 = (String) renderer.getDisplayValue(o2);
return String.CASE_INSENSITIVE_ORDER.compare(str1, str2);
}
});
return choices;
}
};
}
private IChoiceRenderer<ObjectViewDto> createResourceRenderer() {
return new IChoiceRenderer<ObjectViewDto>() {
@Override
public Object getDisplayValue(ObjectViewDto object) {
if (object == null) {
return getString("pageDebugList.resource");
}
return object.getName();
}
@Override
public String getIdValue(ObjectViewDto object, int index) {
return Integer.toString(index);
}
};
}
}
}
| gui/admin-gui/src/main/java/com/evolveum/midpoint/web/page/admin/configuration/PageDebugList.java | /*
* Copyright (c) 2010-2013 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.web.page.admin.configuration;
import com.evolveum.midpoint.model.api.ModelExecuteOptions;
import com.evolveum.midpoint.model.api.ModelPublicConstants;
import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.prism.PrismProperty;
import com.evolveum.midpoint.prism.PrismPropertyDefinition;
import com.evolveum.midpoint.prism.match.PolyStringNormMatchingRule;
import com.evolveum.midpoint.prism.path.ItemPath;
import com.evolveum.midpoint.prism.polystring.PolyStringNormalizer;
import com.evolveum.midpoint.prism.query.*;
import com.evolveum.midpoint.schema.GetOperationOptions;
import com.evolveum.midpoint.schema.SelectorOptions;
import com.evolveum.midpoint.schema.constants.ObjectTypes;
import com.evolveum.midpoint.schema.constants.SchemaConstants;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.security.api.AuthorizationConstants;
import com.evolveum.midpoint.task.api.Task;
import com.evolveum.midpoint.task.api.TaskManager;
import com.evolveum.midpoint.util.DOMUtil;
import com.evolveum.midpoint.util.exception.ObjectAlreadyExistsException;
import com.evolveum.midpoint.util.exception.ObjectNotFoundException;
import com.evolveum.midpoint.util.exception.SchemaException;
import com.evolveum.midpoint.util.logging.LoggingUtils;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.web.application.AuthorizationAction;
import com.evolveum.midpoint.web.application.PageDescriptor;
import com.evolveum.midpoint.web.component.BasicSearchPanel;
import com.evolveum.midpoint.web.component.data.BoxedTablePanel;
import com.evolveum.midpoint.web.component.data.RepositoryObjectDataProvider;
import com.evolveum.midpoint.web.component.data.Table;
import com.evolveum.midpoint.web.component.data.column.CheckBoxHeaderColumn;
import com.evolveum.midpoint.web.component.data.column.InlineMenuHeaderColumn;
import com.evolveum.midpoint.web.component.data.column.InlineMenuable;
import com.evolveum.midpoint.web.component.data.column.LinkColumn;
import com.evolveum.midpoint.web.component.data.column.LinkPanel;
import com.evolveum.midpoint.web.component.data.column.TwoValueLinkPanel;
import com.evolveum.midpoint.web.component.dialog.ConfirmationDialog;
import com.evolveum.midpoint.web.component.dialog.DeleteAllDialog;
import com.evolveum.midpoint.web.component.dialog.DeleteAllDto;
import com.evolveum.midpoint.web.component.menu.cog.InlineMenuItem;
import com.evolveum.midpoint.web.component.util.LoadableModel;
import com.evolveum.midpoint.web.component.util.VisibleEnableBehaviour;
import com.evolveum.midpoint.web.page.admin.configuration.component.DebugButtonPanel;
import com.evolveum.midpoint.web.page.admin.configuration.component.HeaderMenuAction;
import com.evolveum.midpoint.web.page.admin.configuration.component.PageDebugDownloadBehaviour;
import com.evolveum.midpoint.web.page.admin.configuration.dto.DebugConfDialogDto;
import com.evolveum.midpoint.web.page.admin.configuration.dto.DebugObjectItem;
import com.evolveum.midpoint.web.page.admin.configuration.dto.DebugSearchDto;
import com.evolveum.midpoint.web.page.admin.dto.ObjectViewDto;
import com.evolveum.midpoint.web.session.ConfigurationStorage;
import com.evolveum.midpoint.web.session.UserProfileStorage;
import com.evolveum.midpoint.web.util.ObjectTypeGuiDescriptor;
import com.evolveum.midpoint.web.util.WebMiscUtil;
import com.evolveum.midpoint.web.util.WebModelUtils;
import com.evolveum.midpoint.xml.ns._public.common.common_3.*;
import com.evolveum.prism.xml.ns._public.query_3.QueryType;
import org.apache.commons.lang.StringUtils;
import org.apache.wicket.Component;
import org.apache.wicket.MarkupContainer;
import org.apache.wicket.ajax.AjaxRequestTarget;
import org.apache.wicket.ajax.form.AjaxFormComponentUpdatingBehavior;
import org.apache.wicket.ajax.form.OnChangeAjaxBehavior;
import org.apache.wicket.ajax.markup.html.form.AjaxCheckBox;
import org.apache.wicket.behavior.Behavior;
import org.apache.wicket.extensions.ajax.markup.html.modal.ModalWindow;
import org.apache.wicket.extensions.markup.html.repeater.data.grid.ICellPopulator;
import org.apache.wicket.extensions.markup.html.repeater.data.table.AbstractColumn;
import org.apache.wicket.extensions.markup.html.repeater.data.table.DataTable;
import org.apache.wicket.extensions.markup.html.repeater.data.table.IColumn;
import org.apache.wicket.extensions.markup.html.repeater.data.table.PropertyColumn;
import org.apache.wicket.markup.html.WebMarkupContainer;
import org.apache.wicket.markup.html.basic.Label;
import org.apache.wicket.markup.html.form.DropDownChoice;
import org.apache.wicket.markup.html.form.Form;
import org.apache.wicket.markup.html.form.IChoiceRenderer;
import org.apache.wicket.markup.html.list.ListItem;
import org.apache.wicket.markup.html.list.PropertyListView;
import org.apache.wicket.markup.html.panel.Fragment;
import org.apache.wicket.markup.repeater.Item;
import org.apache.wicket.model.*;
import org.apache.wicket.model.util.ListModel;
import org.apache.wicket.request.mapper.parameter.PageParameters;
import javax.xml.namespace.QName;
import java.util.*;
/**
* @author lazyman
*/
@PageDescriptor(url = "/admin/config/debugs", action = {
@AuthorizationAction(actionUri = PageAdminConfiguration.AUTH_CONFIGURATION_ALL, label = PageAdminConfiguration.AUTH_CONFIGURATION_ALL_LABEL, description = PageAdminConfiguration.AUTH_CONFIGURATION_ALL_DESCRIPTION),
@AuthorizationAction(actionUri = AuthorizationConstants.AUTZ_UI_CONFIGURATION_DEBUGS_URL, label = "PageDebugList.auth.debugs.label", description = "PageDebugList.auth.debugs.description") })
public class PageDebugList extends PageAdminConfiguration {
private static final Trace LOGGER = TraceManager.getTrace(PageDebugList.class);
private static final String DOT_CLASS = PageDebugList.class.getName() + ".";
private static final String OPERATION_DELETE_OBJECTS = DOT_CLASS + "deleteObjects";
private static final String OPERATION_LAXATIVE_DELETE = DOT_CLASS + "laxativeDelete";
private static final String OPERATION_LOAD_RESOURCES = DOT_CLASS + "loadResources";
private static final String OPERATION_DELETE_SHADOWS = DOT_CLASS + "deleteShadows";
private static final String ID_CONFIRM_DELETE_POPUP = "confirmDeletePopup";
private static final String ID_MAIN_FORM = "mainForm";
private static final String ID_ZIP_CHECK = "zipCheck";
private static final String ID_TABLE = "table";
private static final String ID_CHOICE = "choice";
private static final String ID_EXPORT = "export";
private static final String ID_EXPORT_ALL = "exportAll";
private static final String ID_SEARCH_FORM = "searchForm";
private static final String ID_BASIC_SEARCH = "basicSearch";
private static final String ID_DELETE_ALL_DIALOG = "confirmDeleteAll";
private static final String ID_RESOURCE = "resource";
private static final String ID_TABLE_HEADER = "tableHeader";
private static final Integer DELETE_LOG_INTERVAL = 50;
    // search form model
private IModel<DebugSearchDto> searchModel;
// confirmation dialog model
private IModel<DebugConfDialogDto> confDialogModel;
private IModel<List<ObjectViewDto>> resourcesModel;
public PageDebugList() {
this(true);
}
public PageDebugList(boolean clearPagingInSession) {
searchModel = new LoadableModel<DebugSearchDto>(false) {
@Override
protected DebugSearchDto load() {
ConfigurationStorage storage = getSessionStorage().getConfiguration();
return storage.getDebugSearchDto();
}
};
confDialogModel = new LoadableModel<DebugConfDialogDto>() {
@Override
protected DebugConfDialogDto load() {
return new DebugConfDialogDto();
}
};
resourcesModel = new LoadableModel<List<ObjectViewDto>>() {
@Override
protected List<ObjectViewDto> load() {
return loadResources();
}
};
getSessionStorage().clearPagingInSession(clearPagingInSession);
initLayout();
}
private List<ObjectViewDto> loadResources() {
List<ObjectViewDto> objects = new ArrayList<>();
try {
OperationResult result = new OperationResult(OPERATION_LOAD_RESOURCES);
List<PrismObject<ResourceType>> list = WebModelUtils.searchObjects(ResourceType.class, null,
SelectorOptions.createCollection(GetOperationOptions.createRaw()), result, this, null);
for (PrismObject obj : list) {
ObjectViewDto dto = new ObjectViewDto(obj.getOid(), WebMiscUtil.getName(obj));
objects.add(dto);
}
} catch (Exception ex) {
// todo implement error handling
}
Collections.sort(objects, new Comparator<ObjectViewDto>() {
@Override
public int compare(ObjectViewDto o1, ObjectViewDto o2) {
return String.CASE_INSENSITIVE_ORDER.compare(o1.getName(), o2.getName());
}
});
return objects;
}
private void initLayout() {
DeleteAllDialog deleteAllDialog = new DeleteAllDialog(ID_DELETE_ALL_DIALOG,
createStringResource("pageDebugList.dialog.title.deleteAll")) {
@Override
public void yesPerformed(AjaxRequestTarget target) {
close(target);
deleteAllIdentitiesConfirmed(target, getModel().getObject());
}
};
add(deleteAllDialog);
ConfirmationDialog deleteConfirm = new ConfirmationDialog(ID_CONFIRM_DELETE_POPUP,
createStringResource("pageDebugList.dialog.title.confirmDelete"),
createDeleteConfirmString()) {
@Override
public void yesPerformed(AjaxRequestTarget target) {
close(target);
DebugConfDialogDto dto = confDialogModel.getObject();
switch (dto.getOperation()) {
case DELETE_ALL_TYPE:
deleteAllTypeConfirmed(target);
break;
case DELETE_SELECTED:
deleteSelectedConfirmed(target, dto.getObjects());
break;
case DELETE_RESOURCE_SHADOWS:
deleteAllShadowsOnResourceConfirmed(target);
break;
}
}
@Override
public boolean getLabelEscapeModelStrings() {
return false;
}
};
add(deleteConfirm);
Form main = new Form(ID_MAIN_FORM);
add(main);
DebugSearchDto dto = searchModel.getObject();
Class type = dto.getType().getClassDefinition();
addOrReplaceTable(new RepositoryObjectDataProvider(this, type) {
@Override
protected void saveProviderPaging(ObjectQuery query, ObjectPaging paging) {
ConfigurationStorage storage = getSessionStorage().getConfiguration();
storage.setDebugSearchPaging(paging);
}
});
PageDebugDownloadBehaviour ajaxDownloadBehavior = new PageDebugDownloadBehaviour();
main.add(ajaxDownloadBehavior);
}
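    /**
     * Configures the single PageDebugDownloadBehaviour attached to the main form with the
     * given type and query plus the current ZIP checkbox state, and triggers the download.
     */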
private void initDownload(AjaxRequestTarget target, Class<? extends ObjectType> type, ObjectQuery query) {
List<PageDebugDownloadBehaviour> list = get(ID_MAIN_FORM)
.getBehaviors(PageDebugDownloadBehaviour.class);
PageDebugDownloadBehaviour downloadBehaviour = list.get(0);
downloadBehaviour.setType(type);
downloadBehaviour.setQuery(query);
downloadBehaviour.setUseZip(hasToZip());
downloadBehaviour.initiate(target);
}
private void addOrReplaceTable(RepositoryObjectDataProvider provider) {
provider.setQuery(createQuery());
Form mainForm = (Form) get(ID_MAIN_FORM);
BoxedTablePanel table = new BoxedTablePanel(ID_TABLE, provider, initColumns(provider.getType()),
UserProfileStorage.TableId.CONF_DEBUG_LIST_PANEL,
(int) getItemsPerPage(UserProfileStorage.TableId.CONF_DEBUG_LIST_PANEL)) {
@Override
protected WebMarkupContainer createHeader(String headerId) {
return new SearchFragment(headerId, ID_TABLE_HEADER, PageDebugList.this, searchModel,
resourcesModel);
}
};
table.setOutputMarkupId(true);
ConfigurationStorage storage = getSessionStorage().getConfiguration();
table.setCurrentPage(storage.getDebugSearchPaging());
mainForm.addOrReplace(table);
}
private List<IColumn> initColumns(final Class<? extends ObjectType> type) {
List<IColumn> columns = new ArrayList<>();
IColumn column = new CheckBoxHeaderColumn<ObjectType>();
columns.add(column);
column = new LinkColumn<DebugObjectItem>(createStringResource("pageDebugList.name"),
DebugObjectItem.F_NAME, DebugObjectItem.F_NAME) {
@Override
public void populateItem(Item<ICellPopulator<DebugObjectItem>> cellItem, String componentId,
IModel<DebugObjectItem> rowModel) {
cellItem.add(new TwoValueLinkPanel(componentId,
new PropertyModel<String>(rowModel, DebugObjectItem.F_NAME),
new PropertyModel<String>(rowModel, DebugObjectItem.F_OID)));
}
@Override
public void onClick(AjaxRequestTarget target, IModel<DebugObjectItem> rowModel) {
DebugObjectItem object = rowModel.getObject();
objectEditPerformed(target, object.getOid(), type);
}
};
columns.add(column);
columns.add(new PropertyColumn(createStringResource("pageDebugList.description"),
DebugObjectItem.F_DESCRIPTION));
if (ShadowType.class.isAssignableFrom(type)) {
columns.add(new PropertyColumn(createStringResource("pageDebugList.resourceName"),
DebugObjectItem.F_RESOURCE_NAME));
columns.add(new PropertyColumn(createStringResource("pageDebugList.resourceType"),
DebugObjectItem.F_RESOURCE_TYPE));
}
column = new AbstractColumn<DebugObjectItem, String>(new Model(), null) {
@Override
public String getCssClass() {
return "debug-list-buttons";
}
@Override
public void populateItem(Item<ICellPopulator<DebugObjectItem>> cellItem, String componentId,
IModel<DebugObjectItem> rowModel) {
cellItem.add(new DebugButtonPanel<DebugObjectItem>(componentId, rowModel) {
@Override
public void deletePerformed(AjaxRequestTarget target, IModel<DebugObjectItem> model) {
deleteSelected(target, model.getObject());
}
@Override
public void exportPerformed(AjaxRequestTarget target, IModel<DebugObjectItem> model) {
exportSelected(target, model.getObject());
}
});
}
};
columns.add(column);
column = new InlineMenuHeaderColumn<InlineMenuable>(initInlineMenu()) {
@Override
public void populateItem(Item<ICellPopulator<InlineMenuable>> cellItem, String componentId,
IModel<InlineMenuable> rowModel) {
// we don't need row inline menu
cellItem.add(new Label(componentId));
}
};
columns.add(column);
return columns;
}
private List<InlineMenuItem> initInlineMenu() {
List<InlineMenuItem> headerMenuItems = new ArrayList<>();
headerMenuItems.add(new InlineMenuItem(createStringResource("pageDebugList.menu.exportSelected"),
true, new HeaderMenuAction(this) {
@Override
public void onSubmit(AjaxRequestTarget target, Form<?> form) {
exportSelected(target, null);
}
}));
headerMenuItems
.add(new InlineMenuItem(createStringResource("pageDebugList.menu.exportAllSelectedType"),
true, new HeaderMenuAction(this) {
@Override
public void onSubmit(AjaxRequestTarget target, Form<?> form) {
exportAllType(target);
}
}));
headerMenuItems.add(new InlineMenuItem(createStringResource("pageDebugList.menu.exportAll"), true,
new HeaderMenuAction(this) {
@Override
public void onSubmit(AjaxRequestTarget target, Form<?> form) {
exportAll(target);
}
}));
headerMenuItems.add(new InlineMenuItem());
headerMenuItems.add(new InlineMenuItem(createStringResource("pageDebugList.menu.deleteSelected"),
true, new HeaderMenuAction(this) {
@Override
public void onSubmit(AjaxRequestTarget target, Form<?> form) {
deleteSelected(target, null);
}
}));
headerMenuItems.add(new InlineMenuItem(createStringResource("pageDebugList.menu.deleteAllType"), true,
new HeaderMenuAction(this) {
@Override
public void onSubmit(AjaxRequestTarget target, Form<?> form) {
deleteAllType(target);
}
}));
headerMenuItems
.add(new InlineMenuItem(createStringResource("pageDebugList.menu.deleteShadowsOnResource"),
new Model(true), new AbstractReadOnlyModel<Boolean>() {
@Override
public Boolean getObject() {
DebugSearchDto dto = searchModel.getObject();
return ObjectTypes.SHADOW.equals(dto.getType());
}
}, false, new HeaderMenuAction(this) {
@Override
public void onClick(AjaxRequestTarget target) {
deleteAllShadowsOnResource(target);
}
}));
headerMenuItems.add(new InlineMenuItem());
headerMenuItems.add(new InlineMenuItem(createStringResource("pageDebugList.menu.deleteAllIdentities"),
true, new HeaderMenuAction(this) {
@Override
public void onSubmit(AjaxRequestTarget target, Form<?> form) {
deleteAllIdentities(target);
}
}));
return headerMenuItems;
}
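    /**
     * Reads the state of the ZIP export checkbox, which lives in the search fragment used
     * as the table header.
     */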
private boolean hasToZip() {
BoxedTablePanel table = (BoxedTablePanel) getListTable();
SearchFragment header = (SearchFragment) table.getHeader();
AjaxCheckBox zipCheck = header.getZipCheck();
return zipCheck.getModelObject();
}
private Table getListTable() {
return (Table) get(createComponentPath(ID_MAIN_FORM, ID_TABLE));
}
private void listObjectsPerformed(AjaxRequestTarget target) {
DebugSearchDto dto = searchModel.getObject();
ObjectTypes selected = dto.getType();
RepositoryObjectDataProvider provider = getTableDataProvider();
provider.setQuery(createQuery());
if (selected != null) {
provider.setType(selected.getClassDefinition());
addOrReplaceTable(provider);
}
// save object type category to session storage, used by back button
ConfigurationStorage storage = getSessionStorage().getConfiguration();
storage.setDebugSearchDto(dto);
Table table = getListTable();
target.add((Component) table);
}
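    /**
     * Builds the repository search query: an optional resourceRef filter when shadows of a
     * concrete resource are listed, combined with a substring filter on the normalized name.
     */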
private ObjectQuery createQuery() {
DebugSearchDto dto = searchModel.getObject();
List<ObjectFilter> filters = new ArrayList<>();
if (ObjectTypes.SHADOW.equals(dto.getType()) && dto.getResource() != null) {
String oid = dto.getResource().getOid();
RefFilter ref = RefFilter.createReferenceEqual(ShadowType.F_RESOURCE_REF, ShadowType.class,
getPrismContext(), oid);
filters.add(ref);
}
if (StringUtils.isNotEmpty(dto.getText())) {
String nameText = dto.getText();
PolyStringNormalizer normalizer = getPrismContext().getDefaultPolyStringNormalizer();
String normalizedString = normalizer.normalize(nameText);
ObjectFilter substring = SubstringFilter.createSubstring(ObjectType.F_NAME, ObjectType.class,
getPrismContext(), PolyStringNormMatchingRule.NAME, normalizedString);
filters.add(substring);
}
if (filters.isEmpty()) {
return null;
}
ObjectFilter filter = filters.size() > 1 ? AndFilter.createAnd(filters) : filters.get(0);
ObjectQuery query = new ObjectQuery();
query.setFilter(filter);
return query;
}
private void objectEditPerformed(AjaxRequestTarget target, String oid, Class<? extends ObjectType> type) {
PageParameters parameters = new PageParameters();
parameters.add(PageDebugView.PARAM_OBJECT_ID, oid);
parameters.add(PageDebugView.PARAM_OBJECT_TYPE, type.getSimpleName());
setResponsePage(PageDebugView.class, parameters);
}
private RepositoryObjectDataProvider getTableDataProvider() {
Table tablePanel = getListTable();
DataTable table = tablePanel.getDataTable();
return (RepositoryObjectDataProvider) table.getDataProvider();
}
private IModel<String> createDeleteConfirmString() {
return new AbstractReadOnlyModel<String>() {
@Override
public String getObject() {
DebugConfDialogDto dto = confDialogModel.getObject();
switch (dto.getOperation()) {
case DELETE_ALL_TYPE:
String key = ObjectTypeGuiDescriptor.getDescriptor(dto.getType())
.getLocalizationKey();
String type = createStringResource(key).getString();
return createStringResource("pageDebugList.message.deleteAllType", type).getString();
case DELETE_SELECTED:
List<DebugObjectItem> selectedList = dto.getObjects();
if (selectedList.size() > 1) {
return createStringResource("pageDebugList.message.deleteSelectedConfirm",
selectedList.size()).getString();
}
DebugObjectItem selectedItem = selectedList.get(0);
return createStringResource("pageDebugList.message.deleteObjectConfirm",
selectedItem.getName()).getString();
case DELETE_RESOURCE_SHADOWS:
DebugSearchDto search = searchModel.getObject();
return createStringResource("pageDebugList.messsage.deleteAllResourceShadows",
search.getResource().getName()).getString();
}
return "";
}
};
}
private void deleteAllIdentitiesConfirmed(AjaxRequestTarget target, DeleteAllDto dto) {
Collection<SelectorOptions<GetOperationOptions>> options = new ArrayList<>();
GetOperationOptions opt = GetOperationOptions.createRaw();
options.add(SelectorOptions.create(ItemPath.EMPTY_PATH, opt));
OperationResult result = new OperationResult(OPERATION_LAXATIVE_DELETE);
try {
if (dto.getDeleteUsers()) {
ObjectQuery query = createDeleteAllUsersQuery();
deleteObjectsAsync(UserType.COMPLEX_TYPE, query, true, "Delete all users", result);
}
if (dto.getDeleteOrgs()) {
deleteObjectsAsync(OrgType.COMPLEX_TYPE, null, true, "Delete all orgs", result);
}
if (dto.getDeleteAccountShadow()) {
deleteAllShadowsConfirmed(result, true);
}
if (dto.getDeleteNonAccountShadow()) {
deleteAllShadowsConfirmed(result, false);
}
} catch (Exception ex) {
result.computeStatus(getString("pageDebugList.message.laxativeProblem"));
LoggingUtils.logException(LOGGER, getString("pageDebugList.message.laxativeProblem"), ex);
}
target.add(getFeedbackPanel());
result.recomputeStatus();
showResult(result);
}
private ObjectQuery createDeleteAllUsersQuery() {
InOidFilter inOid = InOidFilter.createInOid(SystemObjectsType.USER_ADMINISTRATOR.value());
NotFilter not = new NotFilter(inOid);
return ObjectQuery.createObjectQuery(not);
}
private void deleteAllShadowsConfirmed(OperationResult result, boolean deleteAccountShadows)
throws ObjectAlreadyExistsException, ObjectNotFoundException, SchemaException {
ObjectFilter kind = EqualFilter.createEqual(ShadowType.F_KIND, ShadowType.class, getPrismContext(),
null, ShadowKindType.ACCOUNT);
String taskName;
ObjectQuery query;
if (deleteAccountShadows) {
taskName = "Delete all account shadows";
query = ObjectQuery.createObjectQuery(kind);
} else {
taskName = "Delete all non-account shadows";
query = ObjectQuery.createObjectQuery(NotFilter.createNot(kind));
}
deleteObjectsAsync(ShadowType.COMPLEX_TYPE, query, true, taskName, result);
}
private void exportSelected(AjaxRequestTarget target, DebugObjectItem item) {
List<DebugObjectItem> selected = getSelectedData(target, item);
if (selected.isEmpty()) {
return;
}
List<String> oids = new ArrayList<>();
for (DebugObjectItem dItem : selected) {
oids.add(dItem.getOid());
}
ObjectFilter filter = InOidFilter.createInOid(oids);
DebugSearchDto searchDto = searchModel.getObject();
initDownload(target, searchDto.getType().getClassDefinition(), ObjectQuery.createObjectQuery(filter));
}
private void exportAllType(AjaxRequestTarget target) {
DebugSearchDto searchDto = searchModel.getObject();
initDownload(target, searchDto.getType().getClassDefinition(), null);
}
private void exportAll(AjaxRequestTarget target) {
initDownload(target, ObjectType.class, null);
}
private void deleteAllType(AjaxRequestTarget target) {
DebugSearchDto searchDto = searchModel.getObject();
DebugConfDialogDto dto = new DebugConfDialogDto(DebugConfDialogDto.Operation.DELETE_ALL_TYPE, null,
searchDto.getType().getClassDefinition());
confDialogModel.setObject(dto);
ModalWindow dialog = (ModalWindow) get(ID_CONFIRM_DELETE_POPUP);
dialog.show(target);
}
private List<DebugObjectItem> getSelectedData(AjaxRequestTarget target, DebugObjectItem item) {
List<DebugObjectItem> items;
if (item != null) {
items = new ArrayList<>();
items.add(item);
return items;
}
items = WebMiscUtil.getSelectedData(getListTable());
if (items.isEmpty()) {
warn(getString("pageDebugList.message.nothingSelected"));
target.add(getFeedbackPanel());
}
return items;
}
private void deleteSelected(AjaxRequestTarget target, DebugObjectItem item) {
List<DebugObjectItem> selected = getSelectedData(target, item);
if (selected.isEmpty()) {
return;
}
DebugSearchDto searchDto = searchModel.getObject();
DebugConfDialogDto dto = new DebugConfDialogDto(DebugConfDialogDto.Operation.DELETE_SELECTED,
selected, searchDto.getType().getClassDefinition());
confDialogModel.setObject(dto);
ModalWindow dialog = (ModalWindow) get(ID_CONFIRM_DELETE_POPUP);
dialog.show(target);
}
private void deleteAllIdentities(AjaxRequestTarget target) {
DeleteAllDialog dialog = (DeleteAllDialog) get(ID_DELETE_ALL_DIALOG);
dialog.show(target);
}
private void deleteAllTypeConfirmed(AjaxRequestTarget target) {
DebugSearchDto dto = searchModel.getObject();
LOGGER.debug("Deleting all of type {}", dto.getType());
OperationResult result = new OperationResult(OPERATION_DELETE_OBJECTS);
try {
ObjectQuery query = null;
if (ObjectTypes.USER.equals(dto.getType())) {
query = createDeleteAllUsersQuery();
}
QName type = dto.getType().getTypeQName();
deleteObjectsAsync(type, query, true, "Delete all of type " + type.getLocalPart(), result);
info(getString("pageDebugList.messsage.deleteAllOfType", dto.getType()));
} catch (Exception ex) {
result.recomputeStatus();
result.recordFatalError("Couldn't delete objects of type " + dto.getType(), ex);
LoggingUtils.logException(LOGGER, "Couldn't delete objects of type " + dto.getType(), ex);
}
showResult(result);
target.add(getFeedbackPanel());
}
private void deleteSelectedConfirmed(AjaxRequestTarget target, List<DebugObjectItem> items) {
DebugConfDialogDto dto = confDialogModel.getObject();
OperationResult result = new OperationResult(OPERATION_DELETE_OBJECTS);
for (DebugObjectItem bean : items) {
WebModelUtils.deleteObject(dto.getType(), bean.getOid(), ModelExecuteOptions.createRaw(), result,
this);
}
result.computeStatusIfUnknown();
RepositoryObjectDataProvider provider = getTableDataProvider();
provider.clearCache();
showResult(result);
target.add((Component) getListTable());
target.add(getFeedbackPanel());
}
private void clearSearchPerformed(AjaxRequestTarget target) {
DebugSearchDto dto = searchModel.getObject();
dto.setText(null);
listObjectsPerformed(target);
}
private void deleteAllShadowsOnResource(AjaxRequestTarget target) {
DebugSearchDto dto = searchModel.getObject();
if (dto.getResource() == null) {
error(getString("pageDebugList.message.resourceNotSelected"));
target.add(getFeedbackPanel());
return;
}
LOGGER.debug("Displaying delete all shadows on resource {} confirmation dialog",
dto.getResource().getName());
DebugConfDialogDto dialogDto = new DebugConfDialogDto(
DebugConfDialogDto.Operation.DELETE_RESOURCE_SHADOWS, null, null);
confDialogModel.setObject(dialogDto);
ModalWindow dialog = (ModalWindow) get(ID_CONFIRM_DELETE_POPUP);
dialog.show(target);
}
private void deleteAllShadowsOnResourceConfirmed(AjaxRequestTarget target) {
DebugSearchDto dto = searchModel.getObject();
String resourceOid = dto.getResource().getOid();
LOGGER.debug("Deleting shadows on resource {}", resourceOid);
OperationResult result = new OperationResult(OPERATION_DELETE_SHADOWS);
try {
RefFilter ref = RefFilter.createReferenceEqual(ShadowType.F_RESOURCE_REF, ShadowType.class,
getPrismContext(), dto.getResource().getOid());
ObjectQuery objectQuery = ObjectQuery.createObjectQuery(ref);
QName type = ShadowType.COMPLEX_TYPE;
deleteObjectsAsync(type, objectQuery, true, "Delete shadows on " + dto.getResource().getName(),
result);
info(getString("pageDebugList.messsage.deleteAllShadowsStarted", dto.getResource().getName()));
} catch (Exception ex) {
result.recomputeStatus();
result.recordFatalError("Couldn't delete shadows.", ex);
LoggingUtils.logException(LOGGER, "Couldn't delete shadows", ex);
}
showResult(result);
target.add(getFeedbackPanel());
}
private void deleteObjectsAsync(QName type, ObjectQuery objectQuery, boolean raw, String taskName,
OperationResult result)
throws SchemaException, ObjectAlreadyExistsException, ObjectNotFoundException {
Task task = createSimpleTask(result.getOperation());
task.setHandlerUri(ModelPublicConstants.DELETE_TASK_HANDLER_URI);
if (objectQuery == null) {
objectQuery = new ObjectQuery();
}
QueryType query = QueryJaxbConvertor.createQueryType(objectQuery, getPrismContext());
PrismPropertyDefinition queryDef = new PrismPropertyDefinition(
SchemaConstants.MODEL_EXTENSION_OBJECT_QUERY, QueryType.COMPLEX_TYPE, getPrismContext());
PrismProperty<QueryType> queryProp = queryDef.instantiate();
queryProp.setRealValue(query);
task.setExtensionProperty(queryProp);
PrismPropertyDefinition typeDef = new PrismPropertyDefinition(
SchemaConstants.MODEL_EXTENSION_OBJECT_TYPE, DOMUtil.XSD_QNAME, getPrismContext());
PrismProperty<QName> typeProp = typeDef.instantiate();
typeProp.setRealValue(type);
task.setExtensionProperty(typeProp);
PrismPropertyDefinition rawDef = new PrismPropertyDefinition(
SchemaConstants.MODEL_EXTENSION_OPTION_RAW, DOMUtil.XSD_BOOLEAN, getPrismContext());
        PrismProperty<Boolean> rawProp = rawDef.instantiate();
rawProp.setRealValue(raw);
task.setExtensionProperty(rawProp);
task.setName(taskName);
task.savePendingModifications(result);
TaskManager taskManager = getTaskManager();
taskManager.switchToBackground(task, result);
}
private static class SearchFragment extends Fragment {
public SearchFragment(String id, String markupId, MarkupContainer markupProvider,
IModel<DebugSearchDto> model, IModel<List<ObjectViewDto>> resourcesModel) {
super(id, markupId, markupProvider, model);
initLayout(resourcesModel);
}
private void initLayout(IModel<List<ObjectViewDto>> resourcesModel) {
final Form searchForm = new Form(ID_SEARCH_FORM);
add(searchForm);
searchForm.setOutputMarkupId(true);
final IModel<DebugSearchDto> model = (IModel) getDefaultModel();
BasicSearchPanel<DebugSearchDto> basicSearch = new BasicSearchPanel<DebugSearchDto>(
ID_BASIC_SEARCH, model) {
@Override
protected IModel<String> createSearchTextModel() {
return new PropertyModel<>(model, DebugSearchDto.F_TEXT);
}
@Override
protected void searchPerformed(AjaxRequestTarget target) {
PageDebugList page = (PageDebugList) getPage();
page.listObjectsPerformed(target);
}
@Override
protected void clearSearchPerformed(AjaxRequestTarget target) {
PageDebugList page = (PageDebugList) getPage();
page.clearSearchPerformed(target);
}
};
searchForm.add(basicSearch);
IChoiceRenderer<ObjectTypes> renderer = new IChoiceRenderer<ObjectTypes>() {
@Override
public Object getDisplayValue(ObjectTypes object) {
ObjectTypeGuiDescriptor descr = ObjectTypeGuiDescriptor.getDescriptor(object);
String key = descr != null ? descr.getLocalizationKey()
: ObjectTypeGuiDescriptor.ERROR_LOCALIZATION_KEY;
return new StringResourceModel(key, getPage(), null).getString();
}
@Override
public String getIdValue(ObjectTypes object, int index) {
return object.getClassDefinition().getSimpleName();
}
};
DropDownChoice choice = new DropDownChoice(ID_CHOICE,
new PropertyModel(model, DebugSearchDto.F_TYPE), createChoiceModel(renderer), renderer);
searchForm.add(choice);
choice.add(new OnChangeAjaxBehavior() {
@Override
protected void onUpdate(AjaxRequestTarget target) {
PageDebugList page = (PageDebugList) getPage();
page.listObjectsPerformed(target);
}
});
DropDownChoice resource = new DropDownChoice(ID_RESOURCE,
new PropertyModel(model, DebugSearchDto.F_RESOURCE_OID), resourcesModel,
createResourceRenderer());
resource.setNullValid(true);
resource.add(new AjaxFormComponentUpdatingBehavior("onblur") {
@Override
protected void onUpdate(AjaxRequestTarget target) {
// nothing to do, it's here just to update model
}
});
resource.add(new VisibleEnableBehaviour() {
@Override
public boolean isVisible() {
DebugSearchDto dto = model.getObject();
return ObjectTypes.SHADOW.equals(dto.getType());
}
});
searchForm.add(resource);
AjaxCheckBox zipCheck = new AjaxCheckBox(ID_ZIP_CHECK, new Model<>(false)) {
@Override
protected void onUpdate(AjaxRequestTarget target) {
}
};
add(zipCheck);
}
public AjaxCheckBox getZipCheck() {
return (AjaxCheckBox) get(ID_ZIP_CHECK);
}
private IModel<List<ObjectTypes>> createChoiceModel(final IChoiceRenderer<ObjectTypes> renderer) {
return new LoadableModel<List<ObjectTypes>>(false) {
@Override
protected List<ObjectTypes> load() {
List<ObjectTypes> choices = new ArrayList<>();
Collections.addAll(choices, ObjectTypes.values());
choices.remove(ObjectTypes.OBJECT);
Collections.sort(choices, new Comparator<ObjectTypes>() {
@Override
public int compare(ObjectTypes o1, ObjectTypes o2) {
String str1 = (String) renderer.getDisplayValue(o1);
String str2 = (String) renderer.getDisplayValue(o2);
return String.CASE_INSENSITIVE_ORDER.compare(str1, str2);
}
});
return choices;
}
};
}
private IChoiceRenderer<ObjectViewDto> createResourceRenderer() {
return new IChoiceRenderer<ObjectViewDto>() {
@Override
public Object getDisplayValue(ObjectViewDto object) {
if (object == null) {
return getString("pageDebugList.resource");
}
return object.getName();
}
@Override
public String getIdValue(ObjectViewDto object, int index) {
return Integer.toString(index);
}
};
}
}
}
| fixing MID-2720
| gui/admin-gui/src/main/java/com/evolveum/midpoint/web/page/admin/configuration/PageDebugList.java | fixing MID-2720 |
|
Java | apache-2.0 | 52d7c48128be059882acbda3e56a4bb3b8b12f47 | 0 | hivemq/hivemq-spi | package com.hivemq.spi.metrics;
import com.codahale.metrics.*;
import com.hivemq.spi.callback.events.*;
import com.hivemq.spi.callback.security.*;
import com.hivemq.spi.callback.lowlevel.*;
import com.hivemq.spi.services.PluginExecutorService;
/**
* This class holds a constant {@link HiveMQMetric} for every metric which is provided by HiveMQ
*
* @author Christoph Schäbel
*/
public class HiveMQMetrics {
public static final String PLUGIN_EXECUTOR_PREFIX = "com.hivemq.plugin.executor";
public static final String EXCEPTION_PREFIX = "com.hivemq.exceptions";
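    // Usage sketch (illustrative, not part of this class): a plugin can resolve the Dropwizard
    // metric behind one of these constants through HiveMQ's MetricService. The lookup method
    // shown here is an assumption about this SPI version and may differ:
    //
    //   @Inject MetricService metricService;
    //   Counter incoming = metricService.getHiveMQMetric(HiveMQMetrics.INCOMING_MESSAGE_COUNT);
    //   long totalIncomingMessages = incoming.getCount();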
/**
* represents a {@link Counter}, which counts every incoming MQTT message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> INCOMING_MESSAGE_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.total.count", Counter.class);
/**
* represents a {@link Counter}, which counts every outgoing MQTT message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> OUTGOING_MESSAGE_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.total.count", Counter.class);
/**
     * represents a {@link Meter}, which measures the current rate of incoming MQTT messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> INCOMING_MESSAGE_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.total.rate", Meter.class);
/**
     * represents a {@link Meter}, which measures the current rate of outgoing MQTT messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> OUTGOING_MESSAGE_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.total.rate", Meter.class);
/**
* represents a {@link Histogram}, which measures the distribution of incoming MQTT message size (payload without fixed header)
*
* @since 3.0
*/
public static final HiveMQMetric<Histogram> INCOMING_MESSAGE_SIZE_MEAN =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.total.bytes", Histogram.class);
/**
     * represents a {@link Histogram}, which measures the distribution of outgoing MQTT message size (payload without fixed header)
*
* @since 3.0
*/
public static final HiveMQMetric<Histogram> OUTGOING_MESSAGE_SIZE_MEAN =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.total.bytes", Histogram.class);
/**
     * represents a {@link Meter}, which measures the current rate of incoming MQTT CONNECT messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> INCOMING_CONNECT_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.connect.rate", Meter.class);
/**
     * represents a {@link Counter}, which counts every incoming MQTT CONNECT message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> INCOMING_CONNECT_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.connect.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of outgoing MQTT CONNACK messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> OUTGOING_CONNACK_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.connack.rate", Meter.class);
/**
     * represents a {@link Counter}, which counts every outgoing MQTT CONNACK message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> OUTGOING_CONNACK_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.connack.count", Counter.class);
/**
     * represents a {@link Meter}, which measures the current rate of incoming MQTT PUBLISH messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> INCOMING_PUBLISH_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.publish.rate", Meter.class);
/**
     * represents a {@link Counter}, which counts every incoming MQTT PUBLISH message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> INCOMING_PUBLISH_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.publish.count", Counter.class);
/**
     * represents a {@link Histogram}, which measures the distribution of incoming MQTT PUBLISH message size (payload without fixed header)
*
* @since 3.0
*/
public static final HiveMQMetric<Histogram> INCOMING_PUBLISH_SIZE_MEAN =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.publish.bytes", Histogram.class);
/**
     * represents a {@link Histogram}, which measures the distribution of outgoing MQTT PUBLISH message size (payload without fixed header)
*
* @since 3.0
*/
public static final HiveMQMetric<Histogram> OUTGOING_PUBLISH_SIZE_MEAN =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.publish.bytes", Histogram.class);
/**
* represents a {@link Meter}, which measures the current rate of outgoing MQTT PUBLISH messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> OUTGOING_PUBLISH_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.publish.rate", Meter.class);
/**
     * represents a {@link Counter}, which counts every outgoing MQTT PUBLISH message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> OUTGOING_PUBLISH_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.publish.count", Counter.class);
/**
     * represents a {@link Meter}, which measures the current rate of incoming MQTT DISCONNECT messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> INCOMING_DISCONNECT_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.disconnect.rate", Meter.class);
/**
     * represents a {@link Counter}, which counts every incoming MQTT DISCONNECT message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> INCOMING_DISCONNECT_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.disconnect.count", Counter.class);
/**
     * represents a {@link Meter}, which measures the current rate of incoming MQTT PINGREQ messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> INCOMING_PINGREQ_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.pingreq.rate", Meter.class);
/**
     * represents a {@link Counter}, which counts every incoming MQTT PINGREQ message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> INCOMING_PINGREQ_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.pingreq.count", Counter.class);
/**
     * represents a {@link Meter}, which measures the current rate of outgoing MQTT PINGRESP messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> OUTGOING_PINGRESP_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.pingresp.rate", Meter.class);
/**
     * represents a {@link Counter}, which counts every outgoing MQTT PINGRESP message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> OUTGOING_PINGRESP_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.pingresp.count", Counter.class);
/**
     * represents a {@link Meter}, which measures the current rate of incoming MQTT PUBACK messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> INCOMING_PUBACK_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.puback.rate", Meter.class);
/**
     * represents a {@link Counter}, which counts every incoming MQTT PUBACK message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> INCOMING_PUBACK_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.puback.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of outgoing MQTT PUBACK messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> OUTGOING_PUBACK_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.puback.rate", Meter.class);
/**
     * represents a {@link Counter}, which counts every outgoing MQTT PUBACK message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> OUTGOING_PUBACK_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.puback.count", Counter.class);
/**
     * represents a {@link Meter}, which measures the current rate of incoming MQTT PUBCOMP messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> INCOMING_PUBCOMP_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.pubcomp.rate", Meter.class);
/**
     * represents a {@link Counter}, which counts every incoming MQTT PUBCOMP message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> INCOMING_PUBCOMP_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.pubcomp.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of outgoing MQTT PUBCOMP messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> OUTGOING_PUBCOMP_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.pubcomp.rate", Meter.class);
/**
     * represents a {@link Counter}, which counts every outgoing MQTT PUBCOMP message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> OUTGOING_PUBCOMP_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.pubcomp.count", Counter.class);
/**
     * represents a {@link Meter}, which measures the current rate of incoming MQTT PUBREC messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> INCOMING_PUBREC_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.pubrec.rate", Meter.class);
/**
     * represents a {@link Counter}, which counts every incoming MQTT PUBREC message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> INCOMING_PUBREC_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.pubrec.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of outgoing MQTT PUBREC messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> OUTGOING_PUBREC_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.pubrec.rate", Meter.class);
/**
     * represents a {@link Counter}, which counts every outgoing MQTT PUBREC message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> OUTGOING_PUBREC_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.pubrec.count", Counter.class);
/**
     * represents a {@link Meter}, which measures the current rate of incoming MQTT PUBREL messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> INCOMING_PUBREL_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.pubrel.rate", Meter.class);
/**
     * represents a {@link Counter}, which counts every incoming MQTT PUBREL message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> INCOMING_PUBREL_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.pubrel.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of outgoing MQTT PUBREL messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> OUTGOING_PUBREL_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.pubrel.rate", Meter.class);
/**
     * represents a {@link Counter}, which counts every outgoing MQTT PUBREL message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> OUTGOING_PUBREL_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.pubrel.count", Counter.class);
/**
     * represents a {@link Meter}, which measures the current rate of incoming MQTT SUBSCRIBE messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> INCOMING_SUBSCRIBE_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.subscribe.rate", Meter.class);
/**
     * represents a {@link Counter}, which counts every incoming MQTT SUBSCRIBE message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> INCOMING_SUBSCRIBE_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.subscribe.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of outgoing MQTT SUBACK messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> OUTGOING_SUBACK_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.suback.rate", Meter.class);
/**
     * represents a {@link Counter}, which counts every outgoing MQTT SUBACK message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> OUTGOING_SUBACK_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.suback.count", Counter.class);
/**
     * represents a {@link Meter}, which measures the current rate of incoming MQTT UNSUBSCRIBE messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> INCOMING_UNSUBSCRIBE_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.unsubscribe.rate", Meter.class);
/**
     * represents a {@link Counter}, which counts every incoming MQTT UNSUBSCRIBE message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> INCOMING_UNSUBSCRIBE_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.unsubscribe.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of outgoing MQTT UNSUBACK messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> OUTGOING_UNSUBACK_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.unsuback.rate", Meter.class);
/**
* represents a {@link Gauge}, which holds the current amount of retained messages
*
* @since 3.0
*/
public static final HiveMQMetric<Gauge> RETAINED_MESSAGES_CURRENT =
HiveMQMetric.valueOf("com.hivemq.messages.retained.current", Gauge.class);
/**
     * represents a {@link Histogram}, which measures the distribution of the amount of retained messages
*
* @since 3.0
*/
public static final HiveMQMetric<Histogram> RETAINED_MESSAGES_MEAN =
HiveMQMetric.valueOf("com.hivemq.messages.retained.mean", Histogram.class);
/**
     * represents a {@link Counter}, which counts every outgoing MQTT UNSUBACK message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> OUTGOING_UNSUBACK_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.unsuback.count", Counter.class);
/**
* represents a {@link Gauge}, which holds the current (last 5 seconds) amount of read bytes
*
* @since 3.0
*/
public static final HiveMQMetric<Gauge> BYTES_READ_CURRENT =
HiveMQMetric.valueOf("com.hivemq.networking.bytes.read.current", Gauge.class);
/**
* represents a {@link Gauge}, which holds the current (last 5 seconds) amount of written bytes
*
* @since 3.0
*/
public static final HiveMQMetric<Gauge> BYTES_WRITE_CURRENT =
HiveMQMetric.valueOf("com.hivemq.networking.bytes.write.current", Gauge.class);
/**
* represents a {@link Gauge}, which holds the total amount of read bytes
*
* @since 3.0
*/
public static final HiveMQMetric<Gauge> BYTES_READ_TOTAL =
HiveMQMetric.valueOf("com.hivemq.networking.bytes.read.total", Gauge.class);
/**
* represents a {@link Gauge}, which holds the total amount of written bytes
*
* @since 3.0
*/
public static final HiveMQMetric<Gauge> BYTES_WRITE_TOTAL =
HiveMQMetric.valueOf("com.hivemq.networking.bytes.write.total", Gauge.class);
/**
* represents a {@link Gauge}, which holds the current total number of connections
*
* @since 3.0
*/
public static final HiveMQMetric<Gauge> CONNECTIONS_OVERALL_CURRENT =
HiveMQMetric.valueOf("com.hivemq.networking.connections.current", Gauge.class);
/**
* represents a {@link Histogram}, which measures the mean total number of connections
*
* @since 3.0
*/
public static final HiveMQMetric<Histogram> CONNECTIONS_OVERALL_MEAN =
HiveMQMetric.valueOf("com.hivemq.networking.connections.mean", Histogram.class);
/**
* represents a {@link Counter}, which measures the current count of subscriptions
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> SUBSCRIPTIONS_CURRENT =
HiveMQMetric.valueOf("com.hivemq.subscriptions.overall.current", Counter.class);
/**
* represents a {@link Counter}, which measures the current count of active persistent sessions
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> PERSISTENT_SESSIONS_ACTIVE =
HiveMQMetric.valueOf("com.hivemq.sessions.persistent.active", Counter.class);
/**
* represents a {@link Gauge}, which measures the current count of stored sessions
*
* @since 3.0
*/
public static final HiveMQMetric<Gauge> CLIENT_SESSIONS_CURRENT =
HiveMQMetric.valueOf("com.hivemq.sessions.overall.current", Gauge.class);
/**
* represents a {@link Meter}, which measures the current rate of submitted jobs to the
* {@link PluginExecutorService}
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> PLUGIN_EXECUTOR_SERVICE_SUMBITTED =
HiveMQMetric.valueOf(PLUGIN_EXECUTOR_PREFIX + ".submitted", Meter.class);
/**
* represents a {@link Counter}, which counts the currently running jobs of the
* {@link PluginExecutorService}
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> PLUGIN_EXECUTOR_SERVICE_RUNNING =
HiveMQMetric.valueOf(PLUGIN_EXECUTOR_PREFIX + ".running", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of completed jobs of the
* {@link PluginExecutorService}
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> PLUGIN_EXECUTOR_SERVICE_COMPLETED =
HiveMQMetric.valueOf(PLUGIN_EXECUTOR_PREFIX + ".completed", Meter.class);
/**
* represents a {@link Timer}, which measures the execution time of jobs submitted to the
* {@link PluginExecutorService}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_EXECUTOR_SERVICE_DURATION =
HiveMQMetric.valueOf(PLUGIN_EXECUTOR_PREFIX + ".duration", Timer.class);
/**
* represents a {@link Meter}, which measures the current rate of jobs scheduled once on the
* {@link PluginExecutorService}
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> PLUGIN_EXECUTOR_SERVICE_SCHEDULED_ONCE =
HiveMQMetric.valueOf(PLUGIN_EXECUTOR_PREFIX + ".scheduled.once", Meter.class);
/**
* represents a {@link Meter}, which measures the current rate of repetitively scheduled jobs on the
* {@link PluginExecutorService}
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> PLUGIN_EXECUTOR_SERVICE_SCHEDULED_REPETITIVELY =
HiveMQMetric.valueOf(PLUGIN_EXECUTOR_PREFIX + ".scheduled.repetitively", Meter.class);
/**
* represents a {@link Counter}, which counts scheduled jobs of the
* {@link PluginExecutorService} that ran longer than their scheduled period
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> PLUGIN_EXECUTOR_SERVICE_SCHEDULED_OVERRUN =
HiveMQMetric.valueOf(PLUGIN_EXECUTOR_PREFIX + ".scheduled.overrun", Counter.class);
/**
* represents a {@link Histogram}, which measures the distribution of the period time used by
* scheduled jobs of the {@link PluginExecutorService} (in percent)
*
* @since 3.0
*/
public static final HiveMQMetric<Histogram> PLUGIN_EXECUTOR_SERVICE_SCHEDULED_PERCENT_OF_PERIOD =
HiveMQMetric.valueOf(PLUGIN_EXECUTOR_PREFIX + ".scheduled.percent-of-period", Histogram.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link AfterLoginCallback}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_AFTER_LOGIN_SUCCESS =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.after-login.success.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link AfterLoginCallback}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_AFTER_LOGIN_FAILED =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.after-login.failed.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnAuthenticationCallback}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_AUTHENTICATION =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.authentication.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link RestrictionsAfterLoginCallback}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_RESTRICTIONS =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.restrictions.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnAuthorizationCallback}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_AUTHORIZATION =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.authorization.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnInsufficientPermissionDisconnect}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_PERMISSIONS_DISCONNECT_PUBLISH =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.permissions-disconnect.publish.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnInsufficientPermissionDisconnect}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_PERMISSIONS_DISCONNECT_SUBSCRIBE =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.permissions-disconnect.subscribe.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnConnectCallback}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_CONNECT =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.connect.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnDisconnectCallback}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_DISCONNECT =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.disconnect.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnPublishReceivedCallback}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_PUBLISH_RECEIVED =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.publish-received.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnPublishSend}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_PUBLISH_SEND =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.publish-send.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnSubscribeCallback}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_SUBSCRIBE =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.subscribe.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnPubackSend} callback
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_PUBACK_SEND =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.puback-send.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnPubackReceived} callback
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_PUBACK_RECEIVED =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.puback-received.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnSubackSend} callback
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_SUBACK_SEND =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.suback-send.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnUnsubackSend} callback
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_UNSUBACK_SEND =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.unsuback-send.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnPubcompSend} callback
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_PUBCOMP_SEND =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.pubcomp-send.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnPubcompReceived} callback
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_PUBCOMP_RECEIVED =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.pubcomp-received.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnPubrecSend} callback
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_PUBREC_SEND =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.pubrec-send.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnPubrecReceived} callback
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_PUBREC_RECEIVED =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.pubrec-received.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnPubrelSend} callback
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_PUBREL_SEND =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.pubrel-send.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnPubrelReceived} callback
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_PUBREL_RECEIVED =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.pubrel-received.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnConnackSend} callback
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_CONNACK_SEND =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.connack-send.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnUnsubscribeCallback}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_UNSUBSCRIBE =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.unsubscribe.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnPingCallback}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_PING =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.ping.time", Timer.class);
/**
* represents a {@link Meter}, which measures the rate of unhandled Exceptions
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> TOTAL_EXCEPTION_RATE =
HiveMQMetric.valueOf(EXCEPTION_PREFIX + ".total", Meter.class);
}
| src/main/java/com/hivemq/spi/metrics/HiveMQMetrics.java | package com.hivemq.spi.metrics;
import com.codahale.metrics.*;
import com.hivemq.spi.callback.events.*;
import com.hivemq.spi.callback.security.*;
import com.hivemq.spi.services.PluginExecutorService;
/**
* This class holds a constant {@link HiveMQMetric} for every metric which is provided by HiveMQ
*
* @author Christoph Schäbel
*/
public class HiveMQMetrics {
public static final String PLUGIN_EXECUTOR_PREFIX = "com.hivemq.plugin.executor";
public static final String EXCEPTION_PREFIX = "com.hivemq.exceptions";
/**
* represents a {@link Counter}, which counts every incoming MQTT message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> INCOMING_MESSAGE_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.total.count", Counter.class);
/**
* represents a {@link Counter}, which counts every outgoing MQTT message
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> OUTGOING_MESSAGE_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.total.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of incoming MQTT messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> INCOMING_MESSAGE_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.total.rate", Meter.class);
/**
* represents a {@link Meter}, which measures the current rate of outgoing MQTT messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> OUTGOING_MESSAGE_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.total.rate", Meter.class);
/**
* represents a {@link Histogram}, which measures the distribution of incoming MQTT message size (payload without fixed header)
*
* @since 3.0
*/
public static final HiveMQMetric<Histogram> INCOMING_MESSAGE_SIZE_MEAN =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.total.bytes", Histogram.class);
/**
* represents a {@link Histogram}, which measures the distribution of outgoing MQTT message size (payload without fixed header)
*
* @since 3.0
*/
public static final HiveMQMetric<Histogram> OUTGOING_MESSAGE_SIZE_MEAN =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.total.bytes", Histogram.class);
/**
* represents a {@link Meter}, which measures the current rate of incoming MQTT CONNECT messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> INCOMING_CONNECT_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.connect.rate", Meter.class);
/**
* represents a {@link Counter}, which counts every incoming MQTT CONNECT messages
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> INCOMING_CONNECT_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.connect.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of outgoing MQTT CONNACK messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> OUTGOING_CONNACK_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.connack.rate", Meter.class);
/**
* represents a {@link Counter}, which counts every outgoing MQTT CONNACK messages
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> OUTGOING_CONNACK_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.connack.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of incoming MQTT PUBLISH messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> INCOMING_PUBLISH_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.publish.rate", Meter.class);
/**
* represents a {@link Counter}, which counts every incoming MQTT PUBLISH messages
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> INCOMING_PUBLISH_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.publish.count", Counter.class);
/**
* represents a {@link Histogram}, which measures the distribution of incoming MQTT PUBLISH message size (payload without fixed header)
*
* @since 3.0
*/
public static final HiveMQMetric<Histogram> INCOMING_PUBLISH_SIZE_MEAN =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.publish.bytes", Histogram.class);
/**
* represents a {@link Histogram}, which measures the distribution of outgoing MQTT PUBLISH message size (payload without fixed header)
*
* @since 3.0
*/
public static final HiveMQMetric<Histogram> OUTGOING_PUBLISH_SIZE_MEAN =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.publish.bytes", Histogram.class);
/**
* represents a {@link Meter}, which measures the current rate of outgoing MQTT PUBLISH messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> OUTGOING_PUBLISH_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.publish.rate", Meter.class);
/**
* represents a {@link Counter}, which counts every outgoing MQTT PUBLISH messages
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> OUTGOING_PUBLISH_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.publish.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of incoming MQTT DISCONNECT messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> INCOMING_DISCONNECT_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.disconnect.rate", Meter.class);
/**
* represents a {@link Counter}, which counts every incoming MQTT DISCONNECT messages
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> INCOMING_DISCONNECT_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.disconnect.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of incoming MQTT PINGREQ messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> INCOMING_PINGREQ_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.pingreq.rate", Meter.class);
/**
* represents a {@link Counter}, which counts every incoming MQTT PINGREQ messages
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> INCOMING_PINGREQ_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.pingreq.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of outgoing MQTT PINGRESP messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> OUTGOING_PINGRESP_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.pingresp.rate", Meter.class);
/**
* represents a {@link Counter}, which counts every outgoing MQTT PINGRESP messages
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> OUTGOING_PINGRESP_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.pingresp.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of incoming MQTT PUBACK messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> INCOMING_PUBACK_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.puback.rate", Meter.class);
/**
* represents a {@link Counter}, which counts every incoming MQTT PUBACK messages
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> INCOMING_PUBACK_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.puback.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of outgoing MQTT PUBACK messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> OUTGOING_PUBACK_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.puback.rate", Meter.class);
/**
* represents a {@link Counter}, which counts every outgoing MQTT PUBACK messages
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> OUTGOING_PUBACK_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.puback.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of incoming MQTT PUBCOMP messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> INCOMING_PUBCOMP_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.pubcomp.rate", Meter.class);
/**
* represents a {@link Counter}, which counts every incoming MQTT PUBCOMP messages
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> INCOMING_PUBCOMP_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.pubcomp.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of outgoing MQTT PUBCOMP messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> OUTGOING_PUBCOMP_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.pubcomp.rate", Meter.class);
/**
* represents a {@link Counter}, which counts every outgoing MQTT PUBCOMP messages
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> OUTGOING_PUBCOMP_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.pubcomp.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of incoming MQTT PUBREC messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> INCOMING_PUBREC_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.pubrec.rate", Meter.class);
/**
* represents a {@link Counter}, which counts every incoming MQTT PUBREC messages
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> INCOMING_PUBREC_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.pubrec.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of outgoing MQTT PUBREC messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> OUTGOING_PUBREC_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.pubrec.rate", Meter.class);
/**
* represents a {@link Counter}, which counts every outgoing MQTT PUBREC messages
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> OUTGOING_PUBREC_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.pubrec.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of incoming MQTT PUBREL messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> INCOMING_PUBREL_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.pubrel.rate", Meter.class);
/**
* represents a {@link Counter}, which counts every incoming MQTT PUBREL messages
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> INCOMING_PUBREL_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.pubrel.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of outgoing MQTT PUBREL messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> OUTGOING_PUBREL_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.pubrel.rate", Meter.class);
/**
* represents a {@link Counter}, which counts every outgoing MQTT PUBREL messages
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> OUTGOING_PUBREL_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.pubrel.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of incoming MQTT SUBSCRIBE messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> INCOMING_SUBSCRIBE_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.subscribe.rate", Meter.class);
/**
* represents a {@link Counter}, which counts every incoming MQTT SUBSCRIBE messages
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> INCOMING_SUBSCRIBE_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.subscribe.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of outgoing MQTT SUBACK messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> OUTGOING_SUBACK_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.suback.rate", Meter.class);
/**
* represents a {@link Counter}, which counts every outgoing MQTT SUBACK messages
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> OUTGOING_SUBACK_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.suback.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of incoming MQTT UNSUBSCRIBE messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> INCOMING_UNSUBSCRIBE_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.unsubscribe.rate", Meter.class);
/**
* represents a {@link Counter}, which counts every incoming MQTT UNSUBSCRIBE messages
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> INCOMING_UNSUBSCRIBE_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.incoming.unsubscribe.count", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of outgoing MQTT UNSUBACK messages
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> OUTGOING_UNSUBACK_RATE =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.unsuback.rate", Meter.class);
/**
* represents a {@link Gauge}, which holds the current amount of retained messages
*
* @since 3.0
*/
public static final HiveMQMetric<Gauge> RETAINED_MESSAGES_CURRENT =
HiveMQMetric.valueOf("com.hivemq.messages.retained.current", Gauge.class);
/**
* represents a {@link Histogram}, which measures the mean amount of retained messages
*
* @since 3.0
*/
public static final HiveMQMetric<Histogram> RETAINED_MESSAGES_MEAN =
HiveMQMetric.valueOf("com.hivemq.messages.retained.mean", Histogram.class);
/**
* represents a {@link Counter}, which counts every outgoing MQTT UNSUBACK messages
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> OUTGOING_UNSUBACK_COUNT =
HiveMQMetric.valueOf("com.hivemq.messages.outgoing.unsuback.count", Counter.class);
/**
* represents a {@link Gauge}, which holds the current (last 5 seconds) amount of read bytes
*
* @since 3.0
*/
public static final HiveMQMetric<Gauge> BYTES_READ_CURRENT =
HiveMQMetric.valueOf("com.hivemq.networking.bytes.read.current", Gauge.class);
/**
* represents a {@link Gauge}, which holds the current (last 5 seconds) amount of written bytes
*
* @since 3.0
*/
public static final HiveMQMetric<Gauge> BYTES_WRITE_CURRENT =
HiveMQMetric.valueOf("com.hivemq.networking.bytes.write.current", Gauge.class);
/**
* represents a {@link Gauge}, which holds the total amount of read bytes
*
* @since 3.0
*/
public static final HiveMQMetric<Gauge> BYTES_READ_TOTAL =
HiveMQMetric.valueOf("com.hivemq.networking.bytes.read.total", Gauge.class);
/**
* represents a {@link Gauge}, which holds the total amount of written bytes
*
* @since 3.0
*/
public static final HiveMQMetric<Gauge> BYTES_WRITE_TOTAL =
HiveMQMetric.valueOf("com.hivemq.networking.bytes.write.total", Gauge.class);
/**
* represents a {@link Gauge}, which holds the current total number of connections
*
* @since 3.0
*/
public static final HiveMQMetric<Gauge> CONNECTIONS_OVERALL_CURRENT =
HiveMQMetric.valueOf("com.hivemq.networking.connections.current", Gauge.class);
/**
* represents a {@link Histogram}, which measures the mean total number of connections
*
* @since 3.0
*/
public static final HiveMQMetric<Histogram> CONNECTIONS_OVERALL_MEAN =
HiveMQMetric.valueOf("com.hivemq.networking.connections.mean", Histogram.class);
/**
* represents a {@link Counter}, which measures the current count of subscriptions
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> SUBSCRIPTIONS_CURRENT =
HiveMQMetric.valueOf("com.hivemq.subscriptions.overall.current", Counter.class);
/**
* represents a {@link Counter}, which measures the current count of active persistent sessions
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> PERSISTENT_SESSIONS_ACTIVE =
HiveMQMetric.valueOf("com.hivemq.sessions.persistent.active", Counter.class);
/**
* represents a {@link Gauge}, which measures the current count of stored sessions
*
* @since 3.0
*/
public static final HiveMQMetric<Gauge> CLIENT_SESSIONS_CURRENT =
HiveMQMetric.valueOf("com.hivemq.sessions.overall.current", Gauge.class);
/**
* represents a {@link Meter}, which measures the current rate of submitted jobs to the
* {@link PluginExecutorService}
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> PLUGIN_EXECUTOR_SERVICE_SUMBITTED =
HiveMQMetric.valueOf(PLUGIN_EXECUTOR_PREFIX + ".submitted", Meter.class);
/**
* represents a {@link Counter}, which counts the currently running jobs of the
* {@link PluginExecutorService}
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> PLUGIN_EXECUTOR_SERVICE_RUNNING =
HiveMQMetric.valueOf(PLUGIN_EXECUTOR_PREFIX + ".running", Counter.class);
/**
* represents a {@link Meter}, which measures the current rate of completed jobs of the
* {@link PluginExecutorService}
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> PLUGIN_EXECUTOR_SERVICE_COMPLETED =
HiveMQMetric.valueOf(PLUGIN_EXECUTOR_PREFIX + ".completed", Meter.class);
/**
* represents a {@link Timer}, which measures the execution time of jobs submitted to the
* {@link PluginExecutorService}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_EXECUTOR_SERVICE_DURATION =
HiveMQMetric.valueOf(PLUGIN_EXECUTOR_PREFIX + ".duration", Timer.class);
/**
* represents a {@link Meter}, which measures the current rate of jobs scheduled once on the
* {@link PluginExecutorService}
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> PLUGIN_EXECUTOR_SERVICE_SCHEDULED_ONCE =
HiveMQMetric.valueOf(PLUGIN_EXECUTOR_PREFIX + ".scheduled.once", Meter.class);
/**
* represents a {@link Meter}, which measures the current rate of repetitively scheduled jobs on the
* {@link PluginExecutorService}
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> PLUGIN_EXECUTOR_SERVICE_SCHEDULED_REPETITIVELY =
HiveMQMetric.valueOf(PLUGIN_EXECUTOR_PREFIX + ".scheduled.repetitively", Meter.class);
/**
* represents a {@link Counter}, which counts scheduled jobs of the
* {@link PluginExecutorService} that ran longer than their scheduled period
*
* @since 3.0
*/
public static final HiveMQMetric<Counter> PLUGIN_EXECUTOR_SERVICE_SCHEDULED_OVERRUN =
HiveMQMetric.valueOf(PLUGIN_EXECUTOR_PREFIX + ".scheduled.overrun", Counter.class);
/**
* represents a {@link Histogram}, which measures the distribution of the period time used by
* scheduled jobs of the {@link PluginExecutorService} (in percent)
*
* @since 3.0
*/
public static final HiveMQMetric<Histogram> PLUGIN_EXECUTOR_SERVICE_SCHEDULED_PERCENT_OF_PERIOD =
HiveMQMetric.valueOf(PLUGIN_EXECUTOR_PREFIX + ".scheduled.percent-of-period", Histogram.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link AfterLoginCallback}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_AFTER_LOGIN_SUCCESS =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.after-login.success.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link AfterLoginCallback}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_AFTER_LOGIN_FAILED =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.after-login.failed.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnAuthenticationCallback}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_AUTHENTICATION =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.authentication.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link RestrictionsAfterLoginCallback}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_RESTRICTIONS =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.restrictions.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnAuthorizationCallback}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_AUTHORIZATION =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.authorization.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnInsufficientPermissionDisconnect}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_PERMISSIONS_DISCONNECT_PUBLISH =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.permissions-disconnect.publish.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnInsufficientPermissionDisconnect}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_PERMISSIONS_DISCONNECT_SUBSCRIBE =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.permissions-disconnect.subscribe.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnConnectCallback}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_CONNECT =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.connect.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnDisconnectCallback}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_DISCONNECT =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.disconnect.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnPublishReceivedCallback}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_PUBLISH_RECEIVED =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.publish-received.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnPublishSend}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_PUBLISH_SEND =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.publish-send.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnSubscribeCallback}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_SUBSCRIBE =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.subscribe.time", Timer.class);
/**
* represents a {@link Timer}, which measures the mean execution time (in nanoseconds)
* of the {@link OnUnsubscribeCallback}
*
* @since 3.0
*/
public static final HiveMQMetric<Timer> PLUGIN_TIMER_UNSUBSCRIBE =
HiveMQMetric.valueOf("com.hivemq.plugin.callbacks.unsubscribe.time", Timer.class);
/**
* represents a {@link Meter}, which measures the rate of unhandled Exceptions
*
* @since 3.0
*/
public static final HiveMQMetric<Meter> TOTAL_EXCEPTION_RATE =
HiveMQMetric.valueOf(EXCEPTION_PREFIX + ".total", Meter.class);
}
| add missing metrics for lowlevel callbacks
| src/main/java/com/hivemq/spi/metrics/HiveMQMetrics.java | add missing metrics for lowlevel callbacks |
|
Java | apache-2.0 | cbb023a1663977bdc2c9af642b799be088fe73ec | 0 | phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida,phac-nml/irida | package ca.corefacility.bioinformatics.irida.service.impl;
import ca.corefacility.bioinformatics.irida.model.*;
import ca.corefacility.bioinformatics.irida.model.joins.Join;
import ca.corefacility.bioinformatics.irida.model.joins.impl.ProjectSampleJoin;
import ca.corefacility.bioinformatics.irida.repositories.CRUDRepository;
import ca.corefacility.bioinformatics.irida.repositories.ProjectRepository;
import ca.corefacility.bioinformatics.irida.repositories.UserRepository;
import ca.corefacility.bioinformatics.irida.service.ProjectService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.security.access.prepost.PreAuthorize;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.transaction.annotation.Transactional;
import javax.validation.ConstraintViolation;
import javax.validation.ConstraintViolationException;
import javax.validation.Validator;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* A specialized service layer for projects.
*
* @author Franklin Bristow <[email protected]>
*/
public class ProjectServiceImpl extends CRUDServiceImpl<Long, Project> implements ProjectService {
private static final Logger logger = LoggerFactory.getLogger(ProjectServiceImpl.class);
private ProjectRepository projectRepository;
private CRUDRepository<Long, Sample> sampleRepository;
private UserRepository userRepository;
public ProjectServiceImpl(ProjectRepository projectRepository, CRUDRepository<Long, Sample> sampleRepository,
UserRepository userRepository, Validator validator) {
super(projectRepository, validator, Project.class);
this.projectRepository = projectRepository;
this.sampleRepository = sampleRepository;
this.userRepository = userRepository;
}
/**
* {@inheritDoc}
*/
@Override
@PreAuthorize("hasRole('ROLE_USER')")
@Transactional
public Project create(Project p) {
Project project = super.create(p);
UserDetails userDetails = (UserDetails) SecurityContextHolder.getContext().getAuthentication().getPrincipal();
User user = userRepository.getUserByUsername(userDetails.getUsername());
addUserToProject(project, user, null);
return project;
}
/**
* {@inheritDoc}
*/
@Override
@Transactional
public Join<Project, User> addUserToProject(Project project, User user, Role role) {
return projectRepository.addUserToProject(project, user);
}
/**
* {@inheritDoc}
*/
@Override
@Transactional
public void removeUserFromProject(Project project, User user) {
projectRepository.removeUserFromProject(project, user);
}
/**
* {@inheritDoc}
*/
@Override
@Transactional
public ProjectSampleJoin addSampleToProject(Project project, Sample sample) {
logger.trace("Adding sample to project.");
// If the sample hasn't been persisted before, persist it before calling
// the relationshipRepository.
if (sample.getId() == null) {
logger.trace("Going to validate and persist sample prior to creating relationship.");
// validate the sample, then persist it:
Set<ConstraintViolation<Sample>> constraintViolations = validator.validate(sample);
if (constraintViolations.isEmpty()) {
sample = sampleRepository.create(sample);
} else {
throw new ConstraintViolationException(new HashSet<ConstraintViolation<?>>(constraintViolations));
}
}
return projectRepository.addSampleToProject(project, sample);
}
/**
* {@inheritDoc}
*/
@Override
@Transactional
public void removeSampleFromProject(Project project, Sample sample) {
projectRepository.removeSampleFromProject(project, sample);
}
/**
* {@inheritDoc}
*/
@SuppressWarnings("deprecation")
@Override
public void removeSequenceFileFromProject(Project project, SequenceFile sf) {
projectRepository.removeFileFromProject(project, sf);
}
/**
* {@inheritDoc}
*/
@Override
@Transactional(readOnly = true)
public List<Join<Project, User>> getProjectsForUser(User user) {
return new ArrayList<Join<Project, User>>(projectRepository.getProjectsForUser(user));
}
}
| src/main/java/ca/corefacility/bioinformatics/irida/service/impl/ProjectServiceImpl.java | package ca.corefacility.bioinformatics.irida.service.impl;
import ca.corefacility.bioinformatics.irida.model.*;
import ca.corefacility.bioinformatics.irida.model.joins.Join;
import ca.corefacility.bioinformatics.irida.model.joins.impl.ProjectSampleJoin;
import ca.corefacility.bioinformatics.irida.repositories.CRUDRepository;
import ca.corefacility.bioinformatics.irida.repositories.ProjectRepository;
import ca.corefacility.bioinformatics.irida.repositories.UserRepository;
import ca.corefacility.bioinformatics.irida.service.ProjectService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.core.userdetails.UserDetails;
import org.springframework.transaction.annotation.Transactional;
import javax.validation.ConstraintViolation;
import javax.validation.ConstraintViolationException;
import javax.validation.Validator;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* A specialized service layer for projects.
*
* @author Franklin Bristow <[email protected]>
*/
public class ProjectServiceImpl extends CRUDServiceImpl<Long, Project> implements ProjectService {
private static final Logger logger = LoggerFactory.getLogger(ProjectServiceImpl.class);
private ProjectRepository projectRepository;
private CRUDRepository<Long, Sample> sampleRepository;
private UserRepository userRepository;
public ProjectServiceImpl(ProjectRepository projectRepository,
CRUDRepository<Long, Sample> sampleRepository,
UserRepository userRepository, Validator validator) {
super(projectRepository, validator, Project.class);
this.projectRepository = projectRepository;
this.sampleRepository = sampleRepository;
this.userRepository = userRepository;
}
/**
* {@inheritDoc}
*/
@Override
@Transactional
public Project create(Project p) {
Project project = super.create(p);
UserDetails userDetails = (UserDetails) SecurityContextHolder.getContext().getAuthentication().getPrincipal();
User user = userRepository.getUserByUsername(userDetails.getUsername());
addUserToProject(project, user, null);
return project;
}
/**
* {@inheritDoc}
*/
@Override
public Join<Project, User> addUserToProject(Project project, User user, Role role) {
return projectRepository.addUserToProject(project, user);
}
/**
* {@inheritDoc}
*/
@Override
public void removeUserFromProject(Project project, User user) {
projectRepository.removeUserFromProject(project, user);
}
/**
* {@inheritDoc}
*/
@Override
public ProjectSampleJoin addSampleToProject(Project project, Sample sample) {
logger.trace("Adding sample to project.");
// If the sample hasn't been persisted before, persist it before calling the relationshipRepository.
if (sample.getId() == null) {
logger.trace("Going to validate and persist sample prior to creating relationship.");
// validate the sample, then persist it:
Set<ConstraintViolation<Sample>> constraintViolations = validator.validate(sample);
if (constraintViolations.isEmpty()) {
sample = sampleRepository.create(sample);
} else {
throw new ConstraintViolationException(new HashSet<ConstraintViolation<?>>(constraintViolations));
}
}
return projectRepository.addSampleToProject(project, sample);
}
/**
* {@inheritDoc}
*/
@Override
public void removeSampleFromProject(Project project, Sample sample) {
projectRepository.removeSampleFromProject(project, sample);
}
/**
* {@inheritDoc}
*/
@SuppressWarnings("deprecation")
@Override
public void removeSequenceFileFromProject(Project project, SequenceFile sf) {
projectRepository.removeFileFromProject(project, sf);
}
/**
* {@inheritDoc}
*/
@Override
@Transactional(readOnly = true)
public List<Join<Project, User>> getProjectsForUser(User user) {
return new ArrayList<Join<Project, User>>(projectRepository.getProjectsForUser(user));
}
}
| Basic role-based permissions for projects.
| src/main/java/ca/corefacility/bioinformatics/irida/service/impl/ProjectServiceImpl.java | Basic role-based permissions for projects. |
|
Java | apache-2.0 | f140336a19382c7f5e0d48364874129236878246 | 0 | Zverik/omim,Komzpa/omim,kw217/omim,simon247/omim,mapsme/omim,vladon/omim,albertshift/omim,goblinr/omim,bykoianko/omim,trashkalmar/omim,krasin/omim,Volcanoscar/omim,lydonchandra/omim,lydonchandra/omim,Komzpa/omim,yunikkk/omim,milchakov/omim,trashkalmar/omim,krasin/omim,vladon/omim,mapsme/omim,ygorshenin/omim,krasin/omim,ygorshenin/omim,dkorolev/omim,sidorov-panda/omim,lydonchandra/omim,darina/omim,dobriy-eeh/omim,kw217/omim,edl00k/omim,Saicheg/omim,simon247/omim,edl00k/omim,sidorov-panda/omim,mpimenov/omim,stangls/omim,rokuz/omim,Volcanoscar/omim,krasin/omim,Komzpa/omim,albertshift/omim,felipebetancur/omim,ygorshenin/omim,Zverik/omim,mpimenov/omim,VladiMihaylenko/omim,syershov/omim,mgsergio/omim,dobriy-eeh/omim,vladon/omim,albertshift/omim,gardster/omim,vng/omim,alexzatsepin/omim,andrewshadura/omim,dobriy-eeh/omim,guard163/omim,vladon/omim,ygorshenin/omim,Zverik/omim,Komzpa/omim,Transtech/omim,vasilenkomike/omim,simon247/omim,mpimenov/omim,alexzatsepin/omim,augmify/omim,dobriy-eeh/omim,alexzatsepin/omim,matsprea/omim,edl00k/omim,trashkalmar/omim,VladiMihaylenko/omim,lydonchandra/omim,UdjinM6/omim,guard163/omim,augmify/omim,alexzatsepin/omim,trashkalmar/omim,therearesomewhocallmetim/omim,bykoianko/omim,wersoo/omim,stangls/omim,gardster/omim,dobriy-eeh/omim,bykoianko/omim,milchakov/omim,andrewshadura/omim,albertshift/omim,yunikkk/omim,simon247/omim,65apps/omim,sidorov-panda/omim,wersoo/omim,syershov/omim,vng/omim,syershov/omim,vng/omim,UdjinM6/omim,rokuz/omim,felipebetancur/omim,sidorov-panda/omim,mgsergio/omim,programming086/omim,Saicheg/omim,Transtech/omim,jam891/omim,alexzatsepin/omim,Zverik/omim,bykoianko/omim,jam891/omim,wersoo/omim,mapsme/omim,augmify/omim,milchakov/omim,matsprea/omim,jam891/omim,VladiMihaylenko/omim,igrechuhin/omim,alexzatsepin/omim,gardster/omim,Transtech/omim,milchakov/omim,dkorolev/omim,alexzatsepin/omim,65apps/omim,felipebetancur/omim,felipebetancur/omim,AlexanderMatveenko/omim,bykoianko/omim,edl00k/omim,dkorolev/omim,Saicheg/omim,mgsergio/omim,edl00k/omim,krasin/omim,guard163/omim,syershov/omim,milchakov/omim,Zverik/omim,vasilenkomike/omim,edl00k/omim,vng/omim,Transtech/omim,yunikkk/omim,mpimenov/omim,mgsergio/omim,therearesomewhocallmetim/omim,yunikkk/omim,igrechuhin/omim,Saicheg/omim,edl00k/omim,Endika/omim,Endika/omim,krasin/omim,mgsergio/omim,yunikkk/omim,goblinr/omim,TimurTarasenko/omim,ygorshenin/omim,gardster/omim,mapsme/omim,syershov/omim,Transtech/omim,yunikkk/omim,mpimenov/omim,milchakov/omim,TimurTarasenko/omim,albertshift/omim,Zverik/omim,Volcanoscar/omim,VladiMihaylenko/omim,Transtech/omim,krasin/omim,ygorshenin/omim,kw217/omim,Komzpa/omim,andrewshadura/omim,Endika/omim,yunikkk/omim,AlexanderMatveenko/omim,milchakov/omim,darina/omim,Transtech/omim,AlexanderMatveenko/omim,Transtech/omim,vng/omim,augmify/omim,milchakov/omim,stangls/omim,igrechuhin/omim,Zverik/omim,vng/omim,alexzatsepin/omim,guard163/omim,dkorolev/omim,UdjinM6/omim,goblinr/omim,Transtech/omim,goblinr/omim,65apps/omim,victorbriz/omim,milchakov/omim,guard163/omim,igrechuhin/omim,Komzpa/omim,AlexanderMatveenko/omim,rokuz/omim,UdjinM6/omim,therearesomewhocallmetim/omim,goblinr/omim,goblinr/omim,UdjinM6/omim,alexzatsepin/omim,mpimenov/omim,jam891/omim,milchakov/omim,Endika/omim,victorbriz/omim,vladon/omim,Endika/omim,gardster/omim,TimurTarasenko/omim,AlexanderMatveenko/omim,kw217/omim,syershov/omim,dobriy-eeh/omim,matsprea/omim,programming086/omim,programming086/omim,matsprea/omim,vng/omim,dobriy-eeh/omim,Zverik/omim,Endika/omi
m,victorbriz/omim,stangls/omim,mapsme/omim,andrewshadura/omim,programming086/omim,igrechuhin/omim,simon247/omim,trashkalmar/omim,wersoo/omim,bykoianko/omim,mpimenov/omim,65apps/omim,Komzpa/omim,alexzatsepin/omim,dkorolev/omim,ygorshenin/omim,trashkalmar/omim,dkorolev/omim,syershov/omim,mgsergio/omim,darina/omim,victorbriz/omim,syershov/omim,jam891/omim,TimurTarasenko/omim,rokuz/omim,rokuz/omim,sidorov-panda/omim,AlexanderMatveenko/omim,programming086/omim,ygorshenin/omim,UdjinM6/omim,goblinr/omim,simon247/omim,Zverik/omim,65apps/omim,victorbriz/omim,Transtech/omim,goblinr/omim,vladon/omim,trashkalmar/omim,ygorshenin/omim,vladon/omim,kw217/omim,mapsme/omim,trashkalmar/omim,gardster/omim,Saicheg/omim,augmify/omim,felipebetancur/omim,mpimenov/omim,andrewshadura/omim,guard163/omim,UdjinM6/omim,vasilenkomike/omim,victorbriz/omim,lydonchandra/omim,sidorov-panda/omim,syershov/omim,andrewshadura/omim,lydonchandra/omim,rokuz/omim,andrewshadura/omim,therearesomewhocallmetim/omim,syershov/omim,felipebetancur/omim,dobriy-eeh/omim,milchakov/omim,vladon/omim,dobriy-eeh/omim,rokuz/omim,AlexanderMatveenko/omim,darina/omim,VladiMihaylenko/omim,vladon/omim,darina/omim,mpimenov/omim,edl00k/omim,gardster/omim,stangls/omim,gardster/omim,goblinr/omim,felipebetancur/omim,vasilenkomike/omim,kw217/omim,Zverik/omim,Volcanoscar/omim,albertshift/omim,matsprea/omim,lydonchandra/omim,UdjinM6/omim,kw217/omim,sidorov-panda/omim,TimurTarasenko/omim,krasin/omim,albertshift/omim,jam891/omim,albertshift/omim,Zverik/omim,syershov/omim,trashkalmar/omim,bykoianko/omim,matsprea/omim,Komzpa/omim,therearesomewhocallmetim/omim,yunikkk/omim,guard163/omim,matsprea/omim,albertshift/omim,edl00k/omim,bykoianko/omim,stangls/omim,UdjinM6/omim,andrewshadura/omim,wersoo/omim,ygorshenin/omim,mapsme/omim,darina/omim,VladiMihaylenko/omim,yunikkk/omim,Zverik/omim,igrechuhin/omim,programming086/omim,darina/omim,UdjinM6/omim,goblinr/omim,Transtech/omim,kw217/omim,rokuz/omim,mgsergio/omim,therearesomewhocallmetim/omim,bykoianko/omim,TimurTarasenko/omim,sidorov-panda/omim,Komzpa/omim,stangls/omim,trashkalmar/omim,vasilenkomike/omim,stangls/omim,Endika/omim,wersoo/omim,victorbriz/omim,simon247/omim,Saicheg/omim,mpimenov/omim,goblinr/omim,65apps/omim,gardster/omim,albertshift/omim,trashkalmar/omim,jam891/omim,sidorov-panda/omim,augmify/omim,jam891/omim,trashkalmar/omim,therearesomewhocallmetim/omim,AlexanderMatveenko/omim,mapsme/omim,stangls/omim,vng/omim,syershov/omim,simon247/omim,simon247/omim,Endika/omim,bykoianko/omim,therearesomewhocallmetim/omim,vng/omim,dobriy-eeh/omim,65apps/omim,wersoo/omim,65apps/omim,Zverik/omim,kw217/omim,Saicheg/omim,victorbriz/omim,alexzatsepin/omim,rokuz/omim,dobriy-eeh/omim,Volcanoscar/omim,programming086/omim,krasin/omim,vasilenkomike/omim,ygorshenin/omim,mapsme/omim,darina/omim,mapsme/omim,rokuz/omim,Volcanoscar/omim,syershov/omim,Transtech/omim,rokuz/omim,rokuz/omim,VladiMihaylenko/omim,kw217/omim,igrechuhin/omim,wersoo/omim,goblinr/omim,AlexanderMatveenko/omim,Saicheg/omim,matsprea/omim,guard163/omim,vasilenkomike/omim,TimurTarasenko/omim,dobriy-eeh/omim,Saicheg/omim,therearesomewhocallmetim/omim,gardster/omim,igrechuhin/omim,AlexanderMatveenko/omim,VladiMihaylenko/omim,Volcanoscar/omim,dkorolev/omim,milchakov/omim,programming086/omim,mgsergio/omim,65apps/omim,mpimenov/omim,victorbriz/omim,stangls/omim,therearesomewhocallmetim/omim,VladiMihaylenko/omim,mgsergio/omim,felipebetancur/omim,VladiMihaylenko/omim,lydonchandra/omim,dkorolev/omim,VladiMihaylenko/omim,igrechuhin/omim,matsprea/omim,alexzatsepin/omim,andrewsh
adura/omim,felipebetancur/omim,milchakov/omim,augmify/omim,lydonchandra/omim,augmify/omim,65apps/omim,Volcanoscar/omim,mapsme/omim,programming086/omim,guard163/omim,bykoianko/omim,augmify/omim,vasilenkomike/omim,darina/omim,goblinr/omim,programming086/omim,darina/omim,vng/omim,Volcanoscar/omim,Endika/omim,augmify/omim,matsprea/omim,rokuz/omim,Volcanoscar/omim,TimurTarasenko/omim,simon247/omim,mgsergio/omim,bykoianko/omim,dkorolev/omim,bykoianko/omim,TimurTarasenko/omim,dobriy-eeh/omim,TimurTarasenko/omim,andrewshadura/omim,jam891/omim,wersoo/omim,Endika/omim,dkorolev/omim,VladiMihaylenko/omim,darina/omim,vasilenkomike/omim,mpimenov/omim,krasin/omim,yunikkk/omim,wersoo/omim,lydonchandra/omim,mpimenov/omim,Saicheg/omim,ygorshenin/omim,sidorov-panda/omim,igrechuhin/omim,vasilenkomike/omim,mgsergio/omim,Komzpa/omim,darina/omim,mapsme/omim,65apps/omim,victorbriz/omim,jam891/omim,edl00k/omim,alexzatsepin/omim,mgsergio/omim,mapsme/omim,guard163/omim,yunikkk/omim,vladon/omim,darina/omim,stangls/omim,felipebetancur/omim,VladiMihaylenko/omim | package com.mapswithme.maps;
import java.util.Locale;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Point;
import android.location.Location;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.telephony.TelephonyManager;
import android.text.TextUtils;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.Toast;
import com.mapswithme.maps.bookmarks.BookmarkActivity;
import com.mapswithme.maps.bookmarks.BookmarkCategoriesActivity;
import com.mapswithme.maps.bookmarks.PopupLayout;
import com.mapswithme.maps.bookmarks.data.Bookmark;
import com.mapswithme.maps.bookmarks.data.BookmarkManager;
import com.mapswithme.maps.bookmarks.data.ParcelablePoint;
import com.mapswithme.maps.location.LocationService;
import com.mapswithme.maps.settings.UnitLocale;
import com.mapswithme.util.ConnectionState;
import com.mapswithme.util.Utils;
import com.nvidia.devtech.NvEventQueueActivity;
public class MWMActivity extends NvEventQueueActivity implements LocationService.Listener
{
private static final int PRO_VERSION_DIALOG = 110001;
private static final String PRO_VERSION_DIALOG_MSG = "pro_version_dialog_msg";
//VideoTimer m_timer;
private static String TAG = "MWMActivity";
private MWMApplication mApplication = null;
private BroadcastReceiver m_externalStorageReceiver = null;
private AlertDialog m_storageDisconnectedDialog = null;
private BookmarkManager m_BookmarkManager;
//showDialog(int, Bundle) available only from API 8
private String mProDialogMessage;
private interface OnLongClickListener
{
void onLongClick(int x, int y);
}
private interface OnClickListenter
{
void onClick(int x, int y);
}
private native void addOnLongClickListener(Object l);
private native void removeOnLongClickListener();
private native void addOnClickListener(Object l);
private native void removeOnClickListener();
private LocationService getLocationService()
{
return mApplication.getLocationService();
}
private MapStorage getMapStorage()
{
return mApplication.getMapStorage();
}
private LocationState getLocationState()
{
return mApplication.getLocationState();
}
private void startLocation()
{
getLocationState().onStartLocation();
resumeLocation();
}
private void stopLocation()
{
getLocationState().onStopLocation();
pauseLocation();
}
private void pauseLocation()
{
getLocationService().stopUpdate(this);
// Enable automatic turning screen off while app is idle
Utils.automaticIdleScreen(true, getWindow());
}
private void resumeLocation()
{
getLocationService().startUpdate(this);
// Do not turn off the screen while displaying position
Utils.automaticIdleScreen(false, getWindow());
}
public void checkShouldResumeLocationService()
{
final View v = findViewById(R.id.map_button_myposition);
if (v != null)
{
final LocationState state = getLocationState();
final boolean hasPosition = state.hasPosition();
// check if we need to start location observing
int resID = 0;
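// resID stays 0 only when there is neither a fix nor a pending first-position
// search, i.e. the button was off; in that case location updates are not resumed.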
if (hasPosition)
resID = R.drawable.myposition_button_found;
else if (state.isFirstPosition())
resID = R.drawable.myposition_button_normal;
if (resID != 0)
{
if (hasPosition && (state.getCompassProcessMode() == LocationState.COMPASS_FOLLOW))
{
state.startCompassFollowing();
v.setBackgroundResource(R.drawable.myposition_button_follow);
}
else
v.setBackgroundResource(resID);
v.setSelected(true);
// Start observing at the end (the button state can change here from normal to found).
resumeLocation();
}
else
{
v.setBackgroundResource(R.drawable.myposition_button_normal);
v.setSelected(false);
}
}
}
public void OnDownloadCountryClicked()
{
runOnUiThread(new Runnable()
{
@Override
public void run()
{
nativeDownloadCountry();
}
});
}
@Override
public void OnRenderingInitialized()
{
runOnUiThread(new Runnable()
{
@Override
public void run()
{
// Run all checks in main thread after rendering is initialized.
checkMeasurementSystem();
checkUpdateMaps();
checkFacebookDialog();
checkBuyProDialog();
}
});
}
private Activity getActivity() { return this; }
@Override
public void ReportUnsupported()
{
runOnUiThread(new Runnable()
{
@Override
public void run()
{
new AlertDialog.Builder(getActivity())
.setMessage(getString(R.string.unsupported_phone))
.setCancelable(false)
.setPositiveButton(getString(R.string.close), new DialogInterface.OnClickListener()
{
@Override
public void onClick(DialogInterface dlg, int which)
{
getActivity().moveTaskToBack(true);
dlg.dismiss();
}
})
.create()
.show();
}
});
}
private void checkMeasurementSystem()
{
UnitLocale.initializeCurrentUnits();
}
private native void nativeScale(double k);
public void onPlusClicked(View v)
{
nativeScale(3.0 / 2);
}
public void onMinusClicked(View v)
{
nativeScale(2.0 / 3);
}
public void onBookmarksClicked(View v)
{
if (!mApplication.isProVersion())
{
showProVersionBanner(getString(R.string.bookmarks_in_pro_version));
}
else
{
startActivity(new Intent(this, BookmarkCategoriesActivity.class));
}
}
public void onMyPositionClicked(View v)
{
final LocationState state = mApplication.getLocationState();
if (!state.hasPosition())
{
if (!state.isFirstPosition())
{
// If first time pressed - start location observing:
// Set the button state to "searching" first ...
v.setBackgroundResource(R.drawable.myposition_button_normal);
v.setSelected(true);
// ... and then call startLocation, as there could be my_position button
// state changes in the startLocation.
startLocation();
return;
}
}
else
{
if (!state.isCentered())
{
state.animateToPositionAndEnqueueLocationProcessMode(LocationState.LOCATION_CENTER_ONLY);
v.setSelected(true);
return;
}
else
if (mApplication.isProVersion())
{
// Check if we need to start compass following.
if (state.hasCompass())
{
if (state.getCompassProcessMode() != LocationState.COMPASS_FOLLOW)
{
state.startCompassFollowing();
v.setBackgroundResource(R.drawable.myposition_button_follow);
v.setSelected(true);
return;
}
else
state.stopCompassFollowingAndRotateMap();
}
}
}
// Turn off location search:
// Stop location observing first ...
stopLocation();
// ... and then set button state to default.
v.setBackgroundResource(R.drawable.myposition_button_normal);
v.setSelected(false);
}
private boolean m_needCheckUpdate = true;
private PopupLayout m_popupLayout;
private void checkUpdateMaps()
{
// do it only once
if (m_needCheckUpdate)
{
m_needCheckUpdate = false;
getMapStorage().updateMaps(R.string.advise_update_maps, this, new MapStorage.UpdateFunctor()
{
@Override
public void doUpdate()
{
runDownloadActivity();
}
@Override
public void doCancel()
{
}
});
}
}
@Override
public void onConfigurationChanged(Configuration newConfig)
{
super.onConfigurationChanged(newConfig);
alignZoomButtons();
}
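// Builds the shared OK/Never/Later dialog used by the Facebook-share and buy-Pro prompts; the user's choice is persisted via submitDialogResult().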
private void showDialogImpl(final int dlgID, int resMsg, DialogInterface.OnClickListener okListener)
{
new AlertDialog.Builder(this)
.setCancelable(false)
.setMessage(getString(resMsg))
.setPositiveButton(getString(R.string.ok), okListener)
.setNeutralButton(getString(R.string.never), new DialogInterface.OnClickListener()
{
@Override
public void onClick(DialogInterface dlg, int which)
{
dlg.dismiss();
mApplication.submitDialogResult(dlgID, MWMApplication.NEVER);
}
})
.setNegativeButton(getString(R.string.later), new DialogInterface.OnClickListener()
{
@Override
public void onClick(DialogInterface dlg, int which)
{
dlg.dismiss();
mApplication.submitDialogResult(dlgID, MWMApplication.LATER);
}
})
.create()
.show();
}
private void showFacebookPage()
{
Intent intent = null;
try
{
// Trying to find package with installed Facebook application.
// Exception is thrown if we don't have one.
getPackageManager().getPackageInfo("com.facebook.katana", 0);
// Profile id is taken from http://graph.facebook.com/MapsWithMe
intent = new Intent(Intent.ACTION_VIEW, Uri.parse("fb://profile/111923085594432"));
// throws ActivityNotFoundException
startActivity(intent);
}
catch (Exception e)
{
// Show Facebook page in browser.
intent = new Intent(Intent.ACTION_VIEW, Uri.parse("https://www.facebook.com/MapsWithMe"));
startActivity(intent);
}
}
private boolean isChinaISO(String iso)
{
String[] arr = { "CN", "CHN", "HK", "HKG", "MO", "MAC" };
for (String s : arr)
if (iso.equalsIgnoreCase(s))
return true;
return false;
}
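// Region heuristic: try the network country from TelephonyManager (skipped on CDMA), then the last known location, then the default locale.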
private boolean isChinaRegion()
{
final TelephonyManager tm = (TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE);
if (tm != null && tm.getPhoneType() != TelephonyManager.PHONE_TYPE_CDMA)
{
final String iso = tm.getNetworkCountryIso();
Log.i(TAG, "TelephonyManager country ISO = " + iso);
if (isChinaISO(iso))
return true;
}
else
{
final Location l = mApplication.getLocationService().getLastKnown();
if (l != null && nativeIsInChina(l.getLatitude(), l.getLongitude()))
return true;
else
{
final String code = Locale.getDefault().getCountry();
Log.i(TAG, "Locale country ISO = " + code);
if (isChinaISO(code))
return true;
}
}
return false;
}
private void checkFacebookDialog()
{
if ((ConnectionState.getState(this) != ConnectionState.NOT_CONNECTED) &&
mApplication.shouldShowDialog(MWMApplication.FACEBOOK) &&
!isChinaRegion())
{
showDialogImpl(MWMApplication.FACEBOOK, R.string.share_on_facebook_text,
new DialogInterface.OnClickListener()
{
@Override
public void onClick(DialogInterface dlg, int which)
{
mApplication.submitDialogResult(MWMApplication.FACEBOOK, MWMApplication.OK);
dlg.dismiss();
showFacebookPage();
}
});
}
}
private void showProVersionBanner(final String message)
{
mProDialogMessage = message;
runOnUiThread(new Runnable()
{
@Override
public void run()
{
showDialog(PRO_VERSION_DIALOG);
}
});
}
private void checkBuyProDialog()
{
if (!mApplication.isProVersion() &&
(ConnectionState.getState(this) != ConnectionState.NOT_CONNECTED) &&
mApplication.shouldShowDialog(MWMApplication.BUYPRO))
{
showDialogImpl(MWMApplication.BUYPRO, R.string.pro_version_available,
new DialogInterface.OnClickListener()
{
@Override
public void onClick(DialogInterface dlg, int which)
{
mApplication.submitDialogResult(MWMApplication.BUYPRO, MWMApplication.OK);
Intent i = new Intent(Intent.ACTION_VIEW, Uri.parse(mApplication.getProVersionURL()));
dlg.dismiss();
startActivity(i);
}
});
}
}
private void runSearchActivity()
{
startActivity(new Intent(this, SearchActivity.class));
}
public void onSearchClicked(View v)
{
if (!mApplication.isProVersion())
{
showProVersionBanner(getString(R.string.search_available_in_pro_version));
}
else
{
if (!getMapStorage().updateMaps(R.string.search_update_maps, this, new MapStorage.UpdateFunctor()
{
@Override
public void doUpdate()
{
runDownloadActivity();
}
@Override
public void doCancel()
{
runSearchActivity();
}
}))
{
runSearchActivity();
}
}
}
@Override
public boolean onSearchRequested()
{
onSearchClicked(null);
return false;
}
private void runDownloadActivity()
{
startActivity(new Intent(this, DownloadUI.class));
}
public void onDownloadClicked(View v)
{
runDownloadActivity();
}
@Override
public void onCreate(Bundle savedInstanceState)
{
// Use full-screen on Kindle Fire only
if (Utils.isKindleFire())
{
getWindow().addFlags(android.view.WindowManager.LayoutParams.FLAG_FULLSCREEN);
getWindow().clearFlags(android.view.WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
}
super.onCreate(savedInstanceState);
mApplication = (MWMApplication)getApplication();
// Do not turn off the screen while benchmarking
if (mApplication.nativeIsBenchmarking())
getWindow().addFlags(android.view.WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
nativeSetString("country_status_added_to_queue", getString(R.string.country_status_added_to_queue));
nativeSetString("country_status_downloading", getString(R.string.country_status_downloading));
nativeSetString("country_status_download", getString(R.string.country_status_download));
nativeSetString("country_status_download_failed", getString(R.string.country_status_download_failed));
nativeSetString("try_again", getString(R.string.try_again));
nativeSetString("not_enough_free_space_on_sdcard", getString(R.string.not_enough_free_space_on_sdcard));
nativeConnectDownloadButton();
alignZoomButtons();
BookmarkTouchHandler handler = new BookmarkTouchHandler(m_popupLayout = (PopupLayout)findViewById(R.id.map_popup));
m_BookmarkManager = BookmarkManager.getBookmarkManager(getApplicationContext());
addOnLongClickListener(handler);
addOnClickListener(handler);
}
@Override
protected void onStop()
{
if (mApplication.isProVersion())
{
if (m_popupLayout != null)
{
m_popupLayout.deactivate();
}
}
super.onStop();
}
private class BookmarkTouchHandler implements OnClickListenter, OnLongClickListener
{
private PopupLayout m_PopupLayout;
BookmarkTouchHandler(PopupLayout pop)
{
m_PopupLayout = pop;
}
@Override
public void onLongClick(int x, int y)
{
handleOnSmthClick(x, y, true);
}
@Override
public void onClick(final int x, final int y)
{
if (m_PopupLayout.isActive())
{
if(!m_PopupLayout.handleClick(x, y, mApplication.isProVersion()))
{
if (mApplication.isProVersion())
{
handleOnSmthClick(x, y, false);
}
}
else
{
if (!mApplication.isProVersion())
{
showProVersionBanner(getString(R.string.bookmarks_in_pro_version));
}
}
}
else
{
handleOnSmthClick(x, y, false);
}
}
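// Hit-test order: a saved bookmark first, then a visible POI; a long press opens a preview popup in any case. Returns true when a popup was activated.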
private boolean handleOnSmthClick(int x, int y, boolean longClick)
{
ParcelablePoint b = m_BookmarkManager.findBookmark(new Point(x, y));
if (b != null)
{
m_PopupLayout.activate(m_BookmarkManager.getBookmark(b.getPoint().x, b.getPoint().y));
return true;
}
else
if (m_BookmarkManager.findVisiblePOI(new Point(x, y)))
{
m_PopupLayout.activate(
m_BookmarkManager.previewBookmark(
m_BookmarkManager.getBmkPositionForPOI(new Point(x, y)),
m_BookmarkManager.getNameForPOI(new Point(x, y)) ));
return true;
}
else
if (longClick)
{
m_PopupLayout.activate(
m_BookmarkManager.previewBookmark(
m_BookmarkManager.getBmkPositionForPOI(new Point(x, y)),
m_BookmarkManager.getNameForPOI(new Point(x, y)) ));
return true;
}
else
{
return false;
}
}
}
@Override
public boolean onTouchEvent(MotionEvent event)
{
m_popupLayout.requestInvalidation();
return super.onTouchEvent(event);
}
@Override
public void onDestroy()
{
if (mApplication.isProVersion())
removeOnLongClickListener();
removeOnClickListener();
super.onDestroy();
}
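// Positions the zoom buttons about a quarter of the screen height from the top, scaled by the display density.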
private void alignZoomButtons()
{
// Get screen density
DisplayMetrics metrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metrics);
final double k = metrics.density;
final int offs = (int)(53 * k); // height of button + half space between buttons.
final int margin = (int)(5 * k);
LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.WRAP_CONTENT,
LinearLayout.LayoutParams.WRAP_CONTENT);
lp.setMargins(margin, (metrics.heightPixels / 4) - offs, margin, margin);
findViewById(R.id.map_button_plus).setLayoutParams(lp);
}
/// @name From Location interface
//@{
@Override
public void onLocationError(int errorCode)
{
nativeOnLocationError(errorCode);
// Notify user about turned off location services
if (errorCode == LocationService.ERROR_DENIED)
{
getLocationState().turnOff();
// Do not show this dialog on Kindle Fire - it doesn't have location services
// and even wifi settings can't be opened programmatically
if (!Utils.isKindleFire())
{
new AlertDialog.Builder(this).setTitle(R.string.location_is_disabled_long_text)
.setPositiveButton(R.string.connection_settings, new DialogInterface.OnClickListener()
{
@Override
public void onClick(DialogInterface dialog, int which)
{
try
{
startActivity(new Intent(android.provider.Settings.ACTION_LOCATION_SOURCE_SETTINGS));
}
catch (Exception e)
{
// On older Android devices location settings are merged with security
try
{
startActivity(new Intent(android.provider.Settings.ACTION_SECURITY_SETTINGS));
}
catch (Exception ex)
{
Log.e(TAG, "Can't run activity" + ex);
}
}
dialog.dismiss();
}
})
.setNegativeButton(R.string.close, new DialogInterface.OnClickListener()
{
@Override
public void onClick(DialogInterface dialog, int which)
{
dialog.dismiss();
}
})
.create()
.show();
}
}
else if (errorCode == LocationService.ERROR_GPS_OFF)
{
Toast.makeText(this, R.string.gps_is_disabled_long_text, Toast.LENGTH_LONG).show();
}
}
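// Updates the my-position button drawable when the compass mode changes (status 1 means compass-follow is engaged).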
public void onCompassStatusChanged(int newStatus)
{
View v = findViewById(R.id.map_button_myposition);
if (newStatus == 1)
{
v.setBackgroundResource(R.drawable.myposition_button_follow);
}
else
{
if (getLocationState().hasPosition())
v.setBackgroundResource(R.drawable.myposition_button_found);
else
v.setBackgroundResource(R.drawable.myposition_button_normal);
}
v.setSelected(true);
}
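// Thread-safe variant called from outside the UI thread; marshals the status change onto the UI thread.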
public void OnCompassStatusChanged(int newStatus)
{
final int val = newStatus;
runOnUiThread(new Runnable()
{
@Override
public void run()
{
onCompassStatusChanged(val);
}
});
}
@Override
public void onLocationUpdated(long time, double lat, double lon, float accuracy)
{
if (getLocationState().isFirstPosition())
{
final View v = findViewById(R.id.map_button_myposition);
v.setBackgroundResource(R.drawable.myposition_button_found);
v.setSelected(true);
}
nativeLocationUpdated(time, lat, lon, accuracy);
}
@Override
public void onCompassUpdated(long time, double magneticNorth, double trueNorth, double accuracy)
{
double[] angles = { magneticNorth, trueNorth };
getLocationService().correctCompassAngles(getWindowManager().getDefaultDisplay(), angles);
nativeCompassUpdated(time, angles[0], angles[1], accuracy);
}
//@}
private int m_compassStatusListenerID = -1;
private void startWatchingCompassStatusUpdate()
{
m_compassStatusListenerID = mApplication.getLocationState().addCompassStatusListener(this);
}
private void stopWatchingCompassStatusUpdate()
{
mApplication.getLocationState().removeCompassStatusListener(m_compassStatusListenerID);
}
@Override
protected void onPause()
{
pauseLocation();
stopWatchingExternalStorage();
stopWatchingCompassStatusUpdate();
super.onPause();
}
@Override
protected void onResume()
{
checkShouldResumeLocationService();
startWatchingCompassStatusUpdate();
startWatchingExternalStorage();
m_popupLayout.requestInvalidation();
super.onResume();
}
@Override
public boolean onCreateOptionsMenu(Menu menu)
{
return ContextMenu.onCreateOptionsMenu(this, menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item)
{
if (ContextMenu.onOptionsItemSelected(this, item))
return true;
else
return super.onOptionsItemSelected(item);
}
// Initialized to invalid combination to force update on the first check
private boolean m_storageAvailable = false;
private boolean m_storageWriteable = true;
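// Re-reads the external media state and notifies handleExternalStorageState() only when it has actually changed.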
private void updateExternalStorageState()
{
boolean available, writeable;
final String state = Environment.getExternalStorageState();
if (Environment.MEDIA_MOUNTED.equals(state))
{
available = writeable = true;
}
else if (Environment.MEDIA_MOUNTED_READ_ONLY.equals(state))
{
available = true;
writeable = false;
}
else
available = writeable = false;
if (m_storageAvailable != available || m_storageWriteable != writeable)
{
m_storageAvailable = available;
m_storageWriteable = writeable;
handleExternalStorageState(available, writeable);
}
}
private void handleExternalStorageState(boolean available, boolean writeable)
{
if (available && writeable)
{
// Add local maps to the model
nativeStorageConnected();
// enable downloader button and dismiss blocking popup
findViewById(R.id.map_button_download).setVisibility(View.VISIBLE);
if (m_storageDisconnectedDialog != null)
m_storageDisconnectedDialog.dismiss();
}
else if (available)
{
// Add local maps to the model
nativeStorageConnected();
// disable downloader button and dismiss blocking popup
findViewById(R.id.map_button_download).setVisibility(View.INVISIBLE);
if (m_storageDisconnectedDialog != null)
m_storageDisconnectedDialog.dismiss();
}
else
{
// Remove local maps from the model
nativeStorageDisconnected();
// enable downloader button and show blocking popup
findViewById(R.id.map_button_download).setVisibility(View.VISIBLE);
if (m_storageDisconnectedDialog == null)
{
m_storageDisconnectedDialog = new AlertDialog.Builder(this)
.setTitle(R.string.external_storage_is_not_available)
.setMessage(getString(R.string.disconnect_usb_cable))
.setCancelable(false)
.create();
}
m_storageDisconnectedDialog.show();
}
}
private boolean isActivityPaused()
{
// This receiver is null only when activity is paused (see onPause, onResume).
return (m_externalStorageReceiver == null);
}
private void startWatchingExternalStorage()
{
m_externalStorageReceiver = new BroadcastReceiver()
{
@Override
public void onReceive(Context context, Intent intent)
{
updateExternalStorageState();
}
};
IntentFilter filter = new IntentFilter();
filter.addAction(Intent.ACTION_MEDIA_MOUNTED);
filter.addAction(Intent.ACTION_MEDIA_REMOVED);
filter.addAction(Intent.ACTION_MEDIA_EJECT);
filter.addAction(Intent.ACTION_MEDIA_SHARED);
filter.addAction(Intent.ACTION_MEDIA_UNMOUNTED);
filter.addAction(Intent.ACTION_MEDIA_BAD_REMOVAL);
filter.addAction(Intent.ACTION_MEDIA_UNMOUNTABLE);
filter.addAction(Intent.ACTION_MEDIA_CHECKING);
filter.addAction(Intent.ACTION_MEDIA_NOFS);
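// ACTION_MEDIA_* broadcasts carry a file:// data scheme; the receiver is never triggered without registering it.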
filter.addDataScheme("file");
registerReceiver(m_externalStorageReceiver, filter);
updateExternalStorageState();
}
@Override
@Deprecated
protected void onPrepareDialog(int id, Dialog dialog, Bundle args)
{
if (id == PRO_VERSION_DIALOG)
{
((AlertDialog)dialog).setMessage(mProDialogMessage);
}
else
{
super.onPrepareDialog(id, dialog, args);
}
}
@Override
@Deprecated
protected Dialog onCreateDialog(int id)
{
if (id == PRO_VERSION_DIALOG)
{
return new AlertDialog.Builder(getActivity())
.setMessage("")
.setPositiveButton(getString(R.string.get_it_now), new DialogInterface.OnClickListener()
{
@Override
public void onClick(DialogInterface dlg, int which)
{
Intent i = new Intent(Intent.ACTION_VIEW, Uri.parse(mApplication.getProVersionURL()));
dlg.dismiss();
startActivity(i);
}
})
.setNegativeButton(getString(R.string.cancel), new DialogInterface.OnClickListener()
{
@Override
public void onClick(DialogInterface dlg, int which)
{
dlg.dismiss();
}
})
.create();
}
else
return super.onCreateDialog(id);
}
private void stopWatchingExternalStorage()
{
if (m_externalStorageReceiver != null)
{
unregisterReceiver(m_externalStorageReceiver);
m_externalStorageReceiver = null;
}
}
private native void nativeSetString(String name, String value);
private native void nativeStorageConnected();
private native void nativeStorageDisconnected();
private native void nativeConnectDownloadButton();
private native void nativeDownloadCountry();
private native void nativeDestroy();
private native void nativeOnLocationError(int errorCode);
private native void nativeLocationUpdated(long time, double lat, double lon, float accuracy);
private native void nativeCompassUpdated(long time, double magneticNorth, double trueNorth, double accuracy);
private native boolean nativeIsInChina(double lat, double lon);
}
| android/src/com/mapswithme/maps/MWMActivity.java | package com.mapswithme.maps;
import java.util.Locale;
import android.app.Activity;
import android.app.AlertDialog;
import android.app.Dialog;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.res.Configuration;
import android.content.res.Resources;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Point;
import android.location.Location;
import android.net.Uri;
import android.os.Bundle;
import android.os.Environment;
import android.telephony.TelephonyManager;
import android.text.TextUtils;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.MotionEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;
import android.view.View;
import android.widget.LinearLayout;
import android.widget.Toast;
import com.mapswithme.maps.bookmarks.BookmarkActivity;
import com.mapswithme.maps.bookmarks.BookmarkCategoriesActivity;
import com.mapswithme.maps.bookmarks.PopupLayout;
import com.mapswithme.maps.bookmarks.data.Bookmark;
import com.mapswithme.maps.bookmarks.data.BookmarkManager;
import com.mapswithme.maps.bookmarks.data.ParcelablePoint;
import com.mapswithme.maps.location.LocationService;
import com.mapswithme.maps.settings.UnitLocale;
import com.mapswithme.util.ConnectionState;
import com.mapswithme.util.Utils;
import com.nvidia.devtech.NvEventQueueActivity;
public class MWMActivity extends NvEventQueueActivity implements LocationService.Listener
{
private static final int PRO_VERSION_DIALOG = 110001;
private static final String PRO_VERSION_DIALOG_MSG = "pro_version_dialog_msg";
//VideoTimer m_timer;
private static String TAG = "MWMActivity";
private MWMApplication mApplication = null;
private BroadcastReceiver m_externalStorageReceiver = null;
private AlertDialog m_storageDisconnectedDialog = null;
private BookmarkManager m_BookmarkManager;
//showDialog(int, Bundle) is available only from API 8
private String mProDialogMessage;
private interface OnLongClickListener
{
void onLongClick(int x, int y);
}
private interface OnClickListenter
{
void onClick(int x, int y);
}
private native void addOnLongClickListener(Object l);
private native void removeOnLongClickListener();
private native void addOnClickListener(Object l);
private native void removeOnClickListener();
private LocationService getLocationService()
{
return mApplication.getLocationService();
}
private MapStorage getMapStorage()
{
return mApplication.getMapStorage();
}
private LocationState getLocationState()
{
return mApplication.getLocationState();
}
private void startLocation()
{
getLocationState().onStartLocation();
resumeLocation();
}
private void stopLocation()
{
getLocationState().onStopLocation();
pauseLocation();
}
private void pauseLocation()
{
getLocationService().stopUpdate(this);
// Enable automatic turning screen off while app is idle
Utils.automaticIdleScreen(true, getWindow());
}
private void resumeLocation()
{
getLocationService().startUpdate(this);
// Do not turn off the screen while displaying position
Utils.automaticIdleScreen(false, getWindow());
}
public void checkShouldResumeLocationService()
{
final View v = findViewById(R.id.map_button_myposition);
if (v != null)
{
final LocationState state = getLocationState();
final boolean hasPosition = state.hasPosition();
// check if we need to start location observing
int resID = 0;
if (hasPosition)
resID = R.drawable.myposition_button_found;
else if (state.isFirstPosition())
resID = R.drawable.myposition_button_normal;
if (resID != 0)
{
if (hasPosition && (state.getCompassProcessMode() == LocationState.COMPASS_FOLLOW))
{
state.startCompassFollowing();
v.setBackgroundResource(R.drawable.myposition_button_follow);
}
else
v.setBackgroundResource(resID);
v.setSelected(true);
// start observing at the end (the button state can change here from normal to found).
resumeLocation();
}
else
{
v.setBackgroundResource(R.drawable.myposition_button_normal);
v.setSelected(false);
}
}
}
public void OnDownloadCountryClicked()
{
runOnUiThread(new Runnable()
{
@Override
public void run()
{
nativeDownloadCountry();
}
});
}
@Override
public void OnRenderingInitialized()
{
runOnUiThread(new Runnable()
{
@Override
public void run()
{
// Run all checks in main thread after rendering is initialized.
checkMeasurementSystem();
checkUpdateMaps();
checkFacebookDialog();
checkBuyProDialog();
}
});
}
private Activity getActivity() { return this; }
@Override
public void ReportUnsupported()
{
runOnUiThread(new Runnable()
{
@Override
public void run()
{
new AlertDialog.Builder(getActivity())
.setMessage(getString(R.string.unsupported_phone))
.setCancelable(false)
.setPositiveButton(getString(R.string.close), new DialogInterface.OnClickListener()
{
@Override
public void onClick(DialogInterface dlg, int which)
{
getActivity().moveTaskToBack(true);
dlg.dismiss();
}
})
.create()
.show();
}
});
}
private void checkMeasurementSystem()
{
UnitLocale.initializeCurrentUnits();
}
private native void nativeScale(double k);
public void onPlusClicked(View v)
{
nativeScale(3.0 / 2);
}
public void onMinusClicked(View v)
{
nativeScale(2.0 / 3);
}
public void onBookmarksClicked(View v)
{
if (!mApplication.isProVersion())
{
showProVersionBanner(getString(R.string.bookmarks_in_pro_version));
}
else
{
startActivity(new Intent(this, BookmarkCategoriesActivity.class));
}
}
public void onMyPositionClicked(View v)
{
final LocationState state = mApplication.getLocationState();
if (!state.hasPosition())
{
if (!state.isFirstPosition())
{
// If first time pressed - start location observing:
// Set the button state to "searching" first ...
v.setBackgroundResource(R.drawable.myposition_button_normal);
v.setSelected(true);
// ... and then call startLocation, as there could be my_position button
// state changes in the startLocation.
startLocation();
return;
}
}
else
{
if (!state.isCentered())
{
state.animateToPositionAndEnqueueLocationProcessMode(LocationState.LOCATION_CENTER_ONLY);
v.setSelected(true);
return;
}
else
if (mApplication.isProVersion())
{
// Check if we need to start compass following.
if (state.hasCompass())
{
if (state.getCompassProcessMode() != LocationState.COMPASS_FOLLOW)
{
state.startCompassFollowing();
v.setBackgroundResource(R.drawable.myposition_button_follow);
v.setSelected(true);
return;
}
else
state.stopCompassFollowingAndRotateMap();
}
}
}
// Turn off location search:
// Stop location observing first ...
stopLocation();
// ... and then set button state to default.
v.setBackgroundResource(R.drawable.myposition_button_normal);
v.setSelected(false);
}
private boolean m_needCheckUpdate = true;
private PopupLayout m_popupLayout;
private void checkUpdateMaps()
{
// do it only once
if (m_needCheckUpdate)
{
m_needCheckUpdate = false;
getMapStorage().updateMaps(R.string.advise_update_maps, this, new MapStorage.UpdateFunctor()
{
@Override
public void doUpdate()
{
runDownloadActivity();
}
@Override
public void doCancel()
{
}
});
}
}
@Override
public void onConfigurationChanged(Configuration newConfig)
{
super.onConfigurationChanged(newConfig);
alignZoomButtons();
}
private void showDialogImpl(final int dlgID, int resMsg, DialogInterface.OnClickListener okListener)
{
new AlertDialog.Builder(this)
.setCancelable(false)
.setMessage(getString(resMsg))
.setPositiveButton(getString(R.string.ok), okListener)
.setNeutralButton(getString(R.string.never), new DialogInterface.OnClickListener()
{
@Override
public void onClick(DialogInterface dlg, int which)
{
dlg.dismiss();
mApplication.submitDialogResult(dlgID, MWMApplication.NEVER);
}
})
.setNegativeButton(getString(R.string.later), new DialogInterface.OnClickListener()
{
@Override
public void onClick(DialogInterface dlg, int which)
{
dlg.dismiss();
mApplication.submitDialogResult(dlgID, MWMApplication.LATER);
}
})
.create()
.show();
}
private void showFacebookPage()
{
Intent intent = null;
try
{
// Trying to find package with installed Facebook application.
// Exception is thrown if we don't have one.
getPackageManager().getPackageInfo("com.facebook.katana", 0);
// Profile id is taken from http://graph.facebook.com/MapsWithMe
intent = new Intent(Intent.ACTION_VIEW, Uri.parse("fb://profile/111923085594432"));
// throws ActivityNotFoundException
startActivity(intent);
}
catch (Exception e)
{
// Show Facebook page in browser.
intent = new Intent(Intent.ACTION_VIEW, Uri.parse("https://www.facebook.com/MapsWithMe"));
startActivity(intent);
}
}
private boolean isChinaISO(String iso)
{
String[] arr = { "CN", "CHN", "HK", "HKG", "MO", "MAC" };
for (String s : arr)
if (iso.equalsIgnoreCase(s))
return true;
return false;
}
private boolean isChinaRegion()
{
final TelephonyManager tm = (TelephonyManager) getSystemService(Context.TELEPHONY_SERVICE);
if (tm != null && tm.getPhoneType() != TelephonyManager.PHONE_TYPE_CDMA)
{
final String iso = tm.getNetworkCountryIso();
Log.i(TAG, "TelephonyManager country ISO = " + iso);
if (isChinaISO(iso))
return true;
}
else
{
final Location l = mApplication.getLocationService().getLastKnown();
if (l != null && nativeIsInChina(l.getLatitude(), l.getLongitude()))
return true;
else
{
final String code = Locale.getDefault().getCountry();
Log.i(TAG, "Locale country ISO = " + code);
if (isChinaISO(code))
return true;
}
}
return false;
}
private void checkFacebookDialog()
{
if ((ConnectionState.getState(this) != ConnectionState.NOT_CONNECTED) &&
mApplication.shouldShowDialog(MWMApplication.FACEBOOK) &&
!isChinaRegion())
{
showDialogImpl(MWMApplication.FACEBOOK, R.string.share_on_facebook_text,
new DialogInterface.OnClickListener()
{
@Override
public void onClick(DialogInterface dlg, int which)
{
mApplication.submitDialogResult(MWMApplication.FACEBOOK, MWMApplication.OK);
dlg.dismiss();
showFacebookPage();
}
});
}
}
private void showProVersionBanner(final String message)
{
mProDialogMessage = message;
runOnUiThread(new Runnable()
{
@Override
public void run()
{
showDialog(PRO_VERSION_DIALOG);
}
});
}
private void checkBuyProDialog()
{
if (!mApplication.isProVersion() &&
(ConnectionState.getState(this) != ConnectionState.NOT_CONNECTED) &&
mApplication.shouldShowDialog(MWMApplication.BUYPRO))
{
showDialogImpl(MWMApplication.BUYPRO, R.string.pro_version_available,
new DialogInterface.OnClickListener()
{
@Override
public void onClick(DialogInterface dlg, int which)
{
mApplication.submitDialogResult(MWMApplication.BUYPRO, MWMApplication.OK);
Intent i = new Intent(Intent.ACTION_VIEW, Uri.parse(mApplication.getProVersionURL()));
dlg.dismiss();
startActivity(i);
}
});
}
}
private void runSearchActivity()
{
startActivity(new Intent(this, SearchActivity.class));
}
public void onSearchClicked(View v)
{
if (!mApplication.isProVersion())
{
showProVersionBanner(getString(R.string.search_available_in_pro_version));
}
else
{
if (!getMapStorage().updateMaps(R.string.search_update_maps, this, new MapStorage.UpdateFunctor()
{
@Override
public void doUpdate()
{
runDownloadActivity();
}
@Override
public void doCancel()
{
runSearchActivity();
}
}))
{
runSearchActivity();
}
}
}
@Override
public boolean onSearchRequested()
{
onSearchClicked(null);
return false;
}
private void runDownloadActivity()
{
startActivity(new Intent(this, DownloadUI.class));
}
public void onDownloadClicked(View v)
{
runDownloadActivity();
}
@Override
public void onCreate(Bundle savedInstanceState)
{
// Use full-screen on Kindle Fire only
if (Utils.isKindleFire())
{
getWindow().addFlags(android.view.WindowManager.LayoutParams.FLAG_FULLSCREEN);
getWindow().clearFlags(android.view.WindowManager.LayoutParams.FLAG_FORCE_NOT_FULLSCREEN);
}
super.onCreate(savedInstanceState);
mApplication = (MWMApplication)getApplication();
// Do not turn off the screen while benchmarking
if (mApplication.nativeIsBenchmarking())
getWindow().addFlags(android.view.WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
nativeSetString("country_status_added_to_queue", getString(R.string.country_status_added_to_queue));
nativeSetString("country_status_downloading", getString(R.string.country_status_downloading));
nativeSetString("country_status_download", getString(R.string.country_status_download));
nativeSetString("country_status_download_failed", getString(R.string.country_status_download_failed));
nativeSetString("try_again", getString(R.string.try_again));
nativeSetString("not_enough_free_space_on_sdcard", getString(R.string.not_enough_free_space_on_sdcard));
nativeConnectDownloadButton();
alignZoomButtons();
BookmarkTouchHandler handler = new BookmarkTouchHandler(m_popupLayout = (PopupLayout)findViewById(R.id.map_popup));
m_BookmarkManager = BookmarkManager.getBookmarkManager(getApplicationContext());
addOnLongClickListener(handler);
addOnClickListener(handler);
}
@Override
protected void onStop()
{
if (mApplication.isProVersion())
{
if (m_popupLayout != null)
{
m_popupLayout.deactivate();
}
}
super.onStop();
}
private class BookmarkTouchHandler implements OnClickListenter, OnLongClickListener
{
private PopupLayout m_PopupLayout;
BookmarkTouchHandler(PopupLayout pop)
{
m_PopupLayout = pop;
}
@Override
public void onLongClick(int x, int y)
{
handleOnSmthClick(x, y, true);
}
@Override
public void onClick(final int x, final int y)
{
if (m_PopupLayout.isActive())
{
if(!m_PopupLayout.handleClick(x, y, mApplication.isProVersion()))
{
if (mApplication.isProVersion())
{
handleOnSmthClick(x, y, false);
}
}
else
{
showProVersionBanner(getString(R.string.bookmarks_in_pro_version));
}
}
else
{
handleOnSmthClick(x, y, false);
}
}
private boolean handleOnSmthClick(int x, int y, boolean longClick)
{
ParcelablePoint b = m_BookmarkManager.findBookmark(new Point(x, y));
if (b != null)
{
m_PopupLayout.activate(m_BookmarkManager.getBookmark(b.getPoint().x, b.getPoint().y));
return true;
}
else
if (m_BookmarkManager.findVisiblePOI(new Point(x, y)))
{
m_PopupLayout.activate(
m_BookmarkManager.previewBookmark(
m_BookmarkManager.getBmkPositionForPOI(new Point(x, y)),
m_BookmarkManager.getNameForPOI(new Point(x, y)) ));
return true;
}
else
if (longClick)
{
m_PopupLayout.activate(
m_BookmarkManager.previewBookmark(
m_BookmarkManager.getBmkPositionForPOI(new Point(x, y)),
m_BookmarkManager.getNameForPOI(new Point(x, y)) ));
return true;
}
else
{
return false;
}
}
}
@Override
public boolean onTouchEvent(MotionEvent event)
{
m_popupLayout.requestInvalidation();
return super.onTouchEvent(event);
}
@Override
public void onDestroy()
{
if (mApplication.isProVersion())
removeOnLongClickListener();
removeOnClickListener();
super.onDestroy();
}
private void alignZoomButtons()
{
// Get screen density
DisplayMetrics metrics = new DisplayMetrics();
getWindowManager().getDefaultDisplay().getMetrics(metrics);
final double k = metrics.density;
final int offs = (int)(53 * k); // height of button + half space between buttons.
final int margin = (int)(5 * k);
LinearLayout.LayoutParams lp = new LinearLayout.LayoutParams(LinearLayout.LayoutParams.WRAP_CONTENT,
LinearLayout.LayoutParams.WRAP_CONTENT);
lp.setMargins(margin, (metrics.heightPixels / 4) - offs, margin, margin);
findViewById(R.id.map_button_plus).setLayoutParams(lp);
}
/// @name From Location interface
//@{
@Override
public void onLocationError(int errorCode)
{
nativeOnLocationError(errorCode);
// Notify user about turned off location services
if (errorCode == LocationService.ERROR_DENIED)
{
getLocationState().turnOff();
// Do not show this dialog on Kindle Fire - it doesn't have location services
// and even wifi settings can't be opened programmatically
if (!Utils.isKindleFire())
{
new AlertDialog.Builder(this).setTitle(R.string.location_is_disabled_long_text)
.setPositiveButton(R.string.connection_settings, new DialogInterface.OnClickListener()
{
@Override
public void onClick(DialogInterface dialog, int which)
{
try
{
startActivity(new Intent(android.provider.Settings.ACTION_LOCATION_SOURCE_SETTINGS));
}
catch (Exception e)
{
// On older Android devices location settings are merged with security
try
{
startActivity(new Intent(android.provider.Settings.ACTION_SECURITY_SETTINGS));
}
catch (Exception ex)
{
Log.e(TAG, "Can't run activity" + ex);
}
}
dialog.dismiss();
}
})
.setNegativeButton(R.string.close, new DialogInterface.OnClickListener()
{
@Override
public void onClick(DialogInterface dialog, int which)
{
dialog.dismiss();
}
})
.create()
.show();
}
}
else if (errorCode == LocationService.ERROR_GPS_OFF)
{
Toast.makeText(this, R.string.gps_is_disabled_long_text, Toast.LENGTH_LONG).show();
}
}
public void onCompassStatusChanged(int newStatus)
{
View v = findViewById(R.id.map_button_myposition);
if (newStatus == 1)
{
v.setBackgroundResource(R.drawable.myposition_button_follow);
}
else
{
if (getLocationState().hasPosition())
v.setBackgroundResource(R.drawable.myposition_button_found);
else
v.setBackgroundResource(R.drawable.myposition_button_normal);
}
v.setSelected(true);
}
public void OnCompassStatusChanged(int newStatus)
{
final int val = newStatus;
runOnUiThread(new Runnable()
{
@Override
public void run()
{
onCompassStatusChanged(val);
}
});
}
@Override
public void onLocationUpdated(long time, double lat, double lon, float accuracy)
{
if (getLocationState().isFirstPosition())
{
final View v = findViewById(R.id.map_button_myposition);
v.setBackgroundResource(R.drawable.myposition_button_found);
v.setSelected(true);
}
nativeLocationUpdated(time, lat, lon, accuracy);
}
@Override
public void onCompassUpdated(long time, double magneticNorth, double trueNorth, double accuracy)
{
double[] angles = { magneticNorth, trueNorth };
getLocationService().correctCompassAngles(getWindowManager().getDefaultDisplay(), angles);
nativeCompassUpdated(time, angles[0], angles[1], accuracy);
}
//@}
private int m_compassStatusListenerID = -1;
private void startWatchingCompassStatusUpdate()
{
m_compassStatusListenerID = mApplication.getLocationState().addCompassStatusListener(this);
}
private void stopWatchingCompassStatusUpdate()
{
mApplication.getLocationState().removeCompassStatusListener(m_compassStatusListenerID);
}
@Override
protected void onPause()
{
pauseLocation();
stopWatchingExternalStorage();
stopWatchingCompassStatusUpdate();
super.onPause();
}
@Override
protected void onResume()
{
checkShouldResumeLocationService();
startWatchingCompassStatusUpdate();
startWatchingExternalStorage();
m_popupLayout.requestInvalidation();
super.onResume();
}
@Override
public boolean onCreateOptionsMenu(Menu menu)
{
return ContextMenu.onCreateOptionsMenu(this, menu);
}
@Override
public boolean onOptionsItemSelected(MenuItem item)
{
if (ContextMenu.onOptionsItemSelected(this, item))
return true;
else
return super.onOptionsItemSelected(item);
}
// Initialized to invalid combination to force update on the first check
private boolean m_storageAvailable = false;
private boolean m_storageWriteable = true;
private void updateExternalStorageState()
{
boolean available, writeable;
final String state = Environment.getExternalStorageState();
if (Environment.MEDIA_MOUNTED.equals(state))
{
available = writeable = true;
}
else if (Environment.MEDIA_MOUNTED_READ_ONLY.equals(state))
{
available = true;
writeable = false;
}
else
available = writeable = false;
if (m_storageAvailable != available || m_storageWriteable != writeable)
{
m_storageAvailable = available;
m_storageWriteable = writeable;
handleExternalStorageState(available, writeable);
}
}
private void handleExternalStorageState(boolean available, boolean writeable)
{
if (available && writeable)
{
// Add local maps to the model
nativeStorageConnected();
// enable downloader button and dismiss blocking popup
findViewById(R.id.map_button_download).setVisibility(View.VISIBLE);
if (m_storageDisconnectedDialog != null)
m_storageDisconnectedDialog.dismiss();
}
else if (available)
{
// Add local maps to the model
nativeStorageConnected();
// disable downloader button and dismiss blocking popup
findViewById(R.id.map_button_download).setVisibility(View.INVISIBLE);
if (m_storageDisconnectedDialog != null)
m_storageDisconnectedDialog.dismiss();
}
else
{
// Remove local maps from the model
nativeStorageDisconnected();
// enable downloader button and show blocking popup
findViewById(R.id.map_button_download).setVisibility(View.VISIBLE);
if (m_storageDisconnectedDialog == null)
{
m_storageDisconnectedDialog = new AlertDialog.Builder(this)
.setTitle(R.string.external_storage_is_not_available)
.setMessage(getString(R.string.disconnect_usb_cable))
.setCancelable(false)
.create();
}
m_storageDisconnectedDialog.show();
}
}
private boolean isActivityPaused()
{
// This receiver is null only when activity is paused (see onPause, onResume).
return (m_externalStorageReceiver == null);
}
private void startWatchingExternalStorage()
{
m_externalStorageReceiver = new BroadcastReceiver()
{
@Override
public void onReceive(Context context, Intent intent)
{
updateExternalStorageState();
}
};
IntentFilter filter = new IntentFilter();
filter.addAction(Intent.ACTION_MEDIA_MOUNTED);
filter.addAction(Intent.ACTION_MEDIA_REMOVED);
filter.addAction(Intent.ACTION_MEDIA_EJECT);
filter.addAction(Intent.ACTION_MEDIA_SHARED);
filter.addAction(Intent.ACTION_MEDIA_UNMOUNTED);
filter.addAction(Intent.ACTION_MEDIA_BAD_REMOVAL);
filter.addAction(Intent.ACTION_MEDIA_UNMOUNTABLE);
filter.addAction(Intent.ACTION_MEDIA_CHECKING);
filter.addAction(Intent.ACTION_MEDIA_NOFS);
filter.addDataScheme("file");
registerReceiver(m_externalStorageReceiver, filter);
updateExternalStorageState();
}
@Override
@Deprecated
protected void onPrepareDialog(int id, Dialog dialog, Bundle args)
{
if (id == PRO_VERSION_DIALOG)
{
((AlertDialog)dialog).setMessage(mProDialogMessage);
}
else
{
super.onPrepareDialog(id, dialog, args);
}
}
@Override
@Deprecated
protected Dialog onCreateDialog(int id)
{
if (id == PRO_VERSION_DIALOG)
{
return new AlertDialog.Builder(getActivity())
.setMessage("")
.setPositiveButton(getString(R.string.get_it_now), new DialogInterface.OnClickListener()
{
@Override
public void onClick(DialogInterface dlg, int which)
{
Intent i = new Intent(Intent.ACTION_VIEW, Uri.parse(mApplication.getProVersionURL()));
dlg.dismiss();
startActivity(i);
}
})
.setNegativeButton(getString(R.string.cancel), new DialogInterface.OnClickListener()
{
@Override
public void onClick(DialogInterface dlg, int which)
{
dlg.dismiss();
}
})
.create();
}
else
return super.onCreateDialog(id);
}
private void stopWatchingExternalStorage()
{
if (m_externalStorageReceiver != null)
{
unregisterReceiver(m_externalStorageReceiver);
m_externalStorageReceiver = null;
}
}
private native void nativeSetString(String name, String value);
private native void nativeStorageConnected();
private native void nativeStorageDisconnected();
private native void nativeConnectDownloadButton();
private native void nativeDownloadCountry();
private native void nativeDestroy();
private native void nativeOnLocationError(int errorCode);
private native void nativeLocationUpdated(long time, double lat, double lon, float accuracy);
private native void nativeCompassUpdated(long time, double magneticNorth, double trueNorth, double accuracy);
private native boolean nativeIsInChina(double lat, double lon);
}
| [android] [bookmarks] fix for pro dialog
| android/src/com/mapswithme/maps/MWMActivity.java | [android] [bookmarks] fix for pro dialog |
|
Java | apache-2.0 | 454fe1b9acfb49489e4e67ecbb0b231d52d7e420 | 0 | nirmal070125/KubernetesAPIJavaClient,carlossg/KubernetesAPIJavaClient | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.stratos.kubernetes.api.client;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.HttpStatus;
import org.apache.stratos.kubernetes.api.client.interfaces.KubernetesAPIClientInterface;
import org.apache.stratos.kubernetes.api.exceptions.KubernetesClientException;
import org.apache.stratos.kubernetes.api.model.Pod;
import org.apache.stratos.kubernetes.api.model.PodList;
import org.apache.stratos.kubernetes.api.model.ReplicationController;
import org.apache.stratos.kubernetes.api.model.ReplicationControllerList;
import org.apache.stratos.kubernetes.api.model.Service;
import org.apache.stratos.kubernetes.api.model.ServiceList;
import org.jboss.resteasy.client.ClientRequest;
import org.jboss.resteasy.client.ClientResponse;
public class KubernetesApiClient implements KubernetesAPIClientInterface {
private String endpointUrl;
private static final Log log = LogFactory.getLog(KubernetesApiClient.class);
public KubernetesApiClient(String endpointUrl) {
this.endpointUrl = endpointUrl;
}
@Override
public Pod getPod(String podId) throws KubernetesClientException{
try {
ClientRequest request = new ClientRequest(endpointUrl+"pods/{podId}");
ClientResponse<Pod> res = request.pathParameter("podId", podId).get(Pod.class);
if (res.getEntity() == null ) {
String msg = "Pod ["+podId+"] doesn't exist.";
log.error(msg);
throw new KubernetesClientException(msg);
}
return res.getEntity();
} catch (Exception e) {
String msg = "Error while retrieving Pod info with Pod ID: "+podId;
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
@Override
public PodList getAllPods() throws KubernetesClientException {
try {
ClientRequest request = new ClientRequest(endpointUrl+"pods/");
ClientResponse<PodList> res = request.get(PodList.class);
if (res.getEntity() == null ) {
return new PodList();
}
PodList podList = new PodList();
podList.setItems(res.getEntity().getItems());
return podList;
} catch (Exception e) {
String msg = "Error while retrieving Pods.";
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
@Override
public void createPod(Pod pod) throws KubernetesClientException {
try {
ClientRequest request = new ClientRequest(endpointUrl+"pods");
ClientResponse<?> res = request.body("application/json", pod).post();
if (res.getResponseStatus().getStatusCode() != HttpStatus.SC_ACCEPTED) {
String msg = "Pod ["+pod+"] creation failed. Error: "+
res.getResponseStatus().getReasonPhrase();
log.error(msg);
throw new KubernetesClientException(msg);
}
} catch (Exception e) {
String msg = "Error while creating Pod: "+pod;
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
@Override
public void deletePod(String podId) throws KubernetesClientException {
try {
ClientRequest request = new ClientRequest(endpointUrl+"pods/{podId}");
ClientResponse<?> res = request.pathParameter("podId", podId).delete();
if (res.getResponseStatus().getStatusCode() != HttpStatus.SC_ACCEPTED) {
String msg = "Pod ["+podId+"] deletion failed. Error: "+
res.getResponseStatus().getReasonPhrase();
log.error(msg);
throw new KubernetesClientException(msg);
}
} catch (Exception e) {
String msg = "Error while retrieving Pod info of Pod ID: "+podId;
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
@Override
public ReplicationController getReplicationController(String controllerId)
throws KubernetesClientException {
try {
ClientRequest request = new ClientRequest(endpointUrl+"replicationControllers/{controllerId}");
ClientResponse<ReplicationController> res = request.pathParameter("controllerId", controllerId)
.get(ReplicationController.class);
if (res.getEntity() == null ) {
String msg = "Replication Controller ["+controllerId+"] doesn't exist.";
log.error(msg);
throw new KubernetesClientException(msg);
}
return res.getEntity();
} catch (Exception e) {
String msg = "Error while retrieving Replication Controller info with ID: "+controllerId;
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
@Override
public ReplicationController[] getAllReplicationControllers()
throws KubernetesClientException {
try {
ClientRequest request = new ClientRequest(endpointUrl+"replicationControllers/");
ClientResponse<ReplicationControllerList> res = request.get(ReplicationControllerList.class);
if (res.getEntity() == null ) {
return new ReplicationController[0];
}
return res.getEntity().getItems();
} catch (Exception e) {
String msg = "Error while retrieving Replication Controllers.";
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
@Override
public void createReplicationController(ReplicationController controller)
throws KubernetesClientException {
try {
ClientRequest request = new ClientRequest(endpointUrl
+ "replicationControllers/");
ClientResponse<?> res = request
.body("application/json", controller).post();
if (res.getResponseStatus().getStatusCode() != HttpStatus.SC_OK &&
res.getResponseStatus().getStatusCode() != HttpStatus.SC_ACCEPTED) {
String msg = "Replication Controller [" + controller
+ "] creation failed. Error: "
+ res.getResponseStatus().getReasonPhrase();
log.error(msg);
throw new KubernetesClientException(msg);
}
} catch (Exception e) {
String msg = "Error while creating Replication Controller: "
+ controller;
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
@Override
public void deleteReplicationController(String controllerId)
throws KubernetesClientException {
try {
ClientRequest request = new ClientRequest(endpointUrl+"replicationControllers/{controllerId}");
ClientResponse<?> res = request.pathParameter("controllerId", controllerId).delete();
if (res.getResponseStatus().getStatusCode() != HttpStatus.SC_ACCEPTED) {
String msg = "Replication Controller ["+controllerId+"] deletion failed. Error: "+
res.getResponseStatus().getReasonPhrase();
log.error(msg);
throw new KubernetesClientException(msg);
}
} catch (Exception e) {
String msg = "Error while retrieving Replication Controller info of Controller ID: "+controllerId;
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
@Override
public Service getService(String serviceId)
throws KubernetesClientException {
try {
ClientRequest request = new ClientRequest(endpointUrl+"services/{serviceId}");
ClientResponse<Service> res = request.pathParameter("serviceId", serviceId).get(Service.class);
if (res.getEntity() == null ) {
String msg = "Service ["+serviceId+"] doesn't exist.";
log.error(msg);
throw new KubernetesClientException(msg);
}
return res.getEntity();
} catch (Exception e) {
String msg = "Error while retrieving Service info with Service ID: "+serviceId;
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
@Override
public ServiceList getAllServices() throws KubernetesClientException {
try {
ClientRequest request = new ClientRequest(endpointUrl+"services/");
ClientResponse<ServiceList> res = request.get(ServiceList.class);
if (res.getEntity() == null ) {
return new ServiceList();
}
ServiceList serviceList = new ServiceList();
serviceList.setItems(res.getEntity().getItems());
return serviceList;
} catch (Exception e) {
String msg = "Error while retrieving Services.";
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
@Override
public void createService(Service service) throws KubernetesClientException {
try {
ClientRequest request = new ClientRequest(endpointUrl+"services/");
ClientResponse<?> res = request.body("application/json", service).post();
if (res.getResponseStatus().getStatusCode() != HttpStatus.SC_ACCEPTED) {
String msg = "Service ["+service+"] creation failed. Error: "+
res.getResponseStatus().getReasonPhrase();
log.error(msg);
throw new KubernetesClientException(msg);
}
} catch (Exception e) {
String msg = "Error while creating the Service: "+service;
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
@Override
public void deleteService(String serviceId)
throws KubernetesClientException {
try {
ClientRequest request = new ClientRequest(endpointUrl+"services/{serviceId}");
ClientResponse<?> res = request.pathParameter("serviceId", serviceId).delete();
if (res.getResponseStatus().getStatusCode() != HttpStatus.SC_ACCEPTED) {
String msg = "Service ["+serviceId+"] deletion failed. Error: "+
res.getResponseStatus().getReasonPhrase();
log.error(msg);
throw new KubernetesClientException(msg);
}
} catch (Exception e) {
String msg = "Error while retrieving Service info of Service ID: "+serviceId;
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
}
| src/main/java/org/apache/stratos/kubernetes/api/client/KubernetesApiClient.java | /*
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.stratos.kubernetes.api.client;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.HttpStatus;
import org.apache.stratos.kubernetes.api.client.interfaces.KubernetesAPIClientInterface;
import org.apache.stratos.kubernetes.api.exceptions.KubernetesClientException;
import org.apache.stratos.kubernetes.api.model.Pod;
import org.apache.stratos.kubernetes.api.model.PodList;
import org.apache.stratos.kubernetes.api.model.ReplicationController;
import org.apache.stratos.kubernetes.api.model.ReplicationControllerList;
import org.apache.stratos.kubernetes.api.model.Service;
import org.apache.stratos.kubernetes.api.model.ServiceList;
import org.jboss.resteasy.client.ClientRequest;
import org.jboss.resteasy.client.ClientResponse;
public class KubernetesApiClient implements KubernetesAPIClientInterface {
private String endpointUrl;
private static final Log log = LogFactory.getLog(KubernetesApiClient.class);
public KubernetesApiClient(String endpointUrl) {
this.endpointUrl = endpointUrl;
}
@Override
public Pod getPod(String podId) throws KubernetesClientException{
try {
ClientRequest request = new ClientRequest(endpointUrl+"pods/{podId}");
ClientResponse<Pod> res = request.pathParameter("podId", podId).get(Pod.class);
if (res.getEntity() == null ) {
String msg = "Pod ["+podId+"] doesn't exist.";
log.error(msg);
throw new KubernetesClientException(msg);
}
return res.getEntity();
} catch (Exception e) {
String msg = "Error while retrieving Pod info with Pod ID: "+podId;
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
@Override
public PodList getAllPods() throws KubernetesClientException {
try {
ClientRequest request = new ClientRequest(endpointUrl+"pods/");
ClientResponse<PodList> res = request.get(PodList.class);
if (res.getEntity() == null ) {
return new PodList();
}
PodList podList = new PodList();
podList.setItems(res.getEntity().getItems());
return podList;
} catch (Exception e) {
String msg = "Error while retrieving Pods.";
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
@Override
public void createPod(Pod pod) throws KubernetesClientException {
try {
ClientRequest request = new ClientRequest(endpointUrl+"pods");
ClientResponse<?> res = request.body("application/json", pod).post();
if (res.getResponseStatus().getStatusCode() != HttpStatus.SC_ACCEPTED) {
String msg = "Pod ["+pod+"] creation failed. Error: "+
res.getResponseStatus().getReasonPhrase();
log.error(msg);
throw new KubernetesClientException(msg);
}
} catch (Exception e) {
String msg = "Error while creating Pod: "+pod;
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
@Override
public void deletePod(String podId) throws KubernetesClientException {
try {
ClientRequest request = new ClientRequest(endpointUrl+"pods/{podId}");
ClientResponse<?> res = request.pathParameter("podId", podId).delete();
if (res.getResponseStatus().getStatusCode() != HttpStatus.SC_ACCEPTED) {
String msg = "Pod ["+podId+"] deletion failed. Error: "+
res.getResponseStatus().getReasonPhrase();
log.error(msg);
throw new KubernetesClientException(msg);
}
} catch (Exception e) {
String msg = "Error while retrieving Pod info of Pod ID: "+podId;
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
@Override
public ReplicationController getReplicationController(String controllerId)
throws KubernetesClientException {
try {
ClientRequest request = new ClientRequest(endpointUrl+"replicationControllers/{controllerId}");
ClientResponse<ReplicationController> res = request.pathParameter("controllerId", controllerId)
.get(ReplicationController.class);
if (res.getEntity() == null ) {
String msg = "Replication Controller ["+controllerId+"] doesn't exist.";
log.error(msg);
throw new KubernetesClientException(msg);
}
return res.getEntity();
} catch (Exception e) {
String msg = "Error while retrieving Replication Controller info with ID: "+controllerId;
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
@Override
public ReplicationController[] getAllReplicationControllers()
throws KubernetesClientException {
try {
ClientRequest request = new ClientRequest(endpointUrl+"replicationControllers/");
ClientResponse<ReplicationControllerList> res = request.get(ReplicationControllerList.class);
if (res.getEntity() == null ) {
return new ReplicationController[0];
}
return res.getEntity().getItems();
} catch (Exception e) {
String msg = "Error while retrieving Replication Controllers.";
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
@Override
public void createReplicationController(ReplicationController controller)
throws KubernetesClientException {
try {
ClientRequest request = new ClientRequest(endpointUrl
+ "replicationControllers/");
ClientResponse<?> res = request
.body("application/json", controller).post();
if (res.getResponseStatus().getStatusCode() != HttpStatus.SC_ACCEPTED) {
String msg = "Replication Controller [" + controller
+ "] creation failed. Error: "
+ res.getResponseStatus().getReasonPhrase();
log.error(msg);
throw new KubernetesClientException(msg);
}
} catch (Exception e) {
String msg = "Error while creating Replication Controller: "
+ controller;
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
@Override
public void deleteReplicationController(String controllerId)
throws KubernetesClientException {
try {
ClientRequest request = new ClientRequest(endpointUrl+"replicationControllers/{controllerId}");
ClientResponse<?> res = request.pathParameter("controllerId", controllerId).delete();
if (res.getResponseStatus().getStatusCode() != HttpStatus.SC_ACCEPTED) {
String msg = "Replication Controller ["+controllerId+"] deletion failed. Error: "+
res.getResponseStatus().getReasonPhrase();
log.error(msg);
throw new KubernetesClientException(msg);
}
} catch (Exception e) {
String msg = "Error while retrieving Replication Controller info of Controller ID: "+controllerId;
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
@Override
public Service getService(String serviceId)
throws KubernetesClientException {
try {
ClientRequest request = new ClientRequest(endpointUrl+"services/{serviceId}");
ClientResponse<Service> res = request.pathParameter("serviceId", serviceId).get(Service.class);
if (res.getEntity() == null ) {
String msg = "Service ["+serviceId+"] doesn't exist.";
log.error(msg);
throw new KubernetesClientException(msg);
}
return res.getEntity();
} catch (Exception e) {
String msg = "Error while retrieving Service info with Service ID: "+serviceId;
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
@Override
public ServiceList getAllServices() throws KubernetesClientException {
try {
ClientRequest request = new ClientRequest(endpointUrl+"services/");
ClientResponse<ServiceList> res = request.get(ServiceList.class);
if (res.getEntity() == null ) {
return new ServiceList();
}
ServiceList serviceList = new ServiceList();
serviceList.setItems(res.getEntity().getItems());
return serviceList;
} catch (Exception e) {
String msg = "Error while retrieving Services.";
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
@Override
public void createService(Service service) throws KubernetesClientException {
try {
ClientRequest request = new ClientRequest(endpointUrl+"services/");
ClientResponse<?> res = request.body("application/json", service).post();
if (res.getResponseStatus().getStatusCode() != HttpStatus.SC_ACCEPTED) {
String msg = "Service ["+service+"] creation failed. Error: "+
res.getResponseStatus().getReasonPhrase();
log.error(msg);
throw new KubernetesClientException(msg);
}
} catch (Exception e) {
String msg = "Error while creating the Service: "+service;
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
@Override
public void deleteService(String serviceId)
throws KubernetesClientException {
try {
ClientRequest request = new ClientRequest(endpointUrl+"services/{serviceId}");
ClientResponse<?> res = request.pathParameter("serviceId", serviceId).delete();
if (res.getResponseStatus().getStatusCode() != HttpStatus.SC_ACCEPTED) {
String msg = "Service ["+serviceId+"] deletion failed. Error: "+
res.getResponseStatus().getReasonPhrase();
log.error(msg);
throw new KubernetesClientException(msg);
}
} catch (Exception e) {
String msg = "Error while retrieving Service info of Service ID: "+serviceId;
log.error(msg, e);
throw new KubernetesClientException(msg, e);
}
}
}
| Both SC_OK and SC_ACCEPTED is ok.
| src/main/java/org/apache/stratos/kubernetes/api/client/KubernetesApiClient.java | Both SC_OK and SC_ACCEPTED is ok. |
|
Java | apache-2.0 | 5c0b6c19d397f81bff5becefbdee4439f018667e | 0 | manolo/components,shahrzadmn/vaadin-grid,manolo/components,shahrzadmn/vaadin-grid,shahrzadmn/vaadin-grid,jforge/components,manolo/components,shahrzadmn/vaadin-grid,jforge/components,jforge/components | package com.vaadin.prototype.wc.gwt.client.widgets;
import static com.google.gwt.query.client.GQuery.$;
import static com.google.gwt.query.client.GQuery.Widgets;
import static com.google.gwt.query.client.GQuery.console;
import static com.google.gwt.query.client.GQuery.window;
import static com.vaadin.prototype.wc.gwt.client.widgets.WCUtils.getAttrIntValue;
import static com.vaadin.prototype.wc.gwt.client.widgets.WCUtils.getAttrValue;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.core.client.JsArray;
import com.google.gwt.core.client.JsArrayInteger;
import com.google.gwt.core.client.JsArrayMixed;
import com.google.gwt.core.client.js.JsExport;
import com.google.gwt.core.client.js.JsNoExport;
import com.google.gwt.core.client.js.JsProperty;
import com.google.gwt.core.client.js.JsType;
import com.google.gwt.dom.client.Element;
import com.google.gwt.event.logical.shared.AttachEvent;
import com.google.gwt.event.logical.shared.AttachEvent.Handler;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.query.client.Function;
import com.google.gwt.query.client.GQ;
import com.google.gwt.query.client.GQuery;
import com.google.gwt.query.client.Properties;
import com.google.gwt.query.client.js.JsUtils;
import com.google.gwt.query.client.plugin.Observe;
import com.google.gwt.query.client.plugin.Observe.MutationListener;
import com.google.gwt.query.client.plugin.Observe.MutationRecords.MutationRecord;
import com.google.gwt.regexp.shared.MatchResult;
import com.google.gwt.regexp.shared.RegExp;
import com.google.gwt.user.client.Event;
import com.google.gwt.user.client.EventListener;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.ui.Panel;
import com.google.gwt.user.client.ui.Widget;
import com.vaadin.client.JsArrayObject;
import com.vaadin.client.data.DataSource;
import com.vaadin.client.renderers.Renderer;
import com.vaadin.client.widget.escalator.ColumnConfiguration;
import com.vaadin.client.widget.escalator.RowContainer;
import com.vaadin.client.widget.grid.RendererCellReference;
import com.vaadin.client.widget.grid.datasources.ListDataSource;
import com.vaadin.client.widget.grid.selection.SelectionEvent;
import com.vaadin.client.widget.grid.selection.SelectionHandler;
import com.vaadin.client.widget.grid.selection.SelectionModel;
import com.vaadin.client.widget.grid.selection.SelectionModelMulti;
import com.vaadin.client.widget.grid.selection.SelectionModelSingle;
import com.vaadin.client.widgets.Escalator;
import com.vaadin.client.widgets.Grid;
import com.vaadin.client.widgets.Grid.HeaderCell;
import com.vaadin.client.widgets.Grid.HeaderRow;
import com.vaadin.client.widgets.Grid.SelectionMode;
import com.vaadin.prototype.wc.gwt.client.html.HTMLElement;
import com.vaadin.prototype.wc.gwt.client.html.HTMLEvents;
import com.vaadin.prototype.wc.gwt.client.html.HTMLShadow;
import com.vaadin.prototype.wc.gwt.client.html.HTMLTableElement;
import com.vaadin.prototype.wc.gwt.client.ui.ElementResizeEvent;
import com.vaadin.prototype.wc.gwt.client.ui.ElementResizeListener;
import com.vaadin.prototype.wc.gwt.client.ui.ElementResizeManager;
import com.vaadin.prototype.wc.gwt.client.util.Elements;
import com.vaadin.prototype.wc.gwt.client.widgets.grid.GData;
import com.vaadin.prototype.wc.gwt.client.widgets.grid.GData.GColumn;
import com.vaadin.prototype.wc.gwt.client.widgets.grid.GData.GColumn.GHeader;
import com.vaadin.prototype.wc.gwt.client.widgets.grid.GData.GColumn.GHeader.Format;
import com.vaadin.prototype.wc.gwt.client.widgets.grid.GDataSource;
import com.vaadin.prototype.wc.gwt.client.widgets.grid.GJsFuncDataSource;
import com.vaadin.prototype.wc.gwt.client.widgets.grid.GJsObjectDataSource;
import com.vaadin.prototype.wc.gwt.client.widgets.grid.GRestDataSource;
import com.vaadin.shared.ui.grid.GridState;
import com.vaadin.shared.ui.grid.HeightMode;
@JsExport
@JsType
public class WCVGrid extends HTMLTableElement.Prototype implements
HTMLElement.LifeCycle.Created, HTMLElement.LifeCycle.Attached,
HTMLElement.LifeCycle.Changed, ValueChangeHandler<Double>, Handler,
SelectionHandler<JsArrayMixed>, MutationListener {
public static final String TAG = "v-grid";
// FIXME: figure out a way to reuse grid.
private Grid<JsArrayMixed> grid;
private HTMLEvents selectEvent;
private HTMLElement container;
private HTMLElement style;
private boolean initialized = false;
public List<GColumn> cols;
private List<JsArrayMixed> vals;
private boolean changed = true;
    // FIXME: using the "columns" name here makes this fail in prod mode
private List<Grid.Column<Object, JsArrayMixed>> gridColumns;
// We save the original content of the Light-DOM because polyfills remove it
private Observe lightDom;
    // TODO: we should set this from JS along with the datasource.
private int size = 0;
private int headerDefaultRowIndex = 0;
private ElementResizeListener resizeListener;
public WCVGrid() {
// FIXME: If there is no default constructor JsInterop does not export
// anything
}
@Override
public void createdCallback() {
style = Elements.create("style");
style.setAttribute("language", "text/css");
selectEvent = Elements.document.createEvent("HTMLEvents");
selectEvent.initEvent("select", false, false);
selectEvent.srcElement(this);
container = Elements.create("div");
cols = new ArrayList<GColumn>();
vals = new ArrayList<JsArrayMixed>();
gridColumns = new ArrayList<>();
grid = new Grid<JsArrayMixed>();
grid.addSelectionHandler(this);
}
/*
* TODO: common stuff for exporting other widgets
*/
private void initWidgetSystem() {
if (!initialized) {
lightDom = $(this)
// this is the table inside the v-grid
.children()
// hide it, otherwise it's visible if shadow is not used
.hide()
// observe all mutations in the table
.as(Observe.Observe)
.observe(
Observe.createInit().attributes(true)
.characterData(true).childList(true)
.subtree(true), this);
}
if (!initialized) {
initialized = true;
Widget elementWidget = $(this).widget();
if (elementWidget == null) {
elementWidget = $(this).as(Widgets).panel().widget();
}
elementWidget.addAttachHandler(this);
if (WCUtils.getAttrBooleanValue(this, "shadow", false)) {
HTMLShadow shadow = createShadowRoot();
shadow.appendChild(style);
shadow.appendChild(container);
} else {
appendChild(style);
appendChild(container);
}
Panel shadowPanel = $(container).as(Widgets).panel().widget();
shadowPanel.add(grid);
}
}
@Override
public void attachedCallback() {
initWidgetSystem();
readAttributes();
}
@JsNoExport
public void initGrid() {
if (!changed) {
return;
}
changed = false;
DataSource<JsArrayMixed> dataSource = null;
dataSource = grid.getDataSource();
if (grid.getSelectionModel() instanceof SelectionModelSingle
&& $(this).attr("selectionMode").equals("multi")) {
grid.setSelectionMode(SelectionMode.MULTI);
} else if (grid.getSelectionModel() instanceof SelectionModelMulti
&& !$(this).attr("selectionMode").equals("multi")) {
grid.setSelectionMode(SelectionMode.SINGLE);
}
while (gridColumns.size() > 0) {
grid.removeColumn(gridColumns.remove(0));
}
if (cols != null) {
for (int i = 0, l = cols.size(); i < l; i++) {
GColumn c = cols.get(i);
Grid.Column<Object, JsArrayMixed> col;
col = createGridColumn(c, i);
grid.addColumn(col);
gridColumns.add(col);
for (int j = 0; j < c.headerData().size(); j++) {
if (grid.getHeaderRowCount() < c.headerData().size()) {
grid.appendHeaderRow();
}
GHeader header = c.headerData().get(j);
int offset = 0;
for (int k = 0; k <= j + offset; k++) {
HeaderRow row = grid.getHeaderRow(k);
if (i != 0 &&
row.getCell(grid.getColumn(i-1)).getColspan() != 1) {
offset++;
}
}
HeaderCell cell = grid.getHeaderRow(j + offset)
.getCell(col);
cell.setColspan(header.colSpan());
Object content = header.content();
switch (header.format()) {
case HTML:
cell.setHtml((String) content);
break;
case WIDGET:
cell.setWidget((Widget) content);
break;
case TEXT:
cell.setText((String) content);
break;
}
}
}
grid.setDefaultHeaderRow(grid.getHeaderRow(headerDefaultRowIndex));
}
loadRows();
if (vals != null && !vals.isEmpty()) {
dataSource = new ListDataSource<JsArrayMixed>(vals);
}
if (dataSource != null) {
grid.setDataSource(dataSource);
}
// needed in case the style isn't loaded yet
resizeListener = ElementResizeManager.addResizeListener(
grid.getElement(), new ElementResizeListener() {
@Override
public void onElementResize(ElementResizeEvent event) {
int rowCount = size;
if (rowCount == 0) {
if (vals != null) {
rowCount = vals.size();
} else if (grid.getDataSource() != null) {
rowCount = grid.getDataSource().size();
}
}
adjustHeight(rowCount);
}
});
}
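    // Renderer contract inferred from render() below: a column's renderer() may
    // be a plain JS function invoked as fn(element, data, rowIndex), or a JS
    // object exposing init(element) and render(element, data); otherwise the
    // cell falls back to the {{data}} template or plain innerHTML.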
public static Grid.Column<Object, JsArrayMixed> createGridColumn(
final GColumn gColumn, final int idx) {
final RegExp templateRegexp = RegExp.compile("\\{\\{data\\}\\}", "ig");
return new Grid.Column<Object, JsArrayMixed>(new Renderer<Object>() {
public void render(RendererCellReference cell, Object data) {
Object o = gColumn.renderer();
Element elm = cell.getElement();
if (o instanceof JavaScriptObject) {
if (JsUtils.isFunction((JavaScriptObject) o)) {
JsUtils.runJavascriptFunction((JavaScriptObject) o,
"call", o, elm, data, cell.getRow());
} else {
if ($(elm).data("init") == null) {
$(elm).data("init", true);
JsUtils.runJavascriptFunction((JavaScriptObject) o,
"init", elm);
}
JsUtils.runJavascriptFunction((JavaScriptObject) o,
"render", elm, data);
}
} else {
if (gColumn.template() != null) {
// FIXME: this implementation doesn't
// reuse any of the possible HTML tags
// included in the template.
elm.setInnerHTML(templateRegexp.replace(
gColumn.template(), String.valueOf(data)));
} else {
elm.setInnerHTML(String.valueOf(data));
}
}
}
}) {
@Override
public Object getValue(JsArrayMixed row) {
Object o = gColumn.value();
if (o instanceof JavaScriptObject
&& JsUtils.isFunction((JavaScriptObject) o)) {
o = JsUtils.runJavascriptFunction((JavaScriptObject) o,
"call", o, row, idx);
} else if (o instanceof String) {
o = JsUtils.prop(row, o);
} else {
if (JsUtils.isArray(row)) {
o = row.getObject(idx);
} else {
Properties p = row.cast();
o = p.getObject(p.keys()[idx]);
}
}
return o;
}
};
}
@Override
public void onValueChange(ValueChangeEvent<Double> ev) {
}
@Override
public void attributeChangedCallback() {
if (!refreshing) {
readAttributes();
}
}
private void readAttributes() {
WCUtils.loadVaadinTheme(container, this, style, null, new Function() {
public void f() {
console.log("LOADED...");
}
});
loadHeaders();
loadRows();
initGrid();
parseAttributeDeclarations();
setSelectedRow(getAttrIntValue(this, "selectedRow", -1));
String type = getAttrValue(this, "type", null);
String url = getAttrValue(this, "url", null);
if ("ajax".equals(type) && url != null) {
Properties p = Properties.create();
p.set("url", url);
setDataSource(p);
}
        // TODO: allow changing the selection mode when the
        // selectionMode attribute changes
}
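    // Attributes can bind to globals with mustache-style syntax: a hypothetical
    // dataSource="{{myData}}" resolves window.myData, which may be a function
    // (lazy datasource) or an array of row values.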
private void parseAttributeDeclarations() {
String dataPath = getAttribute("dataSource");
RegExp regex = RegExp.compile("\\{\\{\\s*(\\w+)\\s*\\}\\}");
MatchResult match = regex.exec(dataPath);
if (match != null) {
JavaScriptObject jso = JsUtils.prop(window, match.getGroup(1));
if (JsUtils.isFunction(jso)) {
String count = getAttribute("rowCount");
match = regex.exec(count);
if (match != null) {
count = "" + JsUtils.prop(window, match.getGroup(1));
}
if (count != null && count.matches("[\\d\\.\\+]+")) {
setRowCount(Integer.valueOf(count));
}
setDataSource(jso);
} else if (JsUtils.isArray(jso)) {
vals = GQ.create(GData.class).<GData> set("values", jso)
.values();
loadData();
} else {
console.log("Unknown type of datasource: " + jso);
}
}
}
private String lastHeaders = null;
private void loadHeaders() {
GQuery $theadRows = lightDom.find("thead tr");
String txt = $theadRows.toString();
if ($theadRows.isEmpty() || txt.equals(lastHeaders)) {
return;
}
lastHeaders = txt;
List<GColumn> colList = new ArrayList<GColumn>();
Map<GColumn, List<GHeader>> contentsMap = new HashMap<GColumn, List<GHeader>>();
headerDefaultRowIndex = $theadRows.index(lightDom.find("tr[default]")
.get(0));
if (headerDefaultRowIndex == -1) {
headerDefaultRowIndex = 0;
}
for (int i = 0; i < $theadRows.size(); i++) {
GQuery $ths = $theadRows.eq(i).children("th");
while (colList.size() < $ths.size()) {
GColumn column = GQ.create(GColumn.class);
contentsMap.put(column, new ArrayList<GHeader>());
colList.add(column);
}
}
for (int i = 0; i < $theadRows.size(); i++) {
GQuery $ths = $theadRows.eq(i).children("th");
int colOffset = 0;
for (int j = 0; j < $ths.size(); j++) {
GColumn column = colList.get(j + colOffset);
GHeader header = GQ.create(GHeader.class);
GQuery $th = $ths.eq(j);
column.setValue($th.attr("name"));
int colSpan = 1;
String colString = $th.attr("colspan");
if (!colString.isEmpty()) {
colSpan = Integer.parseInt(colString);
colOffset += colSpan - 1;
}
// FIXME: Assuming format to be HTML, should we detect
// between simple text and HTML contents?
header.setColSpan(colSpan).setContent($th.html())
.setFormat(Format.HTML);
contentsMap.get(column).add(header);
}
}
Iterator<GColumn> iterator = contentsMap.keySet().iterator();
        // When we don't use shadow, the component could sometimes
        // have been rendered previously.
lightDom.find("div[v-wc-container]").remove();
GQuery $templateRow = lightDom.find("tr[template] td");
for (int i = 0; iterator.hasNext(); i++) {
GColumn column = iterator.next();
column.setHeaderData(contentsMap.get(column));
if (i < $templateRow.size()) {
String html = $templateRow.eq(i).html();
column.setTemplate(html);
}
}
setCols(colList);
}
private void loadRows() {
GQuery $tr = lightDom.find("tbody tr:not([template])");
if (!$tr.isEmpty()) {
setVals(new ArrayList<JsArrayMixed>());
for (Element tr : $tr.elements()) {
JsArrayMixed a = JsArrayMixed.createArray().cast();
vals.add(a);
GQuery $td = $(tr).find("td");
for (int i = 0; i < $td.size(); i++) {
a.push($td.eq(i).html());
}
}
}
}
@Override
public void onAttachOrDetach(AttachEvent event) {
        // TODO: Do something with shadowPanel; right now
        // gQuery creates a new root-panel so it does not
        // have any parent, but we should maintain the widget
        // hierarchy somehow.
}
@Override
public void onMutation(List<MutationRecord> mutations) {
readAttributes();
}
private void loadData() {
if (vals != null && !vals.isEmpty()) {
grid.setDataSource(new ListDataSource<JsArrayMixed>(vals));
}
}
@JsNoExport
public void setCols(List<GColumn> cols) {
changed = true;
this.cols = cols;
}
@JsNoExport
public List<GColumn> getCols() {
return cols;
}
private void setVals(List<JsArrayMixed> vals) {
changed = true;
this.vals = vals;
}
@JsNoExport
private void adjustHeight(int size) {
// TODO: fix this in Grid, it seems this only works with reindeer
if (Window.Location.getParameter("resize") != null && size > 0) {
this.size = size;
grid.setHeightMode(HeightMode.ROW);
grid.setHeightByRows(Math.min(size,
GridState.DEFAULT_HEIGHT_BY_ROWS));
}
}
@JsNoExport
public void adjustHeight() {
size = grid.getDataSource().size();
adjustHeight(size);
}
@JsNoExport
public Grid<JsArrayMixed> getGrid() {
if (grid == null) {
changed = true;
initGrid();
}
return grid;
}
@Override
public void onSelect(SelectionEvent<JsArrayMixed> ev) {
if (!refreshing) {
refreshing = true;
dispatchEvent(selectEvent);
setAttribute("selectedRow", ""
+ (getSelectedRow() < 0 ? "" : getSelectedRow()));
refreshing = false;
}
}
    public void setColumnWidth(int column, int width) {
        grid.getColumn(column).setWidth(width);
    }
// TODO:
    // @JsProperty does not seem to export these methods right now.
// We use a magic function name 'jsProperty...' to mark these methods as
// JS properties when mixing the prototype in Elements.
public void jsPropertyRowCount() {
};
@JsProperty
public void setRowCount(double rows) {
size = (int) rows;
adjustHeight(size);
}
@JsProperty
public double getRowCount() {
return size;
}
public void jsPropertyHeightMode() {
};
@JsProperty
public String getHeightMode() {
return grid.getHeightMode().toString();
}
@JsProperty
public void setHeightMode(String mode) {
grid.setHeightMode(HeightMode.valueOf(mode));
}
public void jsPropertyHeight() {
};
@JsProperty
public void setHeight(String height) {
grid.setHeight(height);
}
public void jsPropertyDataSource() {
};
@JsProperty
public void setDataSource(JavaScriptObject jso) {
if (JsUtils.isFunction(jso)) {
grid.setDataSource(new GJsFuncDataSource(jso, size, this));
} else if (JsUtils.isArray(jso)) {
loadHeaders();
grid.setDataSource(new GJsObjectDataSource(jso
.<JsArray<JavaScriptObject>> cast(), this));
} else if (JsUtils.prop(jso, "url") != null) {
loadHeaders();
@SuppressWarnings("unused")
GRestDataSource d = new GRestDataSource(jso, this);
} else {
throw new RuntimeException("Unknown jso: " + jso);
}
}
boolean refreshing = false;
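    // 'refreshing' guards against feedback loops: onSelect() sets it while
    // mirroring the selection into the selectedRow attribute, and
    // attributeChangedCallback() skips re-reading attributes in the meantime.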
public void refresh() {
if ((grid.getDataSource() instanceof GDataSource)) {
final int a = getSelectedRow();
((GDataSource) grid.getDataSource()).refresh();
if (a > 0) {
refreshing = true;
$(this).delay(5, new Function() {
@Override
public void f() {
setSelectedRow(a);
refreshing = false;
}
});
}
} else if (grid.getDataSource() != null) {
grid.setDataSource(grid.getDataSource());
}
}
@JsProperty
public JavaScriptObject getDataSource() {
return JavaScriptObject.createFunction();
}
public void jsPropertyColumns() {
};
// Array of JSO representing column configuration
// used in JS to change renderers.
private JsArrayObject<JavaScriptObject> columnsJso;
@JsProperty
public JavaScriptObject getColumns() {
// remove old observers
if (columnsJso != null) {
for (int i = 0, l = columnsJso.size(); i < l; i++) {
WCUtils.unobserve(columnsJso.get(i));
}
}
// Using GQuery data-binding magic to convert list to js arrays.
columnsJso = GQ.create(GData.class).setColumns(cols).get("columns");
        // Add observers to each column configuration object so that changes made from JS trigger a refresh.
for (int i = 0, l = columnsJso.size(); i < l; i++) {
WCUtils.observe(columnsJso.get(i), new EventListener() {
public void onBrowserEvent(Event event) {
refresh();
}
});
}
return columnsJso;
}
@JsProperty
public void setColumns(JavaScriptObject newCols) {
changed = true;
cols = GQ.create(GData.class).<GData> set("columns", newCols).columns();
}
public void jsPropertySelectedRow() {
};
@JsProperty
public int getSelectedRow() {
return grid == null
|| grid.getSelectionModel() == null
|| !(grid.getSelectionModel() instanceof SelectionModel.Single<?>)
|| grid.getSelectedRow() == null ? -1 : grid.getDataSource()
.indexOf(grid.getSelectedRow());
}
@JsProperty
public void setSelectedRow(int idx) {
if (idx < 0 || idx >= grid.getDataSource().size()) {
if (getSelectedRow() >= 0) {
grid.deselect(grid.getDataSource().getRow(getSelectedRow()));
}
} else {
grid.select(grid.getDataSource().getRow(idx));
}
onSelect(null);
}
public void jsPropertySelectedRows() {
};
    // Array of selected indexes returned to JS.
    // We observe it so that when JS makes changes we update
    // the grid selection.
private JsArrayInteger selectedJso;
private boolean selectedLock;
@JsProperty
public void setSelectedRows(JsArrayInteger arr) {
if (arr != selectedJso) {
WCUtils.unobserve(selectedJso);
}
selectedJso = arr;
selectedLock = true;
grid.getSelectionModel().reset();
for (int i = 0, l = selectedJso.length(); i < l; i++) {
grid.select(grid.getDataSource().getRow(selectedJso.get(i)));
}
selectedLock = false;
}
@JsProperty
public JsArrayInteger getSelectedRows() {
if (!selectedLock) {
if (selectedJso == null) {
selectedJso = JsArrayInteger.createArray().cast();
}
selectedJso.setLength(0);
Collection<JsArrayMixed> c = grid.getSelectedRows();
for (Iterator<JsArrayMixed> i = c.iterator(); i.hasNext();) {
selectedJso.push(grid.getDataSource().indexOf(i.next()));
}
WCUtils.unobserve(selectedJso);
WCUtils.observe(selectedJso, new EventListener() {
public void onBrowserEvent(Event event) {
setSelectedRows(selectedJso);
}
});
}
return selectedJso;
}
public void jsPropertyTheme() {
}
@JsProperty
public void setTheme(String value) {
setAttribute("theme", value);
}
@JsProperty
    public String getTheme() {
        return getAttribute("theme");
}
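    // redraw() reaches into Grid/Escalator private state via the JSNI helpers
    // below to force row-height re-detection and to re-apply column widths;
    // this is a workaround rather than public Grid API.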
public void redraw() {
Escalator e = e(grid);
c(e.getHeader());
c(e.getFooter());
c(e.getBody());
ColumnConfiguration columnConfiguration = f(e);
for (int i = 0; i < columnConfiguration.getColumnCount(); i++) {
columnConfiguration.setColumnWidth(i, columnConfiguration.getColumnWidth(i));
}
}
private static native Escalator e(Grid<?> g) /*-{
return [email protected]::escalator;
}-*/;
private static native Escalator c(RowContainer r) /*-{
[email protected]::defaultRowHeightShouldBeAutodetected = true;
[email protected]::autodetectRowHeightLater()();
}-*/;
private static native ColumnConfiguration f(Escalator e) /*-{
return [email protected]::columnConfiguration;
}-*/;
}
| wc-client/src/main/java/com/vaadin/prototype/wc/gwt/client/widgets/WCVGrid.java | package com.vaadin.prototype.wc.gwt.client.widgets;
import static com.google.gwt.query.client.GQuery.$;
import static com.google.gwt.query.client.GQuery.Widgets;
import static com.google.gwt.query.client.GQuery.console;
import static com.google.gwt.query.client.GQuery.window;
import static com.vaadin.prototype.wc.gwt.client.widgets.WCUtils.getAttrIntValue;
import static com.vaadin.prototype.wc.gwt.client.widgets.WCUtils.getAttrValue;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.core.client.JsArray;
import com.google.gwt.core.client.JsArrayInteger;
import com.google.gwt.core.client.JsArrayMixed;
import com.google.gwt.core.client.js.JsExport;
import com.google.gwt.core.client.js.JsNoExport;
import com.google.gwt.core.client.js.JsProperty;
import com.google.gwt.core.client.js.JsType;
import com.google.gwt.dom.client.Element;
import com.google.gwt.event.logical.shared.AttachEvent;
import com.google.gwt.event.logical.shared.AttachEvent.Handler;
import com.google.gwt.event.logical.shared.ValueChangeEvent;
import com.google.gwt.event.logical.shared.ValueChangeHandler;
import com.google.gwt.query.client.Function;
import com.google.gwt.query.client.GQ;
import com.google.gwt.query.client.GQuery;
import com.google.gwt.query.client.Properties;
import com.google.gwt.query.client.js.JsUtils;
import com.google.gwt.query.client.plugin.Observe;
import com.google.gwt.query.client.plugin.Observe.MutationListener;
import com.google.gwt.query.client.plugin.Observe.MutationRecords.MutationRecord;
import com.google.gwt.regexp.shared.MatchResult;
import com.google.gwt.regexp.shared.RegExp;
import com.google.gwt.user.client.Event;
import com.google.gwt.user.client.EventListener;
import com.google.gwt.user.client.Window;
import com.google.gwt.user.client.ui.Panel;
import com.google.gwt.user.client.ui.Widget;
import com.vaadin.client.JsArrayObject;
import com.vaadin.client.data.DataSource;
import com.vaadin.client.renderers.Renderer;
import com.vaadin.client.widget.escalator.ColumnConfiguration;
import com.vaadin.client.widget.escalator.RowContainer;
import com.vaadin.client.widget.grid.RendererCellReference;
import com.vaadin.client.widget.grid.datasources.ListDataSource;
import com.vaadin.client.widget.grid.selection.SelectionEvent;
import com.vaadin.client.widget.grid.selection.SelectionHandler;
import com.vaadin.client.widget.grid.selection.SelectionModel;
import com.vaadin.client.widget.grid.selection.SelectionModelMulti;
import com.vaadin.client.widget.grid.selection.SelectionModelSingle;
import com.vaadin.client.widgets.Escalator;
import com.vaadin.client.widgets.Grid;
import com.vaadin.client.widgets.Grid.HeaderCell;
import com.vaadin.client.widgets.Grid.HeaderRow;
import com.vaadin.client.widgets.Grid.SelectionMode;
import com.vaadin.prototype.wc.gwt.client.html.HTMLElement;
import com.vaadin.prototype.wc.gwt.client.html.HTMLEvents;
import com.vaadin.prototype.wc.gwt.client.html.HTMLShadow;
import com.vaadin.prototype.wc.gwt.client.html.HTMLTableElement;
import com.vaadin.prototype.wc.gwt.client.ui.ElementResizeEvent;
import com.vaadin.prototype.wc.gwt.client.ui.ElementResizeListener;
import com.vaadin.prototype.wc.gwt.client.ui.ElementResizeManager;
import com.vaadin.prototype.wc.gwt.client.util.Elements;
import com.vaadin.prototype.wc.gwt.client.widgets.grid.GData;
import com.vaadin.prototype.wc.gwt.client.widgets.grid.GData.GColumn;
import com.vaadin.prototype.wc.gwt.client.widgets.grid.GData.GColumn.GHeader;
import com.vaadin.prototype.wc.gwt.client.widgets.grid.GData.GColumn.GHeader.Format;
import com.vaadin.prototype.wc.gwt.client.widgets.grid.GDataSource;
import com.vaadin.prototype.wc.gwt.client.widgets.grid.GJsFuncDataSource;
import com.vaadin.prototype.wc.gwt.client.widgets.grid.GJsObjectDataSource;
import com.vaadin.prototype.wc.gwt.client.widgets.grid.GRestDataSource;
import com.vaadin.shared.ui.grid.GridState;
import com.vaadin.shared.ui.grid.HeightMode;
@JsExport
@JsType
public class WCVGrid extends HTMLTableElement.Prototype implements
HTMLElement.LifeCycle.Created, HTMLElement.LifeCycle.Attached,
HTMLElement.LifeCycle.Changed, ValueChangeHandler<Double>, Handler,
SelectionHandler<JsArrayMixed>, MutationListener {
public static final String TAG = "v-grid";
// FIXME: figure out a way to reuse grid.
private Grid<JsArrayMixed> grid;
private HTMLEvents selectEvent;
private HTMLElement container;
private HTMLElement style;
private boolean initialized = false;
public List<GColumn> cols;
private List<JsArrayMixed> vals;
private boolean changed = true;
    // FIXME: using the "columns" name here makes this fail in prod mode
private List<Grid.Column<Object, JsArrayMixed>> gridColumns;
// We save the original content of the Light-DOM because polyfills remove it
private Observe lightDom;
    // TODO: we should set this from JS along with the datasource.
private int size = 0;
private int headerDefaultRowIndex = 0;
private ElementResizeListener resizeListener;
public WCVGrid() {
// FIXME: If there is no default constructor JsInterop does not export
// anything
}
@Override
public void createdCallback() {
style = Elements.create("style");
style.setAttribute("language", "text/css");
selectEvent = Elements.document.createEvent("HTMLEvents");
selectEvent.initEvent("select", false, false);
selectEvent.srcElement(this);
container = Elements.create("div");
cols = new ArrayList<GColumn>();
vals = new ArrayList<JsArrayMixed>();
gridColumns = new ArrayList<>();
grid = new Grid<JsArrayMixed>();
grid.addSelectionHandler(this);
}
/*
* TODO: common stuff for exporting other widgets
*/
private void initWidgetSystem() {
if (!initialized) {
lightDom = $(this)
// this is the table inside the v-grid
.children()
// hide it, otherwise it's visible if shadow is not used
.hide()
// observe all mutations in the table
.as(Observe.Observe)
.observe(
Observe.createInit().attributes(true)
.characterData(true).childList(true)
.subtree(true), this);
}
if (!initialized) {
initialized = true;
Widget elementWidget = $(this).widget();
if (elementWidget == null) {
elementWidget = $(this).as(Widgets).panel().widget();
}
elementWidget.addAttachHandler(this);
if (WCUtils.getAttrBooleanValue(this, "shadow", false)) {
HTMLShadow shadow = createShadowRoot();
shadow.appendChild(style);
shadow.appendChild(container);
} else {
appendChild(style);
appendChild(container);
}
Panel shadowPanel = $(container).as(Widgets).panel().widget();
shadowPanel.add(grid);
}
}
@Override
public void attachedCallback() {
initWidgetSystem();
readAttributes();
}
@JsNoExport
public void initGrid() {
if (!changed) {
return;
}
changed = false;
DataSource<JsArrayMixed> dataSource = null;
dataSource = grid.getDataSource();
if (grid.getSelectionModel() instanceof SelectionModelSingle
&& $(this).attr("selectionMode").equals("multi")) {
grid.setSelectionMode(SelectionMode.MULTI);
} else if (grid.getSelectionModel() instanceof SelectionModelMulti
&& !$(this).attr("selectionMode").equals("multi")) {
grid.setSelectionMode(SelectionMode.SINGLE);
}
while (gridColumns.size() > 0) {
grid.removeColumn(gridColumns.remove(0));
}
if (cols != null) {
for (int i = 0, l = cols.size(); i < l; i++) {
GColumn c = cols.get(i);
Grid.Column<Object, JsArrayMixed> col;
col = createGridColumn(c, i);
grid.addColumn(col);
gridColumns.add(col);
for (int j = 0; j < c.headerData().size(); j++) {
if (grid.getHeaderRowCount() < c.headerData().size()) {
grid.appendHeaderRow();
}
GHeader header = c.headerData().get(j);
int offset = 0;
for (int k = 0; k <= j + offset; k++) {
HeaderRow row = grid.getHeaderRow(k);
if (row.getCell(grid.getColumn(i)).getColspan() != 1) {
offset++;
}
}
HeaderCell cell = grid.getHeaderRow(j + offset)
.getCell(col);
cell.setColspan(header.colSpan());
Object content = header.content();
switch (header.format()) {
case HTML:
cell.setHtml((String) content);
break;
case WIDGET:
cell.setWidget((Widget) content);
break;
case TEXT:
cell.setText((String) content);
break;
}
}
}
grid.setDefaultHeaderRow(grid.getHeaderRow(headerDefaultRowIndex));
}
loadRows();
if (vals != null && !vals.isEmpty()) {
dataSource = new ListDataSource<JsArrayMixed>(vals);
}
if (dataSource != null) {
grid.setDataSource(dataSource);
}
// needed in case the style isn't loaded yet
resizeListener = ElementResizeManager.addResizeListener(
grid.getElement(), new ElementResizeListener() {
@Override
public void onElementResize(ElementResizeEvent event) {
int rowCount = size;
if (rowCount == 0) {
if (vals != null) {
rowCount = vals.size();
} else if (grid.getDataSource() != null) {
rowCount = grid.getDataSource().size();
}
}
adjustHeight(rowCount);
}
});
}
public static Grid.Column<Object, JsArrayMixed> createGridColumn(
final GColumn gColumn, final int idx) {
final RegExp templateRegexp = RegExp.compile("\\{\\{data\\}\\}", "ig");
return new Grid.Column<Object, JsArrayMixed>(new Renderer<Object>() {
public void render(RendererCellReference cell, Object data) {
Object o = gColumn.renderer();
Element elm = cell.getElement();
if (o instanceof JavaScriptObject) {
if (JsUtils.isFunction((JavaScriptObject) o)) {
JsUtils.runJavascriptFunction((JavaScriptObject) o,
"call", o, elm, data, cell.getRow());
} else {
if ($(elm).data("init") == null) {
$(elm).data("init", true);
JsUtils.runJavascriptFunction((JavaScriptObject) o,
"init", elm);
}
JsUtils.runJavascriptFunction((JavaScriptObject) o,
"render", elm, data);
}
} else {
if (gColumn.template() != null) {
// FIXME: this implementation doesn't
// reuse any of the possible HTML tags
// included in the template.
elm.setInnerHTML(templateRegexp.replace(
gColumn.template(), String.valueOf(data)));
} else {
elm.setInnerHTML(String.valueOf(data));
}
}
}
}) {
@Override
public Object getValue(JsArrayMixed row) {
Object o = gColumn.value();
if (o instanceof JavaScriptObject
&& JsUtils.isFunction((JavaScriptObject) o)) {
o = JsUtils.runJavascriptFunction((JavaScriptObject) o,
"call", o, row, idx);
} else if (o instanceof String) {
o = JsUtils.prop(row, o);
} else {
if (JsUtils.isArray(row)) {
o = row.getObject(idx);
} else {
Properties p = row.cast();
o = p.getObject(p.keys()[idx]);
}
}
return o;
}
};
}
@Override
public void onValueChange(ValueChangeEvent<Double> ev) {
}
@Override
public void attributeChangedCallback() {
if (!refreshing) {
readAttributes();
}
}
private void readAttributes() {
WCUtils.loadVaadinTheme(container, this, style, null, new Function() {
public void f() {
console.log("LOADED...");
}
});
loadHeaders();
loadRows();
initGrid();
parseAttributeDeclarations();
setSelectedRow(getAttrIntValue(this, "selectedRow", -1));
String type = getAttrValue(this, "type", null);
String url = getAttrValue(this, "url", null);
if ("ajax".equals(type) && url != null) {
Properties p = Properties.create();
p.set("url", url);
setDataSource(p);
}
        // TODO: allow changing the selection mode when the
        // selectionMode attribute changes
}
private void parseAttributeDeclarations() {
String dataPath = getAttribute("dataSource");
RegExp regex = RegExp.compile("\\{\\{\\s*(\\w+)\\s*\\}\\}");
MatchResult match = regex.exec(dataPath);
if (match != null) {
JavaScriptObject jso = JsUtils.prop(window, match.getGroup(1));
if (JsUtils.isFunction(jso)) {
String count = getAttribute("rowCount");
match = regex.exec(count);
if (match != null) {
count = "" + JsUtils.prop(window, match.getGroup(1));
}
if (count != null && count.matches("[\\d\\.\\+]+")) {
setRowCount(Integer.valueOf(count));
}
setDataSource(jso);
} else if (JsUtils.isArray(jso)) {
vals = GQ.create(GData.class).<GData> set("values", jso)
.values();
loadData();
} else {
console.log("Unknown type of datasource: " + jso);
}
}
}
private String lastHeaders = null;
private void loadHeaders() {
GQuery $theadRows = lightDom.find("thead tr");
String txt = $theadRows.toString();
if ($theadRows.isEmpty() || txt.equals(lastHeaders)) {
return;
}
lastHeaders = txt;
List<GColumn> colList = new ArrayList<GColumn>();
Map<GColumn, List<GHeader>> contentsMap = new HashMap<GColumn, List<GHeader>>();
headerDefaultRowIndex = $theadRows.index(lightDom.find("tr[default]")
.get(0));
if (headerDefaultRowIndex == -1) {
headerDefaultRowIndex = 0;
}
for (int i = 0; i < $theadRows.size(); i++) {
GQuery $ths = $theadRows.eq(i).children("th");
while (colList.size() < $ths.size()) {
GColumn column = GQ.create(GColumn.class);
contentsMap.put(column, new ArrayList<GHeader>());
colList.add(column);
}
}
for (int i = 0; i < $theadRows.size(); i++) {
GQuery $ths = $theadRows.eq(i).children("th");
int colOffset = 0;
for (int j = 0; j < $ths.size(); j++) {
GColumn column = colList.get(j + colOffset);
GHeader header = GQ.create(GHeader.class);
GQuery $th = $ths.eq(j);
column.setValue($th.attr("name"));
int colSpan = 1;
String colString = $th.attr("colspan");
if (!colString.isEmpty()) {
colSpan = Integer.parseInt(colString);
colOffset += colSpan - 1;
}
// FIXME: Assuming format to be HTML, should we detect
// between simple text and HTML contents?
header.setColSpan(colSpan).setContent($th.html())
.setFormat(Format.HTML);
contentsMap.get(column).add(header);
}
}
Iterator<GColumn> iterator = contentsMap.keySet().iterator();
        // When we don't use shadow, the component could sometimes
        // have been rendered previously.
lightDom.find("div[v-wc-container]").remove();
GQuery $templateRow = lightDom.find("tr[template] td");
for (int i = 0; iterator.hasNext(); i++) {
GColumn column = iterator.next();
column.setHeaderData(contentsMap.get(column));
if (i < $templateRow.size()) {
String html = $templateRow.eq(i).html();
column.setTemplate(html);
}
}
setCols(colList);
}
private void loadRows() {
GQuery $tr = lightDom.find("tbody tr:not([template])");
if (!$tr.isEmpty()) {
setVals(new ArrayList<JsArrayMixed>());
for (Element tr : $tr.elements()) {
JsArrayMixed a = JsArrayMixed.createArray().cast();
vals.add(a);
GQuery $td = $(tr).find("td");
for (int i = 0; i < $td.size(); i++) {
a.push($td.eq(i).html());
}
}
}
}
@Override
public void onAttachOrDetach(AttachEvent event) {
        // TODO: Do something with shadowPanel; right now
        // gQuery creates a new root-panel so it does not
        // have any parent, but we should maintain the widget
        // hierarchy somehow.
}
@Override
public void onMutation(List<MutationRecord> mutations) {
readAttributes();
}
private void loadData() {
if (vals != null && !vals.isEmpty()) {
grid.setDataSource(new ListDataSource<JsArrayMixed>(vals));
}
}
@JsNoExport
public void setCols(List<GColumn> cols) {
changed = true;
this.cols = cols;
}
@JsNoExport
public List<GColumn> getCols() {
return cols;
}
private void setVals(List<JsArrayMixed> vals) {
changed = true;
this.vals = vals;
}
@JsNoExport
private void adjustHeight(int size) {
// TODO: fix this in Grid, it seems this only works with reindeer
if (Window.Location.getParameter("resize") != null && size > 0) {
this.size = size;
grid.setHeightMode(HeightMode.ROW);
grid.setHeightByRows(Math.min(size,
GridState.DEFAULT_HEIGHT_BY_ROWS));
}
}
@JsNoExport
public void adjustHeight() {
size = grid.getDataSource().size();
adjustHeight(size);
}
@JsNoExport
public Grid<JsArrayMixed> getGrid() {
if (grid == null) {
changed = true;
initGrid();
}
return grid;
}
@Override
public void onSelect(SelectionEvent<JsArrayMixed> ev) {
if (!refreshing) {
refreshing = true;
dispatchEvent(selectEvent);
setAttribute("selectedRow", ""
+ (getSelectedRow() < 0 ? "" : getSelectedRow()));
refreshing = false;
}
}
    public void setColumnWidth(int column, int width) {
        grid.getColumn(column).setWidth(width);
    }
// TODO:
    // @JsProperty does not seem to export these methods right now.
// We use a magic function name 'jsProperty...' to mark these methods as
// JS properties when mixing the prototype in Elements.
public void jsPropertyRowCount() {
};
@JsProperty
public void setRowCount(double rows) {
size = (int) rows;
adjustHeight(size);
}
@JsProperty
public double getRowCount() {
return size;
}
public void jsPropertyHeightMode() {
};
@JsProperty
public String getHeightMode() {
return grid.getHeightMode().toString();
}
@JsProperty
public void setHeightMode(String mode) {
grid.setHeightMode(HeightMode.valueOf(mode));
}
public void jsPropertyHeight() {
};
@JsProperty
public void setHeight(String height) {
grid.setHeight(height);
}
public void jsPropertyDataSource() {
};
@JsProperty
public void setDataSource(JavaScriptObject jso) {
if (JsUtils.isFunction(jso)) {
grid.setDataSource(new GJsFuncDataSource(jso, size, this));
} else if (JsUtils.isArray(jso)) {
loadHeaders();
grid.setDataSource(new GJsObjectDataSource(jso
.<JsArray<JavaScriptObject>> cast(), this));
} else if (JsUtils.prop(jso, "url") != null) {
loadHeaders();
@SuppressWarnings("unused")
GRestDataSource d = new GRestDataSource(jso, this);
} else {
throw new RuntimeException("Unknown jso: " + jso);
}
}
boolean refreshing = false;
public void refresh() {
if ((grid.getDataSource() instanceof GDataSource)) {
final int a = getSelectedRow();
((GDataSource) grid.getDataSource()).refresh();
if (a > 0) {
refreshing = true;
$(this).delay(5, new Function() {
@Override
public void f() {
setSelectedRow(a);
refreshing = false;
}
});
}
} else if (grid.getDataSource() != null) {
grid.setDataSource(grid.getDataSource());
}
}
@JsProperty
public JavaScriptObject getDataSource() {
return JavaScriptObject.createFunction();
}
public void jsPropertyColumns() {
};
// Array of JSO representing column configuration
// used in JS to change renderers.
private JsArrayObject<JavaScriptObject> columnsJso;
@JsProperty
public JavaScriptObject getColumns() {
// remove old observers
if (columnsJso != null) {
for (int i = 0, l = columnsJso.size(); i < l; i++) {
WCUtils.unobserve(columnsJso.get(i));
}
}
// Using GQuery data-binding magic to convert list to js arrays.
columnsJso = GQ.create(GData.class).setColumns(cols).get("columns");
        // Add observers to each column configuration object so that changes made from JS trigger a refresh.
for (int i = 0, l = columnsJso.size(); i < l; i++) {
WCUtils.observe(columnsJso.get(i), new EventListener() {
public void onBrowserEvent(Event event) {
refresh();
}
});
}
return columnsJso;
}
@JsProperty
public void setColumns(JavaScriptObject newCols) {
changed = true;
cols = GQ.create(GData.class).<GData> set("columns", newCols).columns();
}
public void jsPropertySelectedRow() {
};
@JsProperty
public int getSelectedRow() {
return grid == null
|| grid.getSelectionModel() == null
|| !(grid.getSelectionModel() instanceof SelectionModel.Single<?>)
|| grid.getSelectedRow() == null ? -1 : grid.getDataSource()
.indexOf(grid.getSelectedRow());
}
@JsProperty
public void setSelectedRow(int idx) {
if (idx < 0 || idx >= grid.getDataSource().size()) {
if (getSelectedRow() >= 0) {
grid.deselect(grid.getDataSource().getRow(getSelectedRow()));
}
} else {
grid.select(grid.getDataSource().getRow(idx));
}
onSelect(null);
}
public void jsPropertySelectedRows() {
};
    // Array of selected indexes returned to JS.
    // We observe it so that when JS makes changes we update
    // the grid selection.
private JsArrayInteger selectedJso;
private boolean selectedLock;
@JsProperty
public void setSelectedRows(JsArrayInteger arr) {
if (arr != selectedJso) {
WCUtils.unobserve(selectedJso);
}
selectedJso = arr;
selectedLock = true;
grid.getSelectionModel().reset();
for (int i = 0, l = selectedJso.length(); i < l; i++) {
grid.select(grid.getDataSource().getRow(selectedJso.get(i)));
}
selectedLock = false;
}
@JsProperty
public JsArrayInteger getSelectedRows() {
if (!selectedLock) {
if (selectedJso == null) {
selectedJso = JsArrayInteger.createArray().cast();
}
selectedJso.setLength(0);
Collection<JsArrayMixed> c = grid.getSelectedRows();
for (Iterator<JsArrayMixed> i = c.iterator(); i.hasNext();) {
selectedJso.push(grid.getDataSource().indexOf(i.next()));
}
WCUtils.unobserve(selectedJso);
WCUtils.observe(selectedJso, new EventListener() {
public void onBrowserEvent(Event event) {
setSelectedRows(selectedJso);
}
});
}
return selectedJso;
}
public void jsPropertyTheme() {
}
@JsProperty
public void setTheme(String value) {
setAttribute("theme", value);
}
@JsProperty
    public String getTheme() {
        return getAttribute("theme");
}
public void redraw() {
Escalator e = e(grid);
c(e.getHeader());
c(e.getFooter());
c(e.getBody());
ColumnConfiguration columnConfiguration = f(e);
for (int i = 0; i < columnConfiguration.getColumnCount(); i++) {
columnConfiguration.setColumnWidth(i, columnConfiguration.getColumnWidth(i));
}
}
private static native Escalator e(Grid<?> g) /*-{
return [email protected]::escalator;
}-*/;
private static native Escalator c(RowContainer r) /*-{
[email protected]::defaultRowHeightShouldBeAutodetected = true;
[email protected]::autodetectRowHeightLater()();
}-*/;
private static native ColumnConfiguration f(Escalator e) /*-{
return [email protected]::columnConfiguration;
}-*/;
}
| Fix v-grid colspan issues
Change-Id: If2e7c370c80015e01bba6c9905507145a145dcec
| wc-client/src/main/java/com/vaadin/prototype/wc/gwt/client/widgets/WCVGrid.java | Fix v-grid colspan issues |
|
Java | apache-2.0 | aba27c6d027a4e35c3f72781d96e3c9e154ab5e9 | 0 | HubSpot/Singularity,HubSpot/Singularity,hs-jenkins-bot/Singularity,HubSpot/Singularity,hs-jenkins-bot/Singularity,HubSpot/Singularity,hs-jenkins-bot/Singularity,hs-jenkins-bot/Singularity,hs-jenkins-bot/Singularity,HubSpot/Singularity | package com.hubspot.singularity.s3.base;
import java.nio.channels.FileChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.EnumSet;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.slf4j.Logger;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.regions.Region;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.S3Object;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.hubspot.deploy.S3Artifact;
import com.hubspot.mesos.JavaUtils;
import com.hubspot.singularity.runner.base.sentry.SingularityRunnerExceptionNotifier;
import com.hubspot.singularity.s3.base.config.SingularityS3Configuration;
public class S3ArtifactDownloader {
private final Logger log;
private final SingularityS3Configuration configuration;
private final SingularityRunnerExceptionNotifier exceptionNotifier;
public S3ArtifactDownloader(SingularityS3Configuration configuration, Logger log, SingularityRunnerExceptionNotifier exceptionNotifier) {
this.configuration = configuration;
this.log = log;
this.exceptionNotifier = exceptionNotifier;
}
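  // Hypothetical usage sketch (names and paths illustrative only):
  //   S3ArtifactDownloader downloader = new S3ArtifactDownloader(config, log, notifier);
  //   downloader.download(s3Artifact, Paths.get("/tmp/artifact.tar.gz"));
  // download() blocks until all chunks finish or the configured timeout elapses.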
public void download(S3Artifact s3Artifact, Path downloadTo) {
final long start = System.currentTimeMillis();
boolean success = false;
try {
downloadThrows(s3Artifact, downloadTo);
success = true;
} catch (Throwable t) {
throw Throwables.propagate(t);
} finally {
log.info("S3 Download {}/{} finished {} after {}", s3Artifact.getS3Bucket(), s3Artifact.getS3ObjectKey(), success ? "successfully" : "with error", JavaUtils.duration(start));
}
}
private BasicAWSCredentials getCredentialsForBucket(String bucketName) {
if (configuration.getS3BucketCredentials().containsKey(bucketName)) {
return configuration.getS3BucketCredentials().get(bucketName).toAWSCredentials();
}
return new BasicAWSCredentials(configuration.getS3AccessKey().get(), configuration.getS3SecretKey().get());
}
private void downloadThrows(final S3Artifact s3Artifact, final Path downloadTo) throws Exception {
log.info("Downloading {}", s3Artifact);
ClientConfiguration clientConfiguration = new ClientConfiguration()
.withSocketTimeout(configuration.getS3ChunkDownloadTimeoutMillis());
final AmazonS3 s3Client = AmazonS3ClientBuilder.standard()
.withCredentials(new AWSStaticCredentialsProvider(getCredentialsForBucket(s3Artifact.getS3Bucket())))
.withClientConfiguration(clientConfiguration)
.withPathStyleAccessEnabled(configuration.isS3PathStyleAccessEnabled())
.build();
if (configuration.getS3Endpoint().isPresent()) {
s3Client.setEndpoint(configuration.getS3Endpoint().get());
s3Client.setRegion(Region.getRegion(Regions.US_EAST_1)); // hardcode for now
}
long length = 0;
if (s3Artifact.getFilesize().isPresent()) {
length = s3Artifact.getFilesize().get();
} else {
S3Object details = s3Client.getObject(s3Artifact.getS3Bucket(), s3Artifact.getS3ObjectKey());
Preconditions.checkNotNull(details, "Couldn't find object at %s/%s", s3Artifact.getS3Bucket(), s3Artifact.getS3ObjectKey());
length = details.getObjectMetadata().getContentLength();
}
int numChunks = (int) (length / configuration.getS3ChunkSize());
if (length % configuration.getS3ChunkSize() > 0) {
numChunks++;
}
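    // Chunk sizing, worked example: with length = 10 and s3ChunkSize = 4,
    // numChunks = 10/4 = 2 and the remainder bumps it to 3; chunkSize then
    // becomes 10/3 + (10 % 3) = 4, so the chunks can over-cover the object and
    // are presumably bounded by 'length' inside S3ArtifactChunkDownloader.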
final long chunkSize = length / numChunks + (length % numChunks);
log.info("Downloading {}/{} in {} chunks of {} bytes to {}", s3Artifact.getS3Bucket(), s3Artifact.getS3ObjectKey(), numChunks, chunkSize, downloadTo);
final ExecutorService chunkExecutorService = Executors.newFixedThreadPool(numChunks, new ThreadFactoryBuilder().setDaemon(true).setNameFormat("S3ArtifactDownloaderChunkThread-%d").build());
final List<Future<Path>> futures = Lists.newArrayListWithCapacity(numChunks);
for (int chunk = 0; chunk < numChunks; chunk++) {
futures.add(chunkExecutorService.submit(new S3ArtifactChunkDownloader(configuration, log, s3Client, s3Artifact, downloadTo, chunk, chunkSize, length, exceptionNotifier)));
}
long remainingMillis = configuration.getS3DownloadTimeoutMillis();
boolean failed = false;
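    // The download budget is shared across chunks: each chunk's wait consumes
    // from remainingMillis, and once one chunk fails the remaining futures are
    // cancelled instead of awaited.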
for (int chunk = 0; chunk < numChunks; chunk++) {
final Future<Path> future = futures.get(chunk);
if (failed) {
future.cancel(true);
continue;
}
final long start = System.currentTimeMillis();
if (!handleChunk(s3Artifact, future, downloadTo, chunk, start, remainingMillis)) {
failed = true;
}
remainingMillis -= (System.currentTimeMillis() - start);
}
chunkExecutorService.shutdownNow();
Preconditions.checkState(!failed, "Downloading %s/%s failed", s3Artifact.getS3Bucket(), s3Artifact.getS3ObjectKey());
}
private boolean handleChunk(S3Artifact s3Artifact, Future<Path> future, Path downloadTo, int chunk, long start, long remainingMillis) {
if (remainingMillis <= 0) {
remainingMillis = 1;
}
try {
Path path = future.get(remainingMillis, TimeUnit.MILLISECONDS);
if (chunk > 0) {
combineChunk(downloadTo, path);
}
return true;
} catch (TimeoutException te) {
log.error("Chunk {} for {} timed out after {} - had {} remaining", chunk, s3Artifact.getFilename(), JavaUtils.duration(start), JavaUtils.durationFromMillis(remainingMillis));
future.cancel(true);
exceptionNotifier.notify("TimeoutException during download", te, ImmutableMap.of("filename", s3Artifact.getFilename(), "chunk", Integer.toString(chunk)));
} catch (Throwable t) {
log.error("Error while handling chunk {} for {}", chunk, s3Artifact.getFilename(), t);
exceptionNotifier.notify(String.format("Error handling chunk (%s)", t.getMessage()), t, ImmutableMap.of("filename", s3Artifact.getFilename(), "chunk", Integer.toString(chunk)));
}
return false;
}
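  // combineChunk() is skipped for chunk 0 (see handleChunk above); each later
  // chunk is appended to 'downloadTo' from its temp file, which
  // StandardOpenOption.DELETE_ON_CLOSE removes once the transfer completes.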
private void combineChunk(Path downloadTo, Path path) throws Exception {
final long start = System.currentTimeMillis();
long bytes = 0;
log.info("Writing {} to {}", path, downloadTo);
try (WritableByteChannel wbs = Files.newByteChannel(downloadTo, EnumSet.of(StandardOpenOption.APPEND, StandardOpenOption.WRITE))) {
try (FileChannel readChannel = FileChannel.open(path, EnumSet.of(StandardOpenOption.READ, StandardOpenOption.DELETE_ON_CLOSE))) {
bytes = readChannel.size();
readChannel.transferTo(0, bytes, wbs);
}
}
log.info("Finished writing {} bytes in {}", bytes, JavaUtils.duration(start));
}
}
| SingularityS3Base/src/main/java/com/hubspot/singularity/s3/base/S3ArtifactDownloader.java | package com.hubspot.singularity.s3.base;
import java.nio.channels.FileChannel;
import java.nio.channels.WritableByteChannel;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardOpenOption;
import java.util.EnumSet;
import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import org.slf4j.Logger;
import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import com.amazonaws.services.s3.model.S3Object;
import com.google.common.base.Preconditions;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Lists;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.hubspot.deploy.S3Artifact;
import com.hubspot.mesos.JavaUtils;
import com.hubspot.singularity.runner.base.sentry.SingularityRunnerExceptionNotifier;
import com.hubspot.singularity.s3.base.config.SingularityS3Configuration;
public class S3ArtifactDownloader {
private final Logger log;
private final SingularityS3Configuration configuration;
private final SingularityRunnerExceptionNotifier exceptionNotifier;
public S3ArtifactDownloader(SingularityS3Configuration configuration, Logger log, SingularityRunnerExceptionNotifier exceptionNotifier) {
this.configuration = configuration;
this.log = log;
this.exceptionNotifier = exceptionNotifier;
}
public void download(S3Artifact s3Artifact, Path downloadTo) {
final long start = System.currentTimeMillis();
boolean success = false;
try {
downloadThrows(s3Artifact, downloadTo);
success = true;
} catch (Throwable t) {
throw Throwables.propagate(t);
} finally {
log.info("S3 Download {}/{} finished {} after {}", s3Artifact.getS3Bucket(), s3Artifact.getS3ObjectKey(), success ? "successfully" : "with error", JavaUtils.duration(start));
}
}
private BasicAWSCredentials getCredentialsForBucket(String bucketName) {
if (configuration.getS3BucketCredentials().containsKey(bucketName)) {
return configuration.getS3BucketCredentials().get(bucketName).toAWSCredentials();
}
return new BasicAWSCredentials(configuration.getS3AccessKey().get(), configuration.getS3SecretKey().get());
}
private void downloadThrows(final S3Artifact s3Artifact, final Path downloadTo) throws Exception {
log.info("Downloading {}", s3Artifact);
ClientConfiguration clientConfiguration = new ClientConfiguration()
.withSocketTimeout(configuration.getS3ChunkDownloadTimeoutMillis());
final AmazonS3 s3Client = AmazonS3ClientBuilder.standard()
.withRegion(Regions.US_EAST_1) // hardcode for now
.withCredentials(new AWSStaticCredentialsProvider(getCredentialsForBucket(s3Artifact.getS3Bucket())))
.withClientConfiguration(clientConfiguration)
.withPathStyleAccessEnabled(configuration.isS3PathStyleAccessEnabled())
.build();
if (configuration.getS3Endpoint().isPresent()) {
s3Client.setEndpoint(configuration.getS3Endpoint().get());
}
long length = 0;
if (s3Artifact.getFilesize().isPresent()) {
length = s3Artifact.getFilesize().get();
} else {
S3Object details = s3Client.getObject(s3Artifact.getS3Bucket(), s3Artifact.getS3ObjectKey());
Preconditions.checkNotNull(details, "Couldn't find object at %s/%s", s3Artifact.getS3Bucket(), s3Artifact.getS3ObjectKey());
length = details.getObjectMetadata().getContentLength();
}
int numChunks = (int) (length / configuration.getS3ChunkSize());
if (length % configuration.getS3ChunkSize() > 0) {
numChunks++;
}
final long chunkSize = length / numChunks + (length % numChunks);
log.info("Downloading {}/{} in {} chunks of {} bytes to {}", s3Artifact.getS3Bucket(), s3Artifact.getS3ObjectKey(), numChunks, chunkSize, downloadTo);
final ExecutorService chunkExecutorService = Executors.newFixedThreadPool(numChunks, new ThreadFactoryBuilder().setDaemon(true).setNameFormat("S3ArtifactDownloaderChunkThread-%d").build());
final List<Future<Path>> futures = Lists.newArrayListWithCapacity(numChunks);
for (int chunk = 0; chunk < numChunks; chunk++) {
futures.add(chunkExecutorService.submit(new S3ArtifactChunkDownloader(configuration, log, s3Client, s3Artifact, downloadTo, chunk, chunkSize, length, exceptionNotifier)));
}
long remainingMillis = configuration.getS3DownloadTimeoutMillis();
boolean failed = false;
for (int chunk = 0; chunk < numChunks; chunk++) {
final Future<Path> future = futures.get(chunk);
if (failed) {
future.cancel(true);
continue;
}
final long start = System.currentTimeMillis();
if (!handleChunk(s3Artifact, future, downloadTo, chunk, start, remainingMillis)) {
failed = true;
}
remainingMillis -= (System.currentTimeMillis() - start);
}
chunkExecutorService.shutdownNow();
Preconditions.checkState(!failed, "Downloading %s/%s failed", s3Artifact.getS3Bucket(), s3Artifact.getS3ObjectKey());
}
private boolean handleChunk(S3Artifact s3Artifact, Future<Path> future, Path downloadTo, int chunk, long start, long remainingMillis) {
if (remainingMillis <= 0) {
remainingMillis = 1;
}
try {
Path path = future.get(remainingMillis, TimeUnit.MILLISECONDS);
if (chunk > 0) {
combineChunk(downloadTo, path);
}
return true;
} catch (TimeoutException te) {
log.error("Chunk {} for {} timed out after {} - had {} remaining", chunk, s3Artifact.getFilename(), JavaUtils.duration(start), JavaUtils.durationFromMillis(remainingMillis));
future.cancel(true);
exceptionNotifier.notify("TimeoutException during download", te, ImmutableMap.of("filename", s3Artifact.getFilename(), "chunk", Integer.toString(chunk)));
} catch (Throwable t) {
log.error("Error while handling chunk {} for {}", chunk, s3Artifact.getFilename(), t);
exceptionNotifier.notify(String.format("Error handling chunk (%s)", t.getMessage()), t, ImmutableMap.of("filename", s3Artifact.getFilename(), "chunk", Integer.toString(chunk)));
}
return false;
}
private void combineChunk(Path downloadTo, Path path) throws Exception {
final long start = System.currentTimeMillis();
long bytes = 0;
log.info("Writing {} to {}", path, downloadTo);
try (WritableByteChannel wbs = Files.newByteChannel(downloadTo, EnumSet.of(StandardOpenOption.APPEND, StandardOpenOption.WRITE))) {
try (FileChannel readChannel = FileChannel.open(path, EnumSet.of(StandardOpenOption.READ, StandardOpenOption.DELETE_ON_CLOSE))) {
bytes = readChannel.size();
readChannel.transferTo(0, bytes, wbs);
}
}
log.info("Finished writing {} bytes in {}", bytes, JavaUtils.duration(start));
}
}
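// A minimal usage sketch of the downloader above; the wiring of the
// configuration, logger and exception notifier is assumed to come from
// the surrounding runner and is hypothetical here:
//
//   S3ArtifactDownloader downloader =
//       new S3ArtifactDownloader(configuration, log, exceptionNotifier);
//   downloader.download(s3Artifact, Paths.get("/tmp/artifact.tar.gz"));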
| Revert "move"
This reverts commit 27ffd802ba3f0b6a24adadcf0cf3e9d38bdd0704.
| SingularityS3Base/src/main/java/com/hubspot/singularity/s3/base/S3ArtifactDownloader.java | Revert "move" |
|
Java | apache-2.0 | 5f86f031febbfee1d85870556a11419f826e32ef | 0 | JeffLi1993/java-core-learning-example | package org.javacore.img;
import javax.imageio.ImageIO;
import java.awt.*;
import java.awt.geom.Ellipse2D;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
/*
* Copyright [2015] [Jeff Lee]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Jeff Lee
* @since 2016-06-28 14:05:26
* Crops an image into an ellipse
*/
public class ImgCircleCut {
public static boolean ImgCircleCut(String srcFile, String targetFile) {
try {
// Read the image into a BufferedImage; a BufferedImage without an alpha channel (rather than BufferedImage.TYPE_INT_ARGB) could also be considered here
BufferedImage srcBi = ImageIO.read(new File(srcFile));
// Create a target BufferedImage with an alpha channel
BufferedImage targetBi = new BufferedImage(srcBi.getWidth(), srcBi.getHeight(),
BufferedImage.TYPE_INT_ARGB);
// Build the ellipse defined by the image's bounding rectangle
Ellipse2D.Double shape = new Ellipse2D.Double(0, 0,
srcBi.getWidth(), srcBi.getHeight());
// Create the Graphics2D object for the target image
Graphics2D g2 = targetBi.createGraphics();
// Create an AlphaComposite using the SRC_OVER rule (0.9 alpha, nearly opaque)
AlphaComposite ac = AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 0.9f);
g2.setComposite(ac);
g2.setBackground(new Color(22, 2, 2, 0));
// Whether the result is a circle or an ellipse depends on these custom parameters
g2.fill3DRect(200, 200, 180, 80, false);
g2.setClip(shape);
g2.drawImage(srcBi, 0, 0, null);
g2.dispose();
ImageIO.write(targetBi, "png", new File(targetFile));
} catch (IOException e) {
e.printStackTrace();
return false;
}
return true;
}
}
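// A minimal usage sketch (the file paths below are hypothetical):
//
//   ImgCircleCut.ImgCircleCut("/tmp/photo.png", "/tmp/photo-ellipse.png");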
| src/org/javacore/img/ImgCircleCut.java | package org.javacore.img;
import javax.imageio.ImageIO;
import java.awt.*;
import java.awt.geom.Ellipse2D;
import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
/*
* Copyright [2015] [Jeff Lee]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Jeff Lee
* @since 2016-06-28 14:05:26
* Crops an image into an ellipse
*/
public class ImgCircleCut {
public static boolean ImgCircleCut(String srcFile, String targetFile) {
try {
// Read the image into a BufferedImage; a BufferedImage without an alpha channel (rather than BufferedImage.TYPE_INT_ARGB) could also be considered here
BufferedImage srcBi = ImageIO.read(new File(srcFile));
// Create a target BufferedImage with an alpha channel
BufferedImage targetBi = new BufferedImage(srcBi.getWidth(), srcBi.getHeight(),
BufferedImage.TYPE_INT_ARGB);
// Build the ellipse defined by the image's bounding rectangle
Ellipse2D.Double shape = new Ellipse2D.Double(0, 0,
srcBi.getWidth(), srcBi.getHeight());
// Create the Graphics2D object for the target image
Graphics2D g2 = targetBi.createGraphics();
// Create an AlphaComposite using the SRC_OVER rule (0.9 alpha, nearly opaque)
AlphaComposite ac = AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 0.9f);
g2.setComposite(ac);
g2.setBackground(new Color(22, 2, 2, 0));
// Whether the result is a circle or an ellipse depends on these custom parameters
g2.fill3DRect(200, 200, 180, 80, false);
g2.setClip(shape);
g2.drawImage(srcBi, 0, 0, null);
g2.dispose();
ImageIO.write(targetBi, "png", new File(targetFile));
} catch (IOException e) {
e.printStackTrace();
return false;
}
return true;
}
public static void main(String[] args) {
ImgCircleCut("/jee/java-core-learning-example/src/resources/6890948.png","/jee/java-core-learning-example/src/resources/111.png");
}
}
| 1. Image cropping tool - ellipse
#bysocket
| src/org/javacore/img/ImgCircleCut.java | 1. Image cropping tool - ellipse #bysocket |
|
Java | apache-2.0 | 58bede15cb877893d695993ebd75a37e43d2f32d | 0 | foam-framework/foam2,jacksonic/vjlofvhjfgm,foam-framework/foam2,foam-framework/foam2,jacksonic/vjlofvhjfgm,foam-framework/foam2,jacksonic/vjlofvhjfgm,foam-framework/foam2 | /**
* @license
* Copyright 2017 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
package foam.nanos.dig;
import foam.core.*;
import foam.dao.AbstractSink;
import foam.dao.ArraySink;
import foam.dao.DAO;
import foam.lib.csv.CSVSupport;
import foam.lib.json.JSONParser;
import foam.lib.json.OutputterMode;
import foam.lib.parse.*;
import foam.mlang.MLang;
import foam.mlang.predicate.Predicate;
import foam.nanos.boot.NSpec;
import foam.nanos.dig.exception.*;
import foam.nanos.http.*;
import foam.nanos.logger.Logger;
import foam.nanos.logger.PrefixLogger;
import foam.nanos.notification.email.EmailMessage;
import foam.nanos.notification.email.EmailService;
import foam.nanos.pm.PM;
import foam.util.SafetyUtil;
import javax.servlet.http.HttpServletResponse;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamReader;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.StringReader;
import java.nio.CharBuffer;
import java.util.Iterator;
import java.util.List;
import java.lang.Exception;
import java.util.StringTokenizer;
public class DigWebAgent
implements WebAgent
{
public DigWebAgent() {}
public void execute(X x) {
Logger logger = (Logger) x.get("logger");
HttpServletResponse resp = x.get(HttpServletResponse.class);
HttpParameters p = x.get(HttpParameters.class);
PrintWriter out = x.get(PrintWriter.class);
CharBuffer buffer_ = CharBuffer.allocate(65535);
String data = p.getParameter("data");
String daoName = p.getParameter("dao");
Command command = (Command) p.get(Command.class);
Format format = (Format) p.get(Format.class);
String id = p.getParameter("id");
String q = p.getParameter("q");
DAO nSpecDAO = (DAO) x.get("AuthenticatedNSpecDAO");
String[] email = p.getParameterValues("email");
boolean emailSet = email != null && email.length > 0 && ! SafetyUtil.isEmpty(email[0]);
String subject = p.getParameter("subject");
//
// FIXME/TODO: ensuring XML and CSV flows return proper response objects and codes has not been completed since the switch to HttpParameters.
//
PM pm = new PM(getClass(), command.getName()+'/'+format.getName());
logger = new PrefixLogger(new Object[] { this.getClass().getSimpleName() }, logger);
try {
if ( SafetyUtil.isEmpty(daoName) ) {
resp.setContentType("text/html");
// FIXME: Presently the dig UI doesn't have any way to submit/send a request.
// String url = "/#dig";
// try {
// resp.sendRedirect(url);
// } catch ( java.io.IOException e ) {
// logger.error("Failed to redirect to", url, e);
// }
return;
}
DAO dao = (DAO) x.get(daoName);
if ( dao == null ) {
DigErrorMessage error = new DAONotFoundException.Builder(x)
.setMessage("DAO not found: " + daoName)
.build();
outputException(x, resp, format, out, error);
return;
}
dao = dao.inX(x);
FObject obj = null;
ClassInfo cInfo = dao.getOf();
Class objClass = cInfo.getObjClass();
Predicate pred = new WebAgentQueryParser(cInfo).parse(x, q);
logger.debug("predicate", pred.getClass(), pred.toString());
dao = dao.where(pred);
if ( Command.put == command ) {
String returnMessage = "success";
if ( Format.JSON == format ) {
JSONParser jsonParser = new JSONParser();
jsonParser.setX(x);
foam.lib.json.Outputter outputterJson = new foam.lib.json.Outputter(OutputterMode.NETWORK);
outputterJson.setOutputDefaultValues(true);
outputterJson.setOutputClassNames(true);
// let FObjectArray parse first
if ( SafetyUtil.isEmpty(data) ) {
DigErrorMessage error = new EmptyDataException.Builder(x)
.build();
outputException(x, resp, format, out, error);
return;
}
try {
Object o = jsonParser.parseStringForArray(data, objClass);
Object o1 = jsonParser.parseString(data, objClass);
if ( o == null && o1 == null ) {
DigErrorMessage error = new ParsingErrorException.Builder(x)
.setMessage("Invalid JSON Format")
.build();
outputException(x, resp, format, out, error);
return;
}
if ( o == null )
o = o1;
if ( o instanceof Object[] ) {
Object[] objs = (Object[]) o;
for ( int j = 0 ; j < objs.length ; j++ ) {
obj = (FObject) objs[j];
dao.put(obj);
}
} else {
obj = (FObject) o;
obj = dao.put(obj);
}
outputterJson.output(o);
out.println(outputterJson);
resp.setStatus(HttpServletResponse.SC_OK);
return;
} catch (Exception e) {
logger.error(e);
DigErrorMessage error = new DAOPutException.Builder(x)
.setMessage(e.getMessage())
.build();
outputException(x, resp, format, out, error);
return;
}
} else if ( Format.XML == format ) {
XMLSupport xmlSupport = new XMLSupport();
XMLInputFactory factory = XMLInputFactory.newInstance();
StringReader reader = new StringReader(data.toString());
XMLStreamReader xmlReader = factory.createXMLStreamReader(reader);
List<FObject> objList = xmlSupport.fromXML(x, xmlReader, objClass);
if ( objList.size() == 0 ) {
String message = getParsingError(x, buffer_.toString());
logger.error(message + ", input: " + buffer_.toString());
DigErrorMessage error = new ParsingErrorException.Builder(x)
.setMessage("Invalid XML Format")
.build();
outputException(x, resp, format, out, error);
return;
}
Iterator i = objList.iterator();
while ( i.hasNext() ) {
obj = (FObject)i.next();
obj = dao.put(obj);
}
//returnMessage = "<objects>" + success + "</objects>";
} else if ( Format.CSV == format ) {
CSVSupport csvSupport = new CSVSupport();
csvSupport.setX(x);
// convert String into InputStream
InputStream is = new ByteArrayInputStream(data.toString().getBytes());
ArraySink arraySink = new ArraySink();
csvSupport.inputCSV(is, arraySink, cInfo);
List list = arraySink.getArray();
if ( list.size() == 0 ) {
String message = getParsingError(x, buffer_.toString());
logger.error(message + ", input: " + buffer_.toString());
DigErrorMessage error = new ParsingErrorException.Builder(x)
.setMessage("Invalid CSV Format")
.build();
outputException(x, resp, format, out, error);
return;
}
for ( int i = 0 ; i < list.size() ; i++ ) {
dao.put((FObject) list.get(i));
}
} else if ( Format.HTML == format ) {
DigErrorMessage error = new UnsupportException.Builder(x)
.setMessage("Unsupported Format: " + format)
.build();
outputException(x, resp, format, out, error);
return;
} else if (Format.JSONJ == format ) {
String dataJson = "[";
String dataJsonJ[] = data.split("\\r?\\n");
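// Each JSONJ line is expected to have the form p({...}); strip the "p("
// prefix and the trailing ")" so the payloads form one JSON array.
// Malformed lines now throw instead of being silently skipped.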
for (String i:dataJsonJ){
i = i.trim();
dataJson += i.substring(2, i.length()-1) + ',';
}
dataJson += "]";
// JSON part from above
JSONParser jsonParser = new JSONParser();
jsonParser.setX(x);
foam.lib.json.Outputter outputterJson = new foam.lib.json.Outputter(OutputterMode.NETWORK);
outputterJson.setOutputDefaultValues(true);
outputterJson.setOutputClassNames(true);
// let FObjectArray parse first
if ( SafetyUtil.isEmpty(dataJson) ) {
DigErrorMessage error = new EmptyDataException.Builder(x)
.build();
outputException(x, resp, format, out, error);
return;
}
try {
Object o = jsonParser.parseStringForArray(dataJson, objClass);
Object o1 = jsonParser.parseString(dataJson, objClass);
if ( o == null && o1 == null ) {
DigErrorMessage error = new ParsingErrorException.Builder(x)
.setMessage("Invalid JSONJ Format")
.build();
outputException(x, resp, format, out, error);
return;
}
if ( o == null )
o = o1;
if ( o instanceof Object[] ) {
Object[] objs = (Object[]) o;
for ( int j = 0 ; j < objs.length ; j++ ) {
obj = (FObject) objs[j];
dao.put(obj);
}
} else {
obj = (FObject) o;
obj = dao.put(obj);
}
outputterJson.output(o);
out.println(outputterJson);
resp.setStatus(HttpServletResponse.SC_OK);
return;
} catch (Exception e) {
logger.error(e);
DigErrorMessage error = new DAOPutException.Builder(x)
.setMessage(e.getMessage())
.build();
outputException(x, resp, format, out, error);
return;
}
}
out.println(returnMessage);
} else if ( Command.select == command ) {
PropertyInfo idProp = (PropertyInfo) cInfo.getAxiomByName("id");
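// If an id parameter was supplied, select only the matching object;
// otherwise select everything matching the predicate.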
ArraySink sink = (ArraySink) ( ! SafetyUtil.isEmpty(id) ?
dao.where(MLang.EQ(idProp, id)).select(new ArraySink()) :
dao.select(new ArraySink()));
if ( sink != null ) {
if ( sink.getArray().size() == 0 ) {
if (Format.XML == format) {
resp.setContentType("text/html");
}
out.println("[]");
resp.setStatus(HttpServletResponse.SC_OK);
return;
}
logger.debug(this.getClass().getSimpleName(), "objects selected: " + sink.getArray().size());
if ( Format.JSON == format ) {
foam.lib.json.Outputter outputterJson = new foam.lib.json.Outputter(OutputterMode.NETWORK);
outputterJson.setOutputDefaultValues(true);
outputterJson.setOutputClassNames(true);
outputterJson.output(sink.getArray().toArray());
//resp.setContentType("application/json");
if ( emailSet ) {
output(x, outputterJson.toString());
} else {
out.println(outputterJson.toString());
}
} else if ( Format.XML == format ) {
foam.lib.xml.Outputter outputterXml = new foam.lib.xml.Outputter(OutputterMode.NETWORK);
outputterXml.output(sink.getArray().toArray());
//resp.setContentType("application/xml");
if ( emailSet ) {
output(x, "<textarea style=\"width:700;height:400;\" rows=10 cols=120>" + outputterXml.toString() + "</textarea>");
} else {
out.println(outputterXml.toString());
}
} else if ( Format.CSV == format ) {
foam.lib.csv.Outputter outputterCsv = new foam.lib.csv.Outputter(OutputterMode.NETWORK);
outputterCsv.output(sink.getArray().toArray());
List a = sink.getArray();
for ( int i = 0; i < a.size(); i++ ) {
outputterCsv.put((FObject) a.get(i), null);
}
//resp.setContentType("text/plain");
//if ( email.length != 0 && ! email[0].equals("") && email[0] != null ) {
if ( emailSet ) {
output(x, outputterCsv.toString());
} else {
out.println(outputterCsv.toString());
}
} else if ( Format.HTML == format ) {
foam.lib.html.Outputter outputterHtml = new foam.lib.html.Outputter(OutputterMode.NETWORK);
outputterHtml.outputStartHtml();
outputterHtml.outputStartTable();
List a = sink.getArray();
for ( int i = 0; i < a.size(); i++ ) {
if ( i == 0 ) {
outputterHtml.outputHead((FObject) a.get(i));
}
outputterHtml.put((FObject) a.get(i), null);
}
outputterHtml.outputEndTable();
outputterHtml.outputEndHtml();
if ( emailSet ) {
output(x, outputterHtml.toString());
} else {
out.println(outputterHtml.toString());
}
} else if ( Format.JSONJ == format ) {
foam.lib.json.Outputter outputterJson = new foam.lib.json.Outputter(OutputterMode.STORAGE);
List a = sink.getArray();
String dataToString = "";
//resp.setContentType("application/json");
for ( int i = 0 ; i < a.size() ; i++ )
outputterJson.outputJSONJFObject((FObject) a.get(i));
if ( emailSet ) {
output(x, dataToString);
} else {
out.println(outputterJson.toString());
}
}
} else {
if ( Format.XML == format ) {
resp.setContentType("text/html");
}
DigErrorMessage error = new ParsingErrorException.Builder(x)
.setMessage("Unsupported DAO : " + daoName)
.build();
outputException(x, resp, format, out, error);
return;
}
} else if ( Command.remove == command ) {
PropertyInfo idProp = (PropertyInfo) cInfo.getAxiomByName("id");
Object idObj = idProp.fromString(id);
FObject targetFobj = dao.find(idObj);
if ( targetFobj == null ) {
DigErrorMessage error = new UnknownIdException.Builder(x)
.build();
outputException(x, resp, format, out, error);
return;
} else {
dao.remove(targetFobj);
DigErrorMessage error = new DigSuccessMessage.Builder(x)
.setMessage("Success")
.build();
outputException(x, resp, format, out, error);
return;
}
} else {
DigErrorMessage error = new ParsingErrorException.Builder(x)
.setMessage("Unsupported method: "+command)
.build();
outputException(x, resp, format, out, error);
return;
}
out.println();
out.flush();
logger.debug(this.getClass().getSimpleName(), "success");
resp.setStatus(HttpServletResponse.SC_OK);
} catch (Throwable t) {
out.println("Error " + t);
out.println("<pre>");
t.printStackTrace(out);
out.println("</pre>");
t.printStackTrace();
logger.error(t);
try {
resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, t.toString());
} catch ( java.io.IOException e ) {
logger.error("Failed to send HttpServletResponse CODE", e);
}
} finally {
pm.log(x);
}
}
protected void output(X x, String data) {
HttpParameters p = x.get(HttpParameters.class);
String emailParam = p.getParameter("email");
String subject = p.getParameter("subject");
if ( SafetyUtil.isEmpty(emailParam) ) {
PrintWriter out = x.get(PrintWriter.class);
out.print(data);
} else {
EmailService emailService = (EmailService) x.get("email");
EmailMessage message = new EmailMessage();
// For multiple receiver
String[] email = emailParam.split(",");
if ( email.length > 0 ) message.setTo(email);
message.setSubject(subject);
String newData = data;
message.setBody(newData);
emailService.sendEmail(x, message);
}
}
/**
* Gets the result of a failing parsing of a buffer
* @param buffer the buffer that failed to be parsed
* @return the error message
*/
protected String getParsingError(X x, String buffer) {
Parser parser = new foam.lib.json.ExprParser();
PStream ps = new StringPStream();
ParserContext psx = new ParserContextImpl();
((StringPStream) ps).setString(buffer);
psx.set("X", x == null ? new ProxyX() : x);
ErrorReportingPStream eps = new ErrorReportingPStream(ps);
ps = eps.apply(parser, psx);
return eps.getMessage();
}
protected void outputException(X x, HttpServletResponse resp, Format format, PrintWriter out, DigErrorMessage error) {
resp.setStatus(Integer.parseInt(error.getStatus()));
if ( format == Format.JSON ) {
//output error in json format
JSONParser jsonParser = new JSONParser();
jsonParser.setX(x);
foam.lib.json.Outputter outputterJson = new foam.lib.json.Outputter(OutputterMode.NETWORK);
outputterJson.setOutputDefaultValues(true);
outputterJson.setOutputClassNames(true);
outputterJson.output(error);
out.println(outputterJson.toString());
} else if ( format == Format.XML ) {
//output error in xml format
foam.lib.xml.Outputter outputterXml = new foam.lib.xml.Outputter(OutputterMode.NETWORK);
outputterXml.output(error);
out.println(outputterXml.toString());
} else if ( format == Format.CSV ) {
//output error in csv format
foam.lib.csv.Outputter outputterCsv = new foam.lib.csv.Outputter(OutputterMode.NETWORK);
outputterCsv.put(error, null);
out.println(outputterCsv.toString());
} else if ( format == Format.HTML ) {
foam.lib.html.Outputter outputterHtml = new foam.lib.html.Outputter(OutputterMode.NETWORK);
outputterHtml.outputStartHtml();
outputterHtml.outputStartTable();
outputterHtml.outputHead(error);
outputterHtml.put(error, null);
outputterHtml.outputEndTable();
outputterHtml.outputEndHtml();
out.println(outputterHtml.toString());
} else if ( format == Format.JSONJ ) {
//output error in jsonJ format
JSONParser jsonParser = new JSONParser();
jsonParser.setX(x);
foam.lib.json.Outputter outputterJson = new foam.lib.json.Outputter(OutputterMode.STORAGE);
outputterJson.setOutputDefaultValues(true);
outputterJson.setOutputClassNames(true);
outputterJson.outputJSONJFObject(error);
out.println(outputterJson.toString());
} else {
// TODO
}
}
}
| src/foam/nanos/dig/DigWebAgent.java | /**
* @license
* Copyright 2017 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
package foam.nanos.dig;
import foam.core.*;
import foam.dao.AbstractSink;
import foam.dao.ArraySink;
import foam.dao.DAO;
import foam.lib.csv.CSVSupport;
import foam.lib.json.JSONParser;
import foam.lib.json.OutputterMode;
import foam.lib.parse.*;
import foam.mlang.MLang;
import foam.mlang.predicate.Predicate;
import foam.nanos.boot.NSpec;
import foam.nanos.dig.exception.*;
import foam.nanos.http.*;
import foam.nanos.logger.Logger;
import foam.nanos.logger.PrefixLogger;
import foam.nanos.notification.email.EmailMessage;
import foam.nanos.notification.email.EmailService;
import foam.nanos.pm.PM;
import foam.util.SafetyUtil;
import javax.servlet.http.HttpServletResponse;
import javax.xml.stream.XMLInputFactory;
import javax.xml.stream.XMLStreamReader;
import java.io.ByteArrayInputStream;
import java.io.InputStream;
import java.io.PrintWriter;
import java.io.StringReader;
import java.nio.CharBuffer;
import java.util.Iterator;
import java.util.List;
import java.lang.Exception;
import java.util.StringTokenizer;
public class DigWebAgent
implements WebAgent
{
public DigWebAgent() {}
public void execute(X x) {
Logger logger = (Logger) x.get("logger");
HttpServletResponse resp = x.get(HttpServletResponse.class);
HttpParameters p = x.get(HttpParameters.class);
PrintWriter out = x.get(PrintWriter.class);
CharBuffer buffer_ = CharBuffer.allocate(65535);
String data = p.getParameter("data");
String daoName = p.getParameter("dao");
Command command = (Command) p.get(Command.class);
Format format = (Format) p.get(Format.class);
String id = p.getParameter("id");
String q = p.getParameter("q");
DAO nSpecDAO = (DAO) x.get("AuthenticatedNSpecDAO");
String[] email = p.getParameterValues("email");
boolean emailSet = email != null && email.length > 0 && ! SafetyUtil.isEmpty(email[0]);
String subject = p.getParameter("subject");
//
// FIXME/TODO: ensuring XML and CSV flows return proper response objects and codes has not been completed since the switch to HttpParameters.
//
PM pm = new PM(getClass(), command.getName()+'/'+format.getName());
logger = new PrefixLogger(new Object[] { this.getClass().getSimpleName() }, logger);
try {
if ( SafetyUtil.isEmpty(daoName) ) {
resp.setContentType("text/html");
// FIXME: Presently the dig UI doesn't have any way to submit/send a request.
// String url = "/#dig";
// try {
// resp.sendRedirect(url);
// } catch ( java.io.IOException e ) {
// logger.error("Failed to redirect to", url, e);
// }
return;
}
DAO dao = (DAO) x.get(daoName);
if ( dao == null ) {
DigErrorMessage error = new DAONotFoundException.Builder(x)
.setMessage("DAO not found: " + daoName)
.build();
outputException(x, resp, format, out, error);
return;
}
dao = dao.inX(x);
FObject obj = null;
ClassInfo cInfo = dao.getOf();
Class objClass = cInfo.getObjClass();
Predicate pred = new WebAgentQueryParser(cInfo).parse(x, q);
logger.debug("predicate", pred.getClass(), pred.toString());
dao = dao.where(pred);
if ( Command.put == command ) {
String returnMessage = "success";
if ( Format.JSON == format ) {
JSONParser jsonParser = new JSONParser();
jsonParser.setX(x);
foam.lib.json.Outputter outputterJson = new foam.lib.json.Outputter(OutputterMode.NETWORK);
outputterJson.setOutputDefaultValues(true);
outputterJson.setOutputClassNames(true);
// let FObjectArray parse first
if ( SafetyUtil.isEmpty(data) ) {
DigErrorMessage error = new EmptyDataException.Builder(x)
.build();
outputException(x, resp, format, out, error);
return;
}
try {
Object o = jsonParser.parseStringForArray(data, objClass);
Object o1 = jsonParser.parseString(data, objClass);
if ( o == null && o1 == null ) {
DigErrorMessage error = new ParsingErrorException.Builder(x)
.setMessage("Invalid JSON Format")
.build();
outputException(x, resp, format, out, error);
return;
}
if ( o == null )
o = o1;
if ( o instanceof Object[] ) {
Object[] objs = (Object[]) o;
for ( int j = 0 ; j < objs.length ; j++ ) {
obj = (FObject) objs[j];
dao.put(obj);
}
} else {
obj = (FObject) o;
obj = dao.put(obj);
}
outputterJson.output(o);
out.println(outputterJson);
resp.setStatus(HttpServletResponse.SC_OK);
return;
} catch (Exception e) {
logger.error(e);
DigErrorMessage error = new DAOPutException.Builder(x)
.setMessage(e.getMessage())
.build();
outputException(x, resp, format, out, error);
return;
}
} else if ( Format.XML == format ) {
XMLSupport xmlSupport = new XMLSupport();
XMLInputFactory factory = XMLInputFactory.newInstance();
StringReader reader = new StringReader(data.toString());
XMLStreamReader xmlReader = factory.createXMLStreamReader(reader);
List<FObject> objList = xmlSupport.fromXML(x, xmlReader, objClass);
if ( objList.size() == 0 ) {
String message = getParsingError(x, buffer_.toString());
logger.error(message + ", input: " + buffer_.toString());
DigErrorMessage error = new ParsingErrorException.Builder(x)
.setMessage("Invalid XML Format")
.build();
outputException(x, resp, format, out, error);
return;
}
Iterator i = objList.iterator();
while ( i.hasNext() ) {
obj = (FObject)i.next();
obj = dao.put(obj);
}
//returnMessage = "<objects>" + success + "</objects>";
} else if ( Format.CSV == format ) {
CSVSupport csvSupport = new CSVSupport();
csvSupport.setX(x);
// convert String into InputStream
InputStream is = new ByteArrayInputStream(data.toString().getBytes());
ArraySink arraySink = new ArraySink();
csvSupport.inputCSV(is, arraySink, cInfo);
List list = arraySink.getArray();
if ( list.size() == 0 ) {
String message = getParsingError(x, buffer_.toString());
logger.error(message + ", input: " + buffer_.toString());
DigErrorMessage error = new ParsingErrorException.Builder(x)
.setMessage("Invalid CSV Format")
.build();
outputException(x, resp, format, out, error);
return;
}
for ( int i = 0 ; i < list.size() ; i++ ) {
dao.put((FObject) list.get(i));
}
} else if ( Format.HTML == format ) {
DigErrorMessage error = new UnsupportException.Builder(x)
.setMessage("Unsupported Format: " + format)
.build();
outputException(x, resp, format, out, error);
return;
} else if (Format.JSONJ == format ) {
String dataJson = "[";
String dataJsonJ[] = data.split("\\s*\\r?\\n");
for (String i:dataJsonJ){
if (i.startsWith("p(") && i.endsWith(")")) {
dataJson += i.substring(2, i.length()-1) + ',';
}
}
dataJson += "]";
// JSON part from above
JSONParser jsonParser = new JSONParser();
jsonParser.setX(x);
foam.lib.json.Outputter outputterJson = new foam.lib.json.Outputter(OutputterMode.NETWORK);
outputterJson.setOutputDefaultValues(true);
outputterJson.setOutputClassNames(true);
// let FObjectArray parse first
if ( SafetyUtil.isEmpty(dataJson) ) {
DigErrorMessage error = new EmptyDataException.Builder(x)
.build();
outputException(x, resp, format, out, error);
return;
}
try {
Object o = jsonParser.parseStringForArray(dataJson, objClass);
Object o1 = jsonParser.parseString(dataJson, objClass);
if ( o == null && o1 == null ) {
DigErrorMessage error = new ParsingErrorException.Builder(x)
.setMessage("Invalid JSON Format")
.build();
outputException(x, resp, format, out, error);
return;
}
if ( o == null )
o = o1;
if ( o instanceof Object[] ) {
Object[] objs = (Object[]) o;
for ( int j = 0 ; j < objs.length ; j++ ) {
obj = (FObject) objs[j];
dao.put(obj);
}
} else {
obj = (FObject) o;
obj = dao.put(obj);
}
outputterJson.output(o);
out.println(outputterJson);
resp.setStatus(HttpServletResponse.SC_OK);
return;
} catch (Exception e) {
logger.error(e);
DigErrorMessage error = new DAOPutException.Builder(x)
.setMessage(e.getMessage())
.build();
outputException(x, resp, format, out, error);
return;
}
}
out.println(returnMessage);
} else if ( Command.select == command ) {
PropertyInfo idProp = (PropertyInfo) cInfo.getAxiomByName("id");
ArraySink sink = (ArraySink) ( ! SafetyUtil.isEmpty(id) ?
dao.where(MLang.EQ(idProp, id)).select(new ArraySink()) :
dao.select(new ArraySink()));
if ( sink != null ) {
if ( sink.getArray().size() == 0 ) {
if (Format.XML == format) {
resp.setContentType("text/html");
}
out.println("[]");
resp.setStatus(HttpServletResponse.SC_OK);
return;
}
logger.debug(this.getClass().getSimpleName(), "objects selected: " + sink.getArray().size());
if ( Format.JSON == format ) {
foam.lib.json.Outputter outputterJson = new foam.lib.json.Outputter(OutputterMode.NETWORK);
outputterJson.setOutputDefaultValues(true);
outputterJson.setOutputClassNames(true);
outputterJson.output(sink.getArray().toArray());
//resp.setContentType("application/json");
if ( emailSet ) {
output(x, outputterJson.toString());
} else {
out.println(outputterJson.toString());
}
} else if ( Format.XML == format ) {
foam.lib.xml.Outputter outputterXml = new foam.lib.xml.Outputter(OutputterMode.NETWORK);
outputterXml.output(sink.getArray().toArray());
//resp.setContentType("application/xml");
if ( emailSet ) {
output(x, "<textarea style=\"width:700;height:400;\" rows=10 cols=120>" + outputterXml.toString() + "</textarea>");
} else {
out.println(outputterXml.toString());
}
} else if ( Format.CSV == format ) {
foam.lib.csv.Outputter outputterCsv = new foam.lib.csv.Outputter(OutputterMode.NETWORK);
outputterCsv.output(sink.getArray().toArray());
List a = sink.getArray();
for ( int i = 0; i < a.size(); i++ ) {
outputterCsv.put((FObject) a.get(i), null);
}
//resp.setContentType("text/plain");
//if ( email.length != 0 && ! email[0].equals("") && email[0] != null ) {
if ( emailSet ) {
output(x, outputterCsv.toString());
} else {
out.println(outputterCsv.toString());
}
} else if ( Format.HTML == format ) {
foam.lib.html.Outputter outputterHtml = new foam.lib.html.Outputter(OutputterMode.NETWORK);
outputterHtml.outputStartHtml();
outputterHtml.outputStartTable();
List a = sink.getArray();
for ( int i = 0; i < a.size(); i++ ) {
if ( i == 0 ) {
outputterHtml.outputHead((FObject) a.get(i));
}
outputterHtml.put((FObject) a.get(i), null);
}
outputterHtml.outputEndTable();
outputterHtml.outputEndHtml();
if ( emailSet ) {
output(x, outputterHtml.toString());
} else {
out.println(outputterHtml.toString());
}
} else if ( Format.JSONJ == format ) {
foam.lib.json.Outputter outputterJson = new foam.lib.json.Outputter(OutputterMode.STORAGE);
List a = sink.getArray();
String dataToString = "";
//resp.setContentType("application/json");
for ( int i = 0 ; i < a.size() ; i++ )
outputterJson.outputJSONJFObject((FObject) a.get(i));
if ( emailSet ) {
output(x, dataToString);
} else {
out.println(outputterJson.toString());
}
}
} else {
if ( Format.XML == format ) {
resp.setContentType("text/html");
}
DigErrorMessage error = new ParsingErrorException.Builder(x)
.setMessage("Unsupported DAO : " + daoName)
.build();
outputException(x, resp, format, out, error);
return;
}
} else if ( Command.remove == command ) {
PropertyInfo idProp = (PropertyInfo) cInfo.getAxiomByName("id");
Object idObj = idProp.fromString(id);
FObject targetFobj = dao.find(idObj);
if ( targetFobj == null ) {
DigErrorMessage error = new UnknownIdException.Builder(x)
.build();
outputException(x, resp, format, out, error);
return;
} else {
dao.remove(targetFobj);
DigErrorMessage error = new DigSuccessMessage.Builder(x)
.setMessage("Success")
.build();
outputException(x, resp, format, out, error);
return;
}
} else {
DigErrorMessage error = new ParsingErrorException.Builder(x)
.setMessage("Unsupported method: "+command)
.build();
outputException(x, resp, format, out, error);
return;
}
out.println();
out.flush();
logger.debug(this.getClass().getSimpleName(), "success");
resp.setStatus(HttpServletResponse.SC_OK);
} catch (Throwable t) {
out.println("Error " + t);
out.println("<pre>");
t.printStackTrace(out);
out.println("</pre>");
t.printStackTrace();
logger.error(t);
try {
resp.sendError(HttpServletResponse.SC_INTERNAL_SERVER_ERROR, t.toString());
} catch ( java.io.IOException e ) {
logger.error("Failed to send HttpServletResponse CODE", e);
}
} finally {
pm.log(x);
}
}
protected void output(X x, String data) {
HttpParameters p = x.get(HttpParameters.class);
String emailParam = p.getParameter("email");
String subject = p.getParameter("subject");
if ( SafetyUtil.isEmpty(emailParam) ) {
PrintWriter out = x.get(PrintWriter.class);
out.print(data);
} else {
EmailService emailService = (EmailService) x.get("email");
EmailMessage message = new EmailMessage();
// For multiple receiver
String[] email = emailParam.split(",");
if ( email.length > 0 ) message.setTo(email);
message.setSubject(subject);
String newData = data;
message.setBody(newData);
emailService.sendEmail(x, message);
}
}
/**
* Gets the result of a failing parsing of a buffer
* @param buffer the buffer that failed to be parsed
* @return the error message
*/
protected String getParsingError(X x, String buffer) {
Parser parser = new foam.lib.json.ExprParser();
PStream ps = new StringPStream();
ParserContext psx = new ParserContextImpl();
((StringPStream) ps).setString(buffer);
psx.set("X", x == null ? new ProxyX() : x);
ErrorReportingPStream eps = new ErrorReportingPStream(ps);
ps = eps.apply(parser, psx);
return eps.getMessage();
}
protected void outputException(X x, HttpServletResponse resp, Format format, PrintWriter out, DigErrorMessage error) {
resp.setStatus(Integer.parseInt(error.getStatus()));
if ( format == Format.JSON ) {
//output error in json format
JSONParser jsonParser = new JSONParser();
jsonParser.setX(x);
foam.lib.json.Outputter outputterJson = new foam.lib.json.Outputter(OutputterMode.NETWORK);
outputterJson.setOutputDefaultValues(true);
outputterJson.setOutputClassNames(true);
outputterJson.output(error);
out.println(outputterJson.toString());
} else if ( format == Format.XML ) {
//output error in xml format
foam.lib.xml.Outputter outputterXml = new foam.lib.xml.Outputter(OutputterMode.NETWORK);
outputterXml.output(error);
out.println(outputterXml.toString());
} else if ( format == Format.CSV ) {
//output error in csv format
foam.lib.csv.Outputter outputterCsv = new foam.lib.csv.Outputter(OutputterMode.NETWORK);
outputterCsv.put(error, null);
out.println(outputterCsv.toString());
} else if ( format == Format.HTML ) {
foam.lib.html.Outputter outputterHtml = new foam.lib.html.Outputter(OutputterMode.NETWORK);
outputterHtml.outputStartHtml();
outputterHtml.outputStartTable();
outputterHtml.outputHead(error);
outputterHtml.put(error, null);
outputterHtml.outputEndTable();
outputterHtml.outputEndHtml();
out.println(outputterHtml.toString());
} else if ( format == Format.JSONJ ) {
//output error in jsonJ format
JSONParser jsonParser = new JSONParser();
jsonParser.setX(x);
foam.lib.json.Outputter outputterJson = new foam.lib.json.Outputter(OutputterMode.STORAGE);
outputterJson.setOutputDefaultValues(true);
outputterJson.setOutputClassNames(true);
outputterJson.outputJSONJFObject(error);
out.println(outputterJson.toString());
} else {
// TODO
}
}
}
| invalid json throws errors now
| src/foam/nanos/dig/DigWebAgent.java | invalid json throws errors now |
|
Java | apache-2.0 | 72016682262ca1e696bf8af78e27dc54f759544a | 0 | ansell/commons-rdf | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.rdf.api;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.Optional;
import java.util.concurrent.Future;
import java.util.function.Consumer;
/**
* Builder for parsing an RDF source into a target (e.g. a Graph/Dataset).
* <p>
* This interface follows the
* <a href="https://en.wikipedia.org/wiki/Builder_pattern">Builder pattern</a>,
* allowing to set parser settings like {@link #contentType(RDFSyntax)} and
* {@link #base(IRI)}. A caller MUST call one of the <code>source</code> methods
* (e.g. {@link #source(IRI)}, {@link #source(Path)},
* {@link #source(InputStream)}), and MUST call one of the <code>target</code>
* methods (e.g. {@link #target(Consumer)}, {@link #target(Dataset)},
* {@link #target(Graph)}) before calling {@link #parse()} on the returned
* RDFParserBuilder - however methods can be called in any order.
* <p>
* The call to {@link #parse()} returns a {@link Future}, allowing asynchronous
* parse operations. Callers are recommended to check {@link Future#get()} to
* ensure parsing completed successfully, or catch exceptions thrown during
* parsing.
* <p>
* Setting a method that has already been set will override any existing value
* in the returned builder - regardless of the parameter type (e.g.
* {@link #source(IRI)} will override a previous {@link #source(Path)}. Settings
* can be unset by passing <code>null</code> - note that this may
* require casting, e.g. <code>contentType( (RDFSyntax) null )</code>
* to undo a previous call to {@link #contentType(RDFSyntax)}.
* <p>
* It is undefined if a RDFParserBuilder is mutable or thread-safe, so callers
* should always use the returned modified RDFParserBuilder from the builder
* methods. The builder may return itself after modification,
* or a cloned builder with the modified settings applied.
* Implementations are however encouraged to be immutable,
* thread-safe and document this. As an example starting point, see
* {@link org.apache.commons.rdf.simple.AbstractRDFParserBuilder}.
* <p>
* Example usage:
* </p>
*
* <pre>
* Graph g1 = rDFTermFactory.createGraph();
* new ExampleRDFParserBuilder()
* .source(Paths.get("/tmp/graph.ttl"))
* .contentType(RDFSyntax.TURTLE)
* .target(g1)
* .parse().get(30, TimeUnit.SECONDS);
* </pre>
*
*/
public interface RDFParserBuilder {
/**
* The result of {@link RDFParserBuilder#parse()} indicating
* parsing completed.
* <p>
* This is a marker interface that may be subclassed to include
* parser details, e.g. warning messages or triple counts.
*/
public interface ParseResult {
}
/**
* Specify which {@link RDFTermFactory} to use for generating
* {@link RDFTerm}s.
* <p>
* This option may be used together with {@link #target(Graph)} to
* override the implementation's default factory and graph.
* <p>
* <strong>Warning:</strong> Using the same {@link RDFTermFactory} for
* multiple {@link #parse()} calls may accidentally merge
* {@link BlankNode}s having the same label, as the parser may
* use the {@link RDFTermFactory#createBlankNode(String)} method
* from the parsed blank node labels.
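* <p>
* A simple way to avoid such merging is to supply a fresh factory for
* each parse. For example (<code>freshFactory()</code> below stands in
* for however factory instances are created):
* <pre>
* parserBuilder.rdfTermFactory(freshFactory())
*     .source(file).target(graph).parse();
* </pre>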
*
* @see #target(Graph)
* @param rdfTermFactory
* {@link RDFTermFactory} to use for generating RDFTerms.
* @return An {@link RDFParserBuilder} that will use the specified
* rdfTermFactory
*/
RDFParserBuilder rdfTermFactory(RDFTermFactory rdfTermFactory);
/**
* Specify the content type of the RDF syntax to parse.
* <p>
* This option can be used to select the RDFSyntax of the source, overriding
* any <code>Content-Type</code> headers or equivalent.
* <p>
* The character set of the RDFSyntax is assumed to be
* {@link StandardCharsets#UTF_8} unless overridden within the document
* (e.g. <code><?xml version="1.0" encoding="iso-8859-1"?></code> in
* {@link RDFSyntax#RDFXML}).
* <p>
* This method will override any contentType set with
* {@link #contentType(String)}.
*
* @see #contentType(String)
* @param rdfSyntax
* An {@link RDFSyntax} to parse the source according to, e.g.
* {@link RDFSyntax#TURTLE}.
* @throws IllegalArgumentException
* If this RDFParserBuilder does not support the specified
* RDFSyntax.
* @return An {@link RDFParserBuilder} that will use the specified content
* type.
*/
RDFParserBuilder contentType(RDFSyntax rdfSyntax) throws IllegalArgumentException;
/**
* Specify the content type of the RDF syntax to parse.
* <p>
* This option can be used to select the RDFSyntax of the source, overriding
* any <code>Content-Type</code> headers or equivalent.
* <p>
* The content type MAY include a <code>charset</code> parameter if the RDF
* media types permit it; the default charset is
* {@link StandardCharsets#UTF_8} unless overridden within the document.
* <p>
* This method will override any contentType set with
* {@link #contentType(RDFSyntax)}.
*
* @see #contentType(RDFSyntax)
* @param contentType
* A content-type string, e.g. <code>application/ld+json</code>
* or <code>text/turtle;charset="UTF-8"</code> as specified by
* <a href="https://tools.ietf.org/html/rfc7231#section-3.1.1.1">
* RFC7231</a>.
* @return An {@link RDFParserBuilder} that will use the specified content
* type.
* @throws IllegalArgumentException
* If the contentType has an invalid syntax, or this
* RDFParserBuilder does not support the specified contentType.
*/
RDFParserBuilder contentType(String contentType) throws IllegalArgumentException;
/**
* Specify a {@link Graph} to add parsed triples to.
* <p>
* If the source supports datasets (e.g. the {@link #contentType(RDFSyntax)}
* set has {@link RDFSyntax#supportsDataset} is true)), then only quads in
* the <em>default graph</em> will be added to the Graph as {@link Triple}s.
* <p>
* It is undefined if any triples are added to the specified {@link Graph}
* if {@link #parse()} throws any exceptions. (However implementations are
* free to prevent this using transaction mechanisms or similar). If
* {@link Future#get()} does not indicate an exception, the parser
* implementation SHOULD have inserted all parsed triples to the specified
* graph.
* <p>
* Calling this method will override any earlier targets set with
* {@link #target(Graph)}, {@link #target(Consumer)} or
* {@link #target(Dataset)}.
* <p>
* The default implementation of this method calls {@link #target(Consumer)}
* with a {@link Consumer} that does {@link Graph#add(Triple)} with
* {@link Quad#asTriple()} if the quad is in the default graph.
*
* @param graph
* The {@link Graph} to add triples to.
* @return An {@link RDFParserBuilder} that will insert triples into the
* specified graph.
*/
default RDFParserBuilder target(Graph graph) {
return target(q -> {
if (! q.getGraphName().isPresent()) {
graph.add(q.asTriple());
}
});
}
/**
* Specify a {@link Dataset} to add parsed quads to.
* <p>
* It is undefined if any quads are added to the specified
* {@link Dataset} if {@link #parse()} throws any exceptions.
* (However implementations are free to prevent this using transaction
* mechanisms or similar). On the other hand, if {@link #parse()}
* does not indicate an exception, the
* implementation SHOULD have inserted all parsed quads
* to the specified dataset.
* <p>
* Calling this method will override any earlier targets set with
* {@link #target(Graph)}, {@link #target(Consumer)} or {@link #target(Dataset)}.
* <p>
* The default implementation of this method calls {@link #target(Consumer)}
* with a {@link Consumer} that does {@link Dataset#add(Quad)}.
*
* @param dataset
* The {@link Dataset} to add quads to.
* @return An {@link RDFParserBuilder} that will insert triples into the
* specified dataset.
*/
default RDFParserBuilder target(Dataset dataset) {
return target(dataset::add);
}
/**
* Specify a consumer for parsed quads.
* <p>
* The quads will include triples in all named graphs of the parsed
* source, including any triples in the default graph.
* When parsing a source format which does not support datasets, all quads
* delivered to the consumer will be in the default graph
* (e.g. their {@link Quad#getGraphName()} will be
* {@link Optional#empty()}), while a source format which supports
* datasets may also deliver quads with named graphs.
* <p>
* It is undefined if any quads are consumed if {@link #parse()} throws any
* exceptions. On the other hand, if {@link #parse()} does not indicate an
* exception, the implementation SHOULD have produced all parsed quads to
* the specified consumer.
* <p>
* Calling this method will override any earlier targets set with
* {@link #target(Graph)}, {@link #target(Consumer)} or
* {@link #target(Dataset)}.
* <p>
* The consumer is not assumed to be thread safe - only one
* {@link Consumer#accept(Object)} is delivered at a time for a given
* {@link RDFParserBuilder#parse()} call.
* <p>
* This method is typically called with a functional consumer, for example:
* <pre>
* List<Quad> quads = new ArrayList<>();
* parserBuilder.target(quads::add).parse();
* </pre>
*
* @param consumer
* A {@link Consumer} of {@link Quad}s
* @return An {@link RDFParserBuilder} that will call the consumer for into
* the specified dataset.
*/
RDFParserBuilder target(Consumer<Quad> consumer);
/**
* Specify a base IRI to use for parsing any relative IRI references.
* <p>
* Setting this option will override any protocol-specific base IRI (e.g.
* <code>Content-Location</code> header) or the {@link #source(IRI)} IRI,
* but does not override any base IRIs set within the source document (e.g.
* <code>@base</code> in Turtle documents).
* <p>
* If the source is in a syntax that does not support relative IRI
* references (e.g. {@link RDFSyntax#NTRIPLES}), setting the
* <code>base</code> has no effect.
* <p>
* This method will override any base IRI set with {@link #base(String)}.
*
* @see #base(String)
* @param base
* An absolute IRI to use as a base.
* @return An {@link RDFParserBuilder} that will use the specified base IRI.
*/
RDFParserBuilder base(IRI base);
/**
* Specify a base IRI to use for parsing any relative IRI references.
* <p>
* Setting this option will override any protocol-specific base IRI (e.g.
* <code>Content-Location</code> header) or the {@link #source(IRI)} IRI,
* but does not override any base IRIs set within the source document (e.g.
* <code>@base</code> in Turtle documents).
* <p>
* If the source is in a syntax that does not support relative IRI
* references (e.g. {@link RDFSyntax#NTRIPLES}), setting the
* <code>base</code> has no effect.
* <p>
* This method will override any base IRI set with {@link #base(IRI)}.
*
* @see #base(IRI)
* @param base
* An absolute IRI to use as a base.
* @return An {@link RDFParserBuilder} that will use the specified base IRI.
* @throws IllegalArgumentException
* If the base is not a valid absolute IRI string
*/
RDFParserBuilder base(String base) throws IllegalArgumentException;
/**
* Specify a source {@link InputStream} to parse.
* <p>
* The source set will not be read before the call to {@link #parse()}.
* <p>
* The InputStream will not be closed after parsing. The InputStream does
* not need to support {@link InputStream#markSupported()}.
* <p>
* The parser might not consume the complete stream (e.g. an RDF/XML parser
* may not read beyond the closing tag of
* <code></rdf:Description></code>).
* <p>
* The {@link #contentType(RDFSyntax)} or {@link #contentType(String)}
* SHOULD be set before calling {@link #parse()}.
* <p>
* The character set is assumed to be {@link StandardCharsets#UTF_8} unless
* the {@link #contentType(String)} specifies otherwise or the document
* declares its own charset (e.g. RDF/XML with a
* <code><?xml encoding="iso-8859-1"></code> header).
* <p>
* The {@link #base(IRI)} or {@link #base(String)} MUST be set before
* calling {@link #parse()}, unless the RDF syntax does not permit relative
* IRIs (e.g. {@link RDFSyntax#NTRIPLES}).
* <p>
* This method will override any source set with {@link #source(IRI)},
* {@link #source(Path)} or {@link #source(String)}.
*
* @param inputStream
* An InputStream to consume
* @return An {@link RDFParserBuilder} that will use the specified source.
*/
RDFParserBuilder source(InputStream inputStream);
/**
* Specify a source file {@link Path} to parse.
* <p>
* The source set will not be read before the call to {@link #parse()}.
* <p>
* The {@link #contentType(RDFSyntax)} or {@link #contentType(String)}
* SHOULD be set before calling {@link #parse()}.
* <p>
* The character set is assumed to be {@link StandardCharsets#UTF_8} unless
* the {@link #contentType(String)} specifies otherwise or the document
* declares its own charset (e.g. RDF/XML with a
* <code><?xml encoding="iso-8859-1"></code> header).
* <p>
* The {@link #base(IRI)} or {@link #base(String)} MAY be set before calling
* {@link #parse()}, otherwise {@link Path#toUri()} will be used as the base
* IRI.
* <p>
* This method will override any source set with {@link #source(IRI)},
* {@link #source(InputStream)} or {@link #source(String)}.
*
* @param file
* A Path for a file to parse
* @return An {@link RDFParserBuilder} that will use the specified source.
*/
RDFParserBuilder source(Path file);
/**
* Specify an absolute source {@link IRI} to retrieve and parse.
* <p>
* The source set will not be read before the call to {@link #parse()}.
* <p>
* If this builder does not support the given IRI protocol (e.g.
* <code>urn:uuid:ce667463-c5ab-4c23-9b64-701d055c4890</code>), this method
* should succeed, while the {@link #parse()} should throw an
* {@link IOException}.
* <p>
* The {@link #contentType(RDFSyntax)} or {@link #contentType(String)} MAY
* be set before calling {@link #parse()}, in which case that type MAY be
* used for content negotiation (e.g. <code>Accept</code> header in HTTP),
* and SHOULD be used for selecting the RDFSyntax.
* <p>
* The character set is assumed to be {@link StandardCharsets#UTF_8} unless
* the protocol's equivalent of <code>Content-Type</code> specifies
* otherwise or the document declares its own charset (e.g. RDF/XML with a
* <code><?xml encoding="iso-8859-1"></code> header).
* <p>
* The {@link #base(IRI)} or {@link #base(String)} MAY be set before calling
* {@link #parse()}, otherwise the source IRI will be used as the base IRI.
* <p>
* This method will override any source set with {@link #source(Path)},
* {@link #source(InputStream)} or {@link #source(String)}.
*
* @param iri
* An IRI to retrieve and parse
* @return An {@link RDFParserBuilder} that will use the specified source.
*/
RDFParserBuilder source(IRI iri);
/**
* Specify an absolute source IRI to retrieve and parse.
* <p>
* The source set will not be read before the call to {@link #parse()}.
* <p>
* If this builder does not support the given IRI (e.g.
* <code>urn:uuid:ce667463-c5ab-4c23-9b64-701d055c4890</code>), this method
* should succeed, while the {@link #parse()} should throw an
* {@link IOException}.
* <p>
* The {@link #contentType(RDFSyntax)} or {@link #contentType(String)} MAY
* be set before calling {@link #parse()}, in which case that type MAY be
* used for content negotiation (e.g. <code>Accept</code> header in HTTP),
* and SHOULD be used for selecting the RDFSyntax.
* <p>
* The character set is assumed to be {@link StandardCharsets#UTF_8} unless
* the protocol's equivalent of <code>Content-Type</code> specifies
* otherwise or the document declares its own charset (e.g. RDF/XML with a
* <code><?xml encoding="iso-8859-1"></code> header).
* <p>
* The {@link #base(IRI)} or {@link #base(String)} MAY be set before calling
* {@link #parse()}, otherwise the source IRI will be used as the base IRI.
* <p>
* This method will override any source set with {@link #source(Path)},
* {@link #source(InputStream)} or {@link #source(IRI)}.
*
* @param iri
* An IRI to retrieve and parse
* @return An {@link RDFParserBuilder} that will use the specified source.
* @throws IllegalArgumentException
* If the base is not a valid absolute IRI string
*
*/
RDFParserBuilder source(String iri) throws IllegalArgumentException;
/**
* Parse the specified source.
* <p>
* A source method (e.g. {@link #source(InputStream)}, {@link #source(IRI)},
* {@link #source(Path)}, {@link #source(String)} or an equivalent subclass
* method) MUST have been called before calling this method, otherwise an
* {@link IllegalStateException} will be thrown.
* <p>
* A target method (e.g. {@link #target(Consumer)}, {@link #target(Dataset)},
* {@link #target(Graph)} or an equivalent subclass method) MUST have been
* called before calling parse(), otherwise an
* {@link IllegalStateException} will be thrown.
* <p>
* It is undefined if this method is thread-safe, however the
* {@link RDFParserBuilder} may be reused (e.g. setting a different source)
* as soon as the {@link Future} has been returned from this method.
* <p>
* The RDFParserBuilder SHOULD perform the parsing as an asynchronous
* operation, and return the {@link Future} as soon as preliminary checks
* (such as validity of the {@link #source(IRI)} and
* {@link #contentType(RDFSyntax)} settings) have finished. The future
* SHOULD not mark {@link Future#isDone()} before parsing is complete. A
* synchronous implementation MAY be blocking on the <code>parse()</code>
* call and return a Future that is already {@link Future#isDone()}.
* <p>
* The returned {@link Future} contains a {@link ParseResult}.
* Implementations may subclass this interface to provide any
* parser details, e.g. list of warnings. <code>null</code> is a
* possible return value if no details are available, but
* parsing succeeded.
* <p>
* If an exception occurs during parsing, (e.g. {@link IOException} or
* {@link org.apache.commons.rdf.simple.RDFParseException}),
* it should be indicated as the
* {@link java.util.concurrent.ExecutionException#getCause()} in the
* {@link java.util.concurrent.ExecutionException} thrown on
* {@link Future#get()}.
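* <p>
* For example, a minimal sketch of waiting for completion and surfacing
* the underlying parse error (the variable names are illustrative):
* <pre>
* Future<? extends ParseResult> f = parserBuilder.parse();
* try {
*     f.get(30, TimeUnit.SECONDS);
* } catch (ExecutionException ex) {
*     throw new IOException("Parsing failed", ex.getCause());
* } catch (InterruptedException | TimeoutException ex) {
*     // handle cancellation or timeout as appropriate
* }
* </pre>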
*
* @return A Future that will return the populated {@link Graph} when the
* parsing has finished.
* @throws IOException
* If an error occurred while starting to read the source (e.g.
* file not found, unsupported IRI protocol). Note that IO
* errors during parsing would instead be the
* {@link java.util.concurrent.ExecutionException#getCause()} of
* the {@link java.util.concurrent.ExecutionException} thrown on
* {@link Future#get()}.
* @throws IllegalStateException
* If the builder is in an invalid state, e.g. a
* <code>source</code> has not been set.
*/
Future<? extends ParseResult> parse() throws IOException, IllegalStateException;
}
| api/src/main/java/org/apache/commons/rdf/api/RDFParserBuilder.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.rdf.api;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Path;
import java.util.concurrent.Future;
import java.util.function.Consumer;
/**
* Builder for parsing an RDF source into a target (e.g. a Graph/Dataset).
* <p>
* This interface follows the
* <a href="https://en.wikipedia.org/wiki/Builder_pattern">Builder pattern</a>,
* allowing to set parser settings like {@link #contentType(RDFSyntax)} and
* {@link #base(IRI)}. A caller MUST call one of the <code>source</code> methods
* (e.g. {@link #source(IRI)}, {@link #source(Path)},
* {@link #source(InputStream)}), and MUST call one of the <code>target</code>
* methods (e.g. {@link #target(Consumer)}, {@link #target(Dataset)},
* {@link #target(Graph)}) before calling {@link #parse()} on the returned
* RDFParserBuilder - however methods can be called in any order.
* <p>
* The call to {@link #parse()} returns a {@link Future}, allowing asynchronous
* parse operations. Callers are recommended to check {@link Future#get()} to
* ensure parsing completed successfully, or catch exceptions thrown during
* parsing.
* <p>
* Setting a method that has already been set will override any existing value
 * in the returned builder - regardless of the parameter type (e.g.
 * {@link #source(IRI)} will override a previous {@link #source(Path)}). Settings
* can be unset by passing <code>null</code> - note that this may
* require casting, e.g. <code>contentType( (RDFSyntax) null )</code>
* to undo a previous call to {@link #contentType(RDFSyntax)}.
* <p>
 * It is undefined if an RDFParserBuilder is mutable or thread-safe, so callers
* should always use the returned modified RDFParserBuilder from the builder
* methods. The builder may return itself after modification,
* or a cloned builder with the modified settings applied.
* Implementations are however encouraged to be immutable,
* thread-safe and document this. As an example starting point, see
* {@link org.apache.commons.rdf.simple.AbstractRDFParserBuilder}.
* <p>
* Example usage:
* </p>
*
* <pre>
* Graph g1 = rDFTermFactory.createGraph();
* new ExampleRDFParserBuilder()
* .source(Paths.get("/tmp/graph.ttl"))
* .contentType(RDFSyntax.TURTLE)
* .target(g1)
 * .parse().get(30, TimeUnit.SECONDS);
* </pre>
*
*/
public interface RDFParserBuilder {
/**
* The result of {@link RDFParserBuilder#parse()} indicating
* parsing completed.
* <p>
* This is a marker interface that may be subclassed to include
* parser details, e.g. warning messages or triple counts.
*/
public interface ParseResult {
}
/**
* Specify which {@link RDFTermFactory} to use for generating
* {@link RDFTerm}s.
* <p>
* This option may be used together with {@link #target(Graph)} to
* override the implementation's default factory and graph.
* <p>
* <strong>Warning:</strong> Using the same {@link RDFTermFactory} for
* multiple {@link #parse()} calls may accidentally merge
* {@link BlankNode}s having the same label, as the parser may
* use the {@link RDFTermFactory#createBlankNode(String)} method
* from the parsed blank node labels.
*
* @see #target(Graph)
* @param rdfTermFactory
* {@link RDFTermFactory} to use for generating RDFTerms.
* @return An {@link RDFParserBuilder} that will use the specified
* rdfTermFactory
*/
RDFParserBuilder rdfTermFactory(RDFTermFactory rdfTermFactory);
/**
* Specify the content type of the RDF syntax to parse.
* <p>
* This option can be used to select the RDFSyntax of the source, overriding
* any <code>Content-Type</code> headers or equivalent.
* <p>
* The character set of the RDFSyntax is assumed to be
* {@link StandardCharsets#UTF_8} unless overridden within the document
 * (e.g. <code>&lt;?xml version="1.0" encoding="iso-8859-1"?&gt;</code> in
* {@link RDFSyntax#RDFXML}).
* <p>
* This method will override any contentType set with
* {@link #contentType(String)}.
*
* @see #contentType(String)
* @param rdfSyntax
* An {@link RDFSyntax} to parse the source according to, e.g.
* {@link RDFSyntax#TURTLE}.
* @throws IllegalArgumentException
* If this RDFParserBuilder does not support the specified
* RDFSyntax.
* @return An {@link RDFParserBuilder} that will use the specified content
* type.
*/
RDFParserBuilder contentType(RDFSyntax rdfSyntax) throws IllegalArgumentException;
/**
* Specify the content type of the RDF syntax to parse.
* <p>
* This option can be used to select the RDFSyntax of the source, overriding
* any <code>Content-Type</code> headers or equivalent.
* <p>
* The content type MAY include a <code>charset</code> parameter if the RDF
* media types permit it; the default charset is
* {@link StandardCharsets#UTF_8} unless overridden within the document.
* <p>
* This method will override any contentType set with
* {@link #contentType(RDFSyntax)}.
*
* @see #contentType(RDFSyntax)
* @param contentType
* A content-type string, e.g. <code>application/ld+json</code>
* or <code>text/turtle;charset="UTF-8"</code> as specified by
* <a href="https://tools.ietf.org/html/rfc7231#section-3.1.1.1">
* RFC7231</a>.
* @return An {@link RDFParserBuilder} that will use the specified content
* type.
* @throws IllegalArgumentException
* If the contentType has an invalid syntax, or this
* RDFParserBuilder does not support the specified contentType.
*/
RDFParserBuilder contentType(String contentType) throws IllegalArgumentException;
/**
* Specify a {@link Graph} to add parsed triples to.
* <p>
 * If the source supports datasets (e.g. the {@link RDFSyntax} set with
 * {@link #contentType(RDFSyntax)} has {@link RDFSyntax#supportsDataset}
 * <code>true</code>), then only quads in
* the <em>default graph</em> will be added to the Graph as {@link Triple}s.
* <p>
* It is undefined if any triples are added to the specified {@link Graph}
* if {@link #parse()} throws any exceptions. (However implementations are
* free to prevent this using transaction mechanisms or similar). If
* {@link Future#get()} does not indicate an exception, the parser
* implementation SHOULD have inserted all parsed triples to the specified
* graph.
* <p>
* Calling this method will override any earlier targets set with
* {@link #target(Graph)}, {@link #target(Consumer)} or
* {@link #target(Dataset)}.
* <p>
* The default implementation of this method calls {@link #target(Consumer)}
* with a {@link Consumer} that does {@link Graph#add(Triple)} with
* {@link Quad#asTriple()} if the quad is in the default graph.
*
* @param graph
* The {@link Graph} to add triples to.
* @return An {@link RDFParserBuilder} that will insert triples into the
* specified graph.
*/
default RDFParserBuilder target(Graph graph) {
return target(q -> {
if (! q.getGraphName().isPresent()) {
graph.add(q.asTriple());
}
});
}
/**
* Specify a {@link Dataset} to add parsed quads to.
* <p>
* It is undefined if any quads are added to the specified
* {@link Dataset} if {@link #parse()} throws any exceptions.
* (However implementations are free to prevent this using transaction
* mechanisms or similar). On the other hand, if {@link #parse()}
* does not indicate an exception, the
* implementation SHOULD have inserted all parsed quads
* to the specified dataset.
* <p>
* Calling this method will override any earlier targets set with
* {@link #target(Graph)}, {@link #target(Consumer)} or {@link #target(Dataset)}.
* <p>
* The default implementation of this method calls {@link #target(Consumer)}
* with a {@link Consumer} that does {@link Dataset#add(Quad)}.
*
* @param dataset
* The {@link Dataset} to add quads to.
 * @return An {@link RDFParserBuilder} that will insert quads into the
* specified dataset.
*/
default RDFParserBuilder target(Dataset dataset) {
return target(dataset::add);
}
/**
* Specify a consumer for parsed quads.
* <p>
* It is undefined if any quads are consumed if {@link #parse()} throws any
* exceptions. On the other hand, if {@link #parse()} does not indicate an
* exception, the implementation SHOULD have produced all parsed quads to
* the specified consumer.
* <p>
* Calling this method will override any earlier targets set with
* {@link #target(Graph)}, {@link #target(Consumer)} or
* {@link #target(Dataset)}.
* <p>
* The consumer is not assumed to be thread safe - only one
* {@link Consumer#accept(Object)} is delivered at a time for a given
* {@link RDFParserBuilder#parse()} call.
* <p>
* This method is typically called with a functional consumer, for example:
* <pre>
 * List&lt;Quad&gt; quads = new ArrayList&lt;Quad&gt;();
* parserBuilder.target(quads::add).parse();
* </pre>
*
* @param consumer
* A {@link Consumer} of {@link Quad}s
 * @return An {@link RDFParserBuilder} that will call the specified consumer
 *         for each parsed quad.
*/
RDFParserBuilder target(Consumer<Quad> consumer);
/**
* Specify a base IRI to use for parsing any relative IRI references.
* <p>
* Setting this option will override any protocol-specific base IRI (e.g.
* <code>Content-Location</code> header) or the {@link #source(IRI)} IRI,
* but does not override any base IRIs set within the source document (e.g.
* <code>@base</code> in Turtle documents).
* <p>
* If the source is in a syntax that does not support relative IRI
* references (e.g. {@link RDFSyntax#NTRIPLES}), setting the
* <code>base</code> has no effect.
* <p>
* This method will override any base IRI set with {@link #base(String)}.
*
* @see #base(String)
* @param base
* An absolute IRI to use as a base.
* @return An {@link RDFParserBuilder} that will use the specified base IRI.
*/
RDFParserBuilder base(IRI base);
/**
* Specify a base IRI to use for parsing any relative IRI references.
* <p>
* Setting this option will override any protocol-specific base IRI (e.g.
* <code>Content-Location</code> header) or the {@link #source(IRI)} IRI,
* but does not override any base IRIs set within the source document (e.g.
* <code>@base</code> in Turtle documents).
* <p>
* If the source is in a syntax that does not support relative IRI
* references (e.g. {@link RDFSyntax#NTRIPLES}), setting the
* <code>base</code> has no effect.
* <p>
* This method will override any base IRI set with {@link #base(IRI)}.
*
* @see #base(IRI)
* @param base
* An absolute IRI to use as a base.
* @return An {@link RDFParserBuilder} that will use the specified base IRI.
* @throws IllegalArgumentException
* If the base is not a valid absolute IRI string
*/
RDFParserBuilder base(String base) throws IllegalArgumentException;
/**
* Specify a source {@link InputStream} to parse.
* <p>
* The source set will not be read before the call to {@link #parse()}.
* <p>
* The InputStream will not be closed after parsing. The InputStream does
* not need to support {@link InputStream#markSupported()}.
* <p>
* The parser might not consume the complete stream (e.g. an RDF/XML parser
* may not read beyond the closing tag of
* <code></rdf:Description></code>).
* <p>
* The {@link #contentType(RDFSyntax)} or {@link #contentType(String)}
* SHOULD be set before calling {@link #parse()}.
* <p>
* The character set is assumed to be {@link StandardCharsets#UTF_8} unless
* the {@link #contentType(String)} specifies otherwise or the document
* declares its own charset (e.g. RDF/XML with a
* <code><?xml encoding="iso-8859-1"></code> header).
* <p>
* The {@link #base(IRI)} or {@link #base(String)} MUST be set before
* calling {@link #parse()}, unless the RDF syntax does not permit relative
* IRIs (e.g. {@link RDFSyntax#NTRIPLES}).
* <p>
* This method will override any source set with {@link #source(IRI)},
* {@link #source(Path)} or {@link #source(String)}.
*
* @param inputStream
* An InputStream to consume
* @return An {@link RDFParserBuilder} that will use the specified source.
*/
RDFParserBuilder source(InputStream inputStream);
/**
* Specify a source file {@link Path} to parse.
* <p>
* The source set will not be read before the call to {@link #parse()}.
* <p>
* The {@link #contentType(RDFSyntax)} or {@link #contentType(String)}
* SHOULD be set before calling {@link #parse()}.
* <p>
* The character set is assumed to be {@link StandardCharsets#UTF_8} unless
* the {@link #contentType(String)} specifies otherwise or the document
* declares its own charset (e.g. RDF/XML with a
* <code><?xml encoding="iso-8859-1"></code> header).
* <p>
* The {@link #base(IRI)} or {@link #base(String)} MAY be set before calling
* {@link #parse()}, otherwise {@link Path#toUri()} will be used as the base
* IRI.
* <p>
* This method will override any source set with {@link #source(IRI)},
* {@link #source(InputStream)} or {@link #source(String)}.
*
* @param file
* A Path for a file to parse
* @return An {@link RDFParserBuilder} that will use the specified source.
*/
RDFParserBuilder source(Path file);
/**
* Specify an absolute source {@link IRI} to retrieve and parse.
* <p>
* The source set will not be read before the call to {@link #parse()}.
* <p>
* If this builder does not support the given IRI (e.g.
* <code>urn:uuid:ce667463-c5ab-4c23-9b64-701d055c4890</code>), this method
* should succeed, while the {@link #parse()} should throw an
* {@link IOException}.
* <p>
* The {@link #contentType(RDFSyntax)} or {@link #contentType(String)} MAY
* be set before calling {@link #parse()}, in which case that type MAY be
* used for content negotiation (e.g. <code>Accept</code> header in HTTP),
* and SHOULD be used for selecting the RDFSyntax.
* <p>
* The character set is assumed to be {@link StandardCharsets#UTF_8} unless
* the protocol's equivalent of <code>Content-Type</code> specifies
* otherwise or the document declares its own charset (e.g. RDF/XML with a
* <code><?xml encoding="iso-8859-1"></code> header).
* <p>
* The {@link #base(IRI)} or {@link #base(String)} MAY be set before calling
* {@link #parse()}, otherwise the source IRI will be used as the base IRI.
* <p>
* This method will override any source set with {@link #source(Path)},
* {@link #source(InputStream)} or {@link #source(String)}.
*
* @param iri
* An IRI to retrieve and parse
* @return An {@link RDFParserBuilder} that will use the specified source.
*/
RDFParserBuilder source(IRI iri);
/**
* Specify an absolute source IRI to retrieve and parse.
* <p>
* The source set will not be read before the call to {@link #parse()}.
* <p>
* If this builder does not support the given IRI (e.g.
* <code>urn:uuid:ce667463-c5ab-4c23-9b64-701d055c4890</code>), this method
* should succeed, while the {@link #parse()} should throw an
* {@link IOException}.
* <p>
* The {@link #contentType(RDFSyntax)} or {@link #contentType(String)} MAY
* be set before calling {@link #parse()}, in which case that type MAY be
* used for content negotiation (e.g. <code>Accept</code> header in HTTP),
* and SHOULD be used for selecting the RDFSyntax.
* <p>
* The character set is assumed to be {@link StandardCharsets#UTF_8} unless
* the protocol's equivalent of <code>Content-Type</code> specifies
* otherwise or the document declares its own charset (e.g. RDF/XML with a
* <code><?xml encoding="iso-8859-1"></code> header).
* <p>
* The {@link #base(IRI)} or {@link #base(String)} MAY be set before calling
* {@link #parse()}, otherwise the source IRI will be used as the base IRI.
* <p>
* This method will override any source set with {@link #source(Path)},
* {@link #source(InputStream)} or {@link #source(IRI)}.
*
* @param iri
* An IRI to retrieve and parse
* @return An {@link RDFParserBuilder} that will use the specified source.
* @throws IllegalArgumentException
* If the base is not a valid absolute IRI string
*
*/
RDFParserBuilder source(String iri) throws IllegalArgumentException;
/**
* Parse the specified source.
* <p>
* A source method (e.g. {@link #source(InputStream)}, {@link #source(IRI)},
* {@link #source(Path)}, {@link #source(String)} or an equivalent subclass
* method) MUST have been called before calling this method, otherwise an
* {@link IllegalStateException} will be thrown.
* <p>
 * A target method (e.g. {@link #target(Consumer)}, {@link #target(Dataset)},
 * {@link #target(Graph)} or an equivalent subclass method) MUST have been
* called before calling this method, otherwise an
* {@link IllegalStateException} will be thrown.
* <p>
* It is undefined if this method is thread-safe, however the
* {@link RDFParserBuilder} may be reused (e.g. setting a different source)
* as soon as the {@link Future} has been returned from this method.
* <p>
* The RDFParserBuilder SHOULD perform the parsing as an asynchronous
* operation, and return the {@link Future} as soon as preliminary checks
* (such as validity of the {@link #source(IRI)} and
* {@link #contentType(RDFSyntax)} settings) have finished. The future
 * SHOULD NOT return <code>true</code> from {@link Future#isDone()} before
 * parsing is complete. A
* synchronous implementation MAY be blocking on the <code>parse()</code>
* call and return a Future that is already {@link Future#isDone()}.
* <p>
* The returned {@link Future} contains a {@link ParseResult}.
* Implementations may subclass this interface to provide any
* parser details, e.g. list of warnings. <code>null</code> is a
* possible return value if no details are available, but
* parsing succeeded.
* <p>
 * If an exception occurs during parsing (e.g. {@link IOException} or
* {@link org.apache.commons.rdf.simple.AbstractRDFParserBuilder.RDFParseException}),
* it should be indicated as the
* {@link java.util.concurrent.ExecutionException#getCause()} in the
* {@link java.util.concurrent.ExecutionException} thrown on
* {@link Future#get()}.
*
 * @return A Future that will return the {@link ParseResult} when the
 *         parsing has finished.
* @throws IOException
* If an error occurred while starting to read the source (e.g.
* file not found, unsupported IRI protocol). Note that IO
* errors during parsing would instead be the
* {@link java.util.concurrent.ExecutionException#getCause()} of
* the {@link java.util.concurrent.ExecutionException} thrown on
* {@link Future#get()}.
* @throws IllegalStateException
* If the builder is in an invalid state, e.g. a
* <code>source</code> has not been set.
*/
Future<? extends ParseResult> parse() throws IOException, IllegalStateException;
}
| target(Consumer<Quad>) includes all triples
| api/src/main/java/org/apache/commons/rdf/api/RDFParserBuilder.java | target(Consumer<Quad>) includes all triples |
|
Java | apache-2.0 | 650f721de7149bf04d6115a1bf2ecbe778c0e125 | 0 | TouK/ThrowingFunction | package com.pivovarit.function;
import com.pivovarit.function.exception.WrappedException;
import org.junit.jupiter.api.Test;
import java.io.IOException;
import java.util.concurrent.atomic.LongAdder;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy;
class ThrowingBiConsumerTest {
@Test
void shouldConsume() throws Exception {
// given
LongAdder input = new LongAdder();
ThrowingBiConsumer<Integer, Integer, Exception> consumer = (i, j) -> input.increment();
// when
consumer.accept(2, 3);
// then
assertThat(input.sum()).isEqualTo(1);
}
@Test
void shouldConsumeAfter() throws Exception {
// given
LongAdder input = new LongAdder();
ThrowingBiConsumer<Integer, Integer, Exception> consumer = (i, j) -> input.increment();
ThrowingBiConsumer<Integer, Integer, Exception> after = (i, j) -> input.increment();
// when
consumer.andThenConsume(after).accept(2, 3);
// then
assertThat(input.sum()).isEqualTo(2);
}
@Test
void shouldConsumeAsFunction() throws Exception {
// given
LongAdder input = new LongAdder();
ThrowingBiConsumer<Integer, Integer, Exception> consumer = (i, j) -> input.increment();
// when
consumer.asFunction().apply(42, 0);
// then
assertThat(input.sum()).isEqualTo(1);
}
@Test
void shouldConsumeAndThrowUnchecked() {
IOException cause = new IOException("some message");
// given
ThrowingBiConsumer<Integer, Integer, IOException> consumer = (i, j) -> { throw cause; };
// when
assertThatThrownBy(() -> ThrowingBiConsumer.unchecked(consumer).accept(3, 3))
.hasMessage(cause.getMessage())
.isInstanceOf(WrappedException.class)
.hasCauseInstanceOf(cause.getClass());
}
@Test
void shouldConsumeUnchecked() {
// given
ThrowingBiConsumer<Integer, Integer, IOException> consumer = (i, j) -> {};
// when
ThrowingBiConsumer.unchecked(consumer).accept(3, 4);
// then no exception thrown
}
@Test
void shouldConsumeAndSneakyThrow() {
IOException cause = new IOException("some message");
// given
ThrowingBiConsumer<Integer, Integer, IOException> consumer = (i, j) -> { throw cause; };
// when
assertThatThrownBy(() -> ThrowingBiConsumer.sneaky(consumer).accept(3, 3))
.hasMessage(cause.getMessage())
.isInstanceOf(IOException.class)
.hasNoCause();
}
} | src/test/java/com/pivovarit/function/ThrowingBiConsumerTest.java | package com.pivovarit.function;
import com.pivovarit.function.exception.WrappedException;
import org.junit.jupiter.api.Test;
import java.io.IOException;
import java.util.concurrent.atomic.LongAdder;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.AssertionsForClassTypes.assertThatThrownBy;
class ThrowingBiConsumerTest {
@Test
void givenString_whenSortJava8_thenSorted() {
String sorted = "bdCa".chars()
.sorted()
.collect(StringBuilder::new, StringBuilder::appendCodePoint, StringBuilder::append)
.toString();
assertThat(sorted).isEqualTo("abcd");
}
@Test
void shouldConsume() throws Exception {
// given
LongAdder input = new LongAdder();
ThrowingBiConsumer<Integer, Integer, Exception> consumer = (i, j) -> input.increment();
// when
consumer.accept(2, 3);
// then
assertThat(input.sum()).isEqualTo(1);
}
@Test
void shouldConsumeAfter() throws Exception {
// given
LongAdder input = new LongAdder();
ThrowingBiConsumer<Integer, Integer, Exception> consumer = (i, j) -> input.increment();
ThrowingBiConsumer<Integer, Integer, Exception> after = (i, j) -> input.increment();
// when
consumer.andThenConsume(after).accept(2, 3);
// then
assertThat(input.sum()).isEqualTo(2);
}
@Test
void shouldConsumeAsFunction() throws Exception {
// given
LongAdder input = new LongAdder();
ThrowingBiConsumer<Integer, Integer, Exception> consumer = (i, j) -> input.increment();
// when
consumer.asFunction().apply(42, 0);
// then
assertThat(input.sum()).isEqualTo(1);
}
@Test
void shouldConsumeAndThrowUnchecked() {
IOException cause = new IOException("some message");
// given
ThrowingBiConsumer<Integer, Integer, IOException> consumer = (i, j) -> { throw cause; };
// when
assertThatThrownBy(() -> ThrowingBiConsumer.unchecked(consumer).accept(3, 3))
.hasMessage(cause.getMessage())
.isInstanceOf(WrappedException.class)
.hasCauseInstanceOf(cause.getClass());
}
@Test
void shouldConsumeUnchecked() {
// given
ThrowingBiConsumer<Integer, Integer, IOException> consumer = (i, j) -> {};
// when
ThrowingBiConsumer.unchecked(consumer).accept(3, 4);
// then no exception thrown
}
@Test
void shouldConsumeAndSneakyThrow() {
IOException cause = new IOException("some message");
// given
ThrowingBiConsumer<Integer, Integer, IOException> consumer = (i, j) -> { throw cause; };
// when
assertThatThrownBy(() -> ThrowingBiConsumer.sneaky(consumer).accept(3, 3))
.hasMessage(cause.getMessage())
.isInstanceOf(IOException.class)
.hasNoCause();
}
} | Remove unrelated tests (#48)
| src/test/java/com/pivovarit/function/ThrowingBiConsumerTest.java | Remove unrelated tests (#48) |
|
Java | bsd-2-clause | 661bab44ba17359a2001e1b1b3e9c47c5c9b9d8c | 0 | scifio/scifio | //
// ImporterOptions.java
//
/*
LOCI Plugins for ImageJ: a collection of ImageJ plugins including the
Bio-Formats Importer, Bio-Formats Exporter, Bio-Formats Macro Extensions,
Data Browser, Stack Colorizer and Stack Slicer. Copyright (C) 2005-@year@
Melissa Linkert, Curtis Rueden and Christopher Peterson.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package loci.plugins;
import com.jgoodies.forms.builder.PanelBuilder;
import com.jgoodies.forms.layout.CellConstraints;
import com.jgoodies.forms.layout.FormLayout;
import ij.*;
import ij.gui.GenericDialog;
import ij.io.OpenDialog;
import java.awt.*;
import java.awt.event.*;
import java.awt.image.BufferedImage;
import java.util.*;
import javax.swing.*;
import loci.common.*;
import loci.formats.*;
/**
* Helper class for managing Bio-Formats Importer options.
* Gets parameter values through a variety of means, including
* preferences from IJ_Prefs.txt, plugin argument string, macro options,
* and user input from dialog boxes.
*
* <dl><dt><b>Source code:</b></dt>
* <dd><a href="https://skyking.microscopy.wisc.edu/trac/java/browser/trunk/components/loci-plugins/src/loci/plugins/ImporterOptions.java">Trac</a>,
* <a href="https://skyking.microscopy.wisc.edu/svn/java/trunk/components/loci-plugins/src/loci/plugins/ImporterOptions.java">SVN</a></dd></dl>
*/
public class ImporterOptions
implements FocusListener, ItemListener, MouseListener
{
// -- Constants --
// enumeration for status
public static final int STATUS_OK = 0;
public static final int STATUS_CANCELED = 1;
public static final int STATUS_FINISHED = 2;
// enumeration for stackFormat
public static final String VIEW_NONE = "Metadata only";
public static final String VIEW_STANDARD = "Standard ImageJ";
public static final String VIEW_HYPERSTACK = "Hyperstack";
public static final String VIEW_BROWSER = "Data Browser";
public static final String VIEW_VISBIO = "VisBio";
public static final String VIEW_IMAGE_5D = "Image5D";
public static final String VIEW_VIEW_5D = "View5D";
// enumeration for stackOrder
public static final String ORDER_DEFAULT = "Default";
public static final String ORDER_XYZCT = "XYZCT";
public static final String ORDER_XYZTC = "XYZTC";
public static final String ORDER_XYCZT = "XYCZT";
public static final String ORDER_XYTCZ = "XYTCZ";
public static final String ORDER_XYCTZ = "XYCTZ";
public static final String ORDER_XYTZC = "XYTZC";
// merging options
public static final String MERGE_DEFAULT = "Do not merge";
public static final String MERGE_PROJECTION = "Spectral projection";
// class to check for each viewing option
private static final String CLASS_VISBIO = "loci.visbio.VisBio";
private static final String CLASS_IMAGE_5D = "i5d.Image5D";
private static final String CLASS_VIEW_5D = "View5D_";
// enumeration for location
public static final String LOCATION_LOCAL = "Local machine";
public static final String LOCATION_HTTP = "Internet";
public static final String LOCATION_OME = "OME server";
public static final String LOCATION_OMERO = "OMERO server";
public static final String[] LOCATIONS = {
LOCATION_LOCAL, LOCATION_HTTP, LOCATION_OME, LOCATION_OMERO
};
// keys for use in IJ_Prefs.txt
public static final String PREF_STACK = "bioformats.stackFormat";
public static final String PREF_ORDER = "bioformats.stackOrder";
public static final String PREF_MERGE = "bioformats.mergeChannels";
public static final String PREF_COLORIZE = "bioformats.colorize";
public static final String PREF_C = "bioformats.splitWindows";
public static final String PREF_Z = "bioformats.splitFocalPlanes";
public static final String PREF_T = "bioformats.splitTimepoints";
public static final String PREF_CROP = "bioformats.crop";
public static final String PREF_METADATA = "bioformats.showMetadata";
public static final String PREF_OME_XML = "bioformats.showOMEXML";
public static final String PREF_GROUP = "bioformats.groupFiles";
public static final String PREF_CONCATENATE = "bioformats.concatenate";
public static final String PREF_RANGE = "bioformats.specifyRanges";
public static final String PREF_AUTOSCALE = "bioformats.autoscale";
public static final String PREF_VIRTUAL = "bioformats.virtual";
public static final String PREF_RECORD = "bioformats.record";
public static final String PREF_ALL_SERIES = "bioformats.openAllSeries";
public static final String PREF_MERGE_OPTION = "bioformats.mergeOption";
public static final String PREF_WINDOWLESS = "bioformats.windowless";
public static final String PREF_SERIES = "bioformats.series";
public static final String PREF_FIRST = "bioformats.firstTime";
public static final String PREF_THUMBNAIL = "bioformats.forceThumbnails";
public static final String PREF_SWAP = "bioformats.swapDimensions";
// labels for user dialog; when trimmed these double as argument & macro keys
public static final String LABEL_STACK = "View stack with: ";
public static final String LABEL_ORDER = "Stack_order: ";
public static final String LABEL_MERGE = "Merge_channels to RGB";
public static final String LABEL_COLORIZE = "Colorize channels";
public static final String LABEL_C = "Split_channels";
public static final String LABEL_Z = "Split_focal planes";
public static final String LABEL_T = "Split_timepoints";
public static final String LABEL_CROP = "Crop on import";
public static final String LABEL_METADATA =
"Display_metadata in results window";
public static final String LABEL_OME_XML = "Display_OME-XML metadata";
public static final String LABEL_GROUP = "Group_files with similar names";
public static final String LABEL_CONCATENATE =
"Concatenate_series when compatible";
public static final String LABEL_RANGE = "Specify_range for each series";
public static final String LABEL_AUTOSCALE = "Autoscale images";
public static final String LABEL_VIRTUAL = "Use_virtual_stack";
public static final String LABEL_RECORD =
"Record_modifications_to_virtual_stack";
public static final String LABEL_ALL_SERIES = "Open_all_series";
public static final String LABEL_SWAP = "Swap_dimensions";
public static final String LABEL_MERGE_OPTION = "Merging Options";
public static final String LABEL_WINDOWLESS = "windowless";
public static final String LABEL_SERIES = "series";
public static final String LABEL_LOCATION = "Location: ";
public static final String LABEL_ID = "Open";
// informative description of each option
public static final String INFO_STACK =
info(LABEL_STACK) + " Description to go here.";
public static final String INFO_ORDER =
info(LABEL_ORDER) + " Description to go here.";
public static final String INFO_MERGE =
info(LABEL_MERGE) + " Description to go here.";
public static final String INFO_COLORIZE =
info(LABEL_COLORIZE) + " Description to go here.";
public static final String INFO_C =
info(LABEL_C) + " Description to go here.";
public static final String INFO_Z =
info(LABEL_Z) + " Description to go here.";
public static final String INFO_T =
info(LABEL_T) + " Description to go here.";
public static final String INFO_CROP =
info(LABEL_CROP) + " Description to go here.";
public static final String INFO_METADATA =
info(LABEL_METADATA) + " Description to go here.";
public static final String INFO_OME_XML =
info(LABEL_OME_XML) + " Description to go here.";
public static final String INFO_GROUP =
info(LABEL_GROUP) + " Description to go here.";
public static final String INFO_CONCATENATE =
info(LABEL_CONCATENATE) + " Description to go here.";
public static final String INFO_RANGE =
info(LABEL_RANGE) + " Description to go here.";
public static final String INFO_AUTOSCALE =
info(LABEL_AUTOSCALE) + " Description to go here.";
public static final String INFO_VIRTUAL =
info(LABEL_VIRTUAL) + " Description to go here.";
public static final String INFO_RECORD =
info(LABEL_RECORD) + " Description to go here.";
public static final String INFO_ALL_SERIES =
info(LABEL_ALL_SERIES) + " Description to go here.";
public static final String INFO_SWAP =
info(LABEL_SWAP) + " Description to go here.";
public static final String INFO_DEFAULT =
"<i>Mouse over an option for a description.</i>";
// -- Fields - GUI components --
private Choice stackChoice;
private Choice orderChoice;
private Checkbox mergeBox;
private Checkbox colorizeBox;
private Checkbox splitCBox;
private Checkbox splitZBox;
private Checkbox splitTBox;
private Checkbox metadataBox;
private Checkbox omexmlBox;
private Checkbox groupBox;
private Checkbox concatenateBox;
private Checkbox rangeBox;
private Checkbox autoscaleBox;
private Checkbox virtualBox;
private Checkbox recordBox;
private Checkbox allSeriesBox;
private Checkbox cropBox;
private Checkbox swapBox;
private Hashtable infoTable;
private JEditorPane infoPane;
private Choice mergeChoice;
// -- Fields - core options --
private boolean firstTime;
private String stackFormat;
private String stackOrder;
private boolean mergeChannels;
private boolean colorize;
private boolean splitChannels;
private boolean splitFocalPlanes;
private boolean splitTimepoints;
private boolean crop;
private boolean showMetadata;
private boolean showOMEXML;
private boolean groupFiles;
private boolean concatenate;
private boolean specifyRanges;
private boolean autoscale;
private boolean virtual;
private boolean record;
private boolean openAllSeries;
private boolean swapDimensions;
private String mergeOption;
private boolean windowless;
private String seriesString;
private boolean forceThumbnails;
private String location;
private String id;
private boolean quiet;
private Location idLoc;
private String idName;
private String idType;
// -- ImporterOptions methods - accessors --
public boolean isFirstTime() { return firstTime; }
public String getStackFormat() { return stackFormat; }
public String getStackOrder() { return stackOrder; }
public boolean isMergeChannels() { return mergeChannels; }
public boolean isColorize() { return colorize; }
public boolean isSplitChannels() { return splitChannels; }
public boolean isSplitFocalPlanes() { return splitFocalPlanes; }
public boolean isSplitTimepoints() { return splitTimepoints; }
public boolean isShowMetadata() { return showMetadata; }
public boolean isShowOMEXML() { return showOMEXML; }
public boolean isGroupFiles() { return groupFiles; }
public boolean isConcatenate() { return concatenate; }
public boolean isSpecifyRanges() { return specifyRanges; }
public boolean isForceThumbnails() { return forceThumbnails; }
public boolean isAutoscale() { return autoscale; }
public boolean isWindowless() { return windowless; }
public boolean isVirtual() { return virtual; }
public boolean isRecord() { return record; }
public boolean openAllSeries() { return openAllSeries; }
public boolean doCrop() { return crop; }
public boolean isSwapDimensions() { return swapDimensions; }
public String getMergeOption() { return mergeOption; }
public boolean isViewNone() { return VIEW_NONE.equals(stackFormat); }
public boolean isViewStandard() { return VIEW_STANDARD.equals(stackFormat); }
public boolean isViewHyperstack() {
return VIEW_HYPERSTACK.equals(stackFormat);
}
public boolean isViewBrowser() { return VIEW_BROWSER.equals(stackFormat); }
public boolean isViewVisBio() { return VIEW_VISBIO.equals(stackFormat); }
public boolean isViewImage5D() { return VIEW_IMAGE_5D.equals(stackFormat); }
public boolean isViewView5D() { return VIEW_VIEW_5D.equals(stackFormat); }
public String getLocation() { return location; }
public String getId() { return id; }
public boolean isQuiet() { return quiet; }
public boolean isLocal() { return LOCATION_LOCAL.equals(location); }
public boolean isHTTP() { return LOCATION_HTTP.equals(location); }
public boolean isOME() { return LOCATION_OME.equals(location); }
public boolean isOMERO() { return LOCATION_OMERO.equals(location); }
public Location getIdLocation() { return idLoc; }
public String getIdName() { return idName; }
public String getIdType() { return idType; }
// -- ImporterOptions methods - mutators --
public void setStackFormat(String s) { stackFormat = s; }
public void setStackOrder(String s) { stackOrder = s; }
public void setMergeChannels(boolean b) { mergeChannels = b; }
public void setColorize(boolean b) { colorize = b; }
public void setSplitChannels(boolean b) { splitChannels = b; }
public void setSplitFocalPlanes(boolean b) { splitFocalPlanes = b; }
public void setSplitTimepoints(boolean b) { splitTimepoints = b; }
public void setShowMetadata(boolean b) { showMetadata = b; }
public void setShowOMEXML(boolean b) { showOMEXML = b; }
public void setGroupFiles(boolean b) { groupFiles = b; }
public void setConcatenate(boolean b) { concatenate = b; }
public void setSpecifyRanges(boolean b) { specifyRanges = b; }
public void setForceThumbnails(boolean b) { forceThumbnails = b; }
public void setAutoscale(boolean b) { autoscale = b; }
public void setWindowless(boolean b) { windowless = b; }
public void setVirtual(boolean b) { virtual = b; }
public void setRecord(boolean b) { record = b; }
public void setOpenAllSeries(boolean b) { openAllSeries = b; }
public void setCrop(boolean b) { crop = b; }
public void setSwapDimensions(boolean b) { swapDimensions = b; }
// -- ImporterOptions methods --
/** Loads default option values from IJ_Prefs.txt. */
public void loadPreferences() {
stackFormat = Prefs.get(PREF_STACK, VIEW_STANDARD);
stackOrder = Prefs.get(PREF_ORDER, ORDER_DEFAULT);
mergeChannels = Prefs.get(PREF_MERGE, false);
colorize = Prefs.get(PREF_COLORIZE, true);
splitChannels = Prefs.get(PREF_C, true);
splitFocalPlanes = Prefs.get(PREF_Z, false);
splitTimepoints = Prefs.get(PREF_T, false);
crop = Prefs.get(PREF_CROP, false);
showMetadata = Prefs.get(PREF_METADATA, false);
showOMEXML = Prefs.get(PREF_OME_XML, false);
groupFiles = Prefs.get(PREF_GROUP, false);
concatenate = Prefs.get(PREF_CONCATENATE, false);
specifyRanges = Prefs.get(PREF_RANGE, false);
autoscale = Prefs.get(PREF_AUTOSCALE, true);
virtual = Prefs.get(PREF_VIRTUAL, false);
record = Prefs.get(PREF_RECORD, true);
openAllSeries = Prefs.get(PREF_ALL_SERIES, false);
swapDimensions = Prefs.get(PREF_SWAP, false);
mergeOption = Prefs.get(PREF_MERGE_OPTION, MERGE_DEFAULT);
windowless = Prefs.get(PREF_WINDOWLESS, false);
seriesString = Prefs.get(PREF_SERIES, "0");
firstTime = Prefs.get(PREF_FIRST, true);
forceThumbnails = Prefs.get(PREF_THUMBNAIL, false);
}
/** Saves option values to IJ_Prefs.txt as the new defaults. */
public void savePreferences() {
Prefs.set(PREF_STACK, stackFormat);
Prefs.set(PREF_ORDER, stackOrder);
Prefs.set(PREF_MERGE, mergeChannels);
Prefs.set(PREF_COLORIZE, colorize);
Prefs.set(PREF_C, splitChannels);
Prefs.set(PREF_Z, splitFocalPlanes);
Prefs.set(PREF_T, splitTimepoints);
Prefs.set(PREF_CROP, crop);
Prefs.set(PREF_METADATA, showMetadata);
Prefs.set(PREF_OME_XML, showOMEXML);
Prefs.set(PREF_GROUP, groupFiles);
Prefs.set(PREF_CONCATENATE, concatenate);
Prefs.set(PREF_RANGE, specifyRanges);
Prefs.set(PREF_AUTOSCALE, autoscale);
Prefs.set(PREF_VIRTUAL, virtual);
Prefs.set(PREF_RECORD, record);
Prefs.set(PREF_ALL_SERIES, openAllSeries);
Prefs.set(PREF_SWAP, swapDimensions);
Prefs.set(PREF_MERGE_OPTION, mergeOption);
Prefs.set(PREF_WINDOWLESS, windowless);
Prefs.set(PREF_SERIES, seriesString);
Prefs.set(PREF_FIRST, false);
//Prefs.set(PREF_THUMBNAIL, forceThumbnails);
}
/** Parses the plugin argument for parameter values. */
public void parseArg(String arg) {
if (arg == null || arg.length() == 0) return;
if (new Location(arg).exists()) {
// old style arg: entire argument is a file path
// this style is used by the HandleExtraFileTypes plugin
// NB: This functionality must not be removed, or the plugin
// will stop working correctly with HandleExtraFileTypes.
location = LOCATION_LOCAL;
id = arg;
quiet = true; // suppress obnoxious error messages and such
}
else {
// new style arg: split up similar to a macro options string, but
// slightly different than macro options, in that boolean arguments
// must be of the form "key=true" rather than just "key"
// only the core options are supported for now
// NB: This functionality enables multiple plugin entries to achieve
// distinct behavior by calling the LociImporter plugin differently.
stackFormat = Macro.getValue(arg, LABEL_STACK, stackFormat);
stackOrder = Macro.getValue(arg, LABEL_ORDER, stackOrder);
mergeChannels = getMacroValue(arg, LABEL_MERGE, mergeChannels);
colorize = getMacroValue(arg, LABEL_COLORIZE, colorize);
splitChannels = getMacroValue(arg, LABEL_C, splitChannels);
splitFocalPlanes = getMacroValue(arg, LABEL_Z, splitFocalPlanes);
splitTimepoints = getMacroValue(arg, LABEL_T, splitTimepoints);
crop = getMacroValue(arg, LABEL_CROP, crop);
showMetadata = getMacroValue(arg, LABEL_METADATA, showMetadata);
showOMEXML = getMacroValue(arg, LABEL_OME_XML, showOMEXML);
groupFiles = getMacroValue(arg, LABEL_GROUP, groupFiles);
concatenate = getMacroValue(arg, LABEL_CONCATENATE, concatenate);
specifyRanges = getMacroValue(arg, LABEL_RANGE, specifyRanges);
autoscale = getMacroValue(arg, LABEL_AUTOSCALE, autoscale);
virtual = getMacroValue(arg, LABEL_VIRTUAL, virtual);
record = getMacroValue(arg, LABEL_RECORD, record);
openAllSeries = getMacroValue(arg, LABEL_ALL_SERIES, openAllSeries);
swapDimensions = getMacroValue(arg, LABEL_SWAP, swapDimensions);
mergeOption = Macro.getValue(arg, LABEL_MERGE_OPTION, mergeOption);
windowless = getMacroValue(arg, LABEL_WINDOWLESS, windowless);
seriesString = Macro.getValue(arg, LABEL_SERIES, "0");
location = Macro.getValue(arg, LABEL_LOCATION, location);
id = Macro.getValue(arg, LABEL_ID, id);
}
}
/**
* Gets the location (type of data source) from macro options,
* or user prompt if necessary.
* @return status of operation
*/
public int promptLocation() {
if (location == null) {
// Open a dialog asking the user what kind of dataset to handle.
// Ask only if the location was not already specified somehow.
// ImageJ will grab the value from the macro options, when possible.
GenericDialog gd = new GenericDialog("Bio-Formats Dataset Location");
gd.addChoice(LABEL_LOCATION, LOCATIONS, LOCATION_LOCAL);
gd.showDialog();
if (gd.wasCanceled()) return STATUS_CANCELED;
location = gd.getNextChoice();
}
// verify that location is valid
boolean isLocal = LOCATION_LOCAL.equals(location);
boolean isHTTP = LOCATION_HTTP.equals(location);
boolean isOME = LOCATION_OME.equals(location);
boolean isOMERO = LOCATION_OMERO.equals(location);
if (!isLocal && !isHTTP && !isOME && !isOMERO) {
if (!quiet) IJ.error("Bio-Formats", "Invalid location: " + location);
return STATUS_FINISHED;
}
return STATUS_OK;
}
/**
* Gets the id (e.g., filename or URL) to open from macro options,
* or user prompt if necessary.
* @return status of operation
*/
public int promptId() {
if (isLocal()) return promptIdLocal();
else if (isHTTP()) return promptIdHTTP();
else return promptIdOME(); // isOME
}
/**
* Gets the filename (id) to open from macro options,
* or user prompt if necessary.
* @return status of operation
*/
public int promptIdLocal() {
if (firstTime && IJ.isMacOSX()) {
String osVersion = System.getProperty("os.version");
if (osVersion == null ||
osVersion.startsWith("10.4.") ||
osVersion.startsWith("10.3.") ||
osVersion.startsWith("10.2."))
{
// present user with one-time dialog box
IJ.showMessage("Bio-Formats",
"One-time warning: There is a bug in Java on Mac OS X with the\n" +
"native file chooser that crashes ImageJ if you click on a file\n" +
"in cxd, ipw, oib or zvi format while in column view mode.\n" +
"You can work around the problem by switching to list view\n" +
"(press Command+2) or by checking the \"Use JFileChooser to\n" +
"Open/Save\" option in the Edit>Options>Input/Output... dialog.");
}
}
String ijVersion = IJ.getVersion();
if (firstTime && (ijVersion == null || ijVersion.compareTo("1.39u") < 0)) {
// present user with one-time dialog box
if (ijVersion == null) ijVersion = "unknown";
IJ.showMessage("Bio-Formats",
"One-time warning: Some features of Bio-Formats, such as the\n" +
"Data Browser and some color handling options, require ImageJ\n" +
"v1.39u or later. Your version is " + ijVersion +
"; you will need to upgrade\n" +
"if you wish to take advantage of these features.");
}
if (id == null) {
// prompt user for the filename (or grab from macro options)
OpenDialog od = new OpenDialog(LABEL_ID, id);
String dir = od.getDirectory();
String name = od.getFileName();
if (dir == null || name == null) return STATUS_CANCELED;
id = dir + name;
}
// verify that id is valid
if (id != null) idLoc = new Location(id);
if (idLoc == null || !idLoc.exists()) {
if (!quiet) {
IJ.error("Bio-Formats", idLoc == null ?
"No file was specified." :
"The specified file (" + id + ") does not exist.");
}
return STATUS_FINISHED;
}
idName = idLoc.getName();
idType = "Filename";
return STATUS_OK;
}
/**
* Gets the URL (id) to open from macro options,
* or user prompt if necessary.
* @return status of operation
*/
public int promptIdHTTP() {
if (id == null) {
// prompt user for the URL (or grab from macro options)
GenericDialog gd = new GenericDialog("Bio-Formats URL");
gd.addStringField("URL: ", "http://", 30);
gd.showDialog();
if (gd.wasCanceled()) return STATUS_CANCELED;
id = gd.getNextString();
}
// verify that id is valid
if (id == null) {
if (!quiet) IJ.error("Bio-Formats", "No URL was specified.");
return STATUS_FINISHED;
}
idName = id;
idType = "URL";
return STATUS_OK;
}
/**
* Gets the OME server and image (id) to open from macro options,
* or user prompt if necessary.
* @return status of operation
*/
public int promptIdOME() {
if (id == null) {
// CTR FIXME -- eliminate this kludge
IJ.runPlugIn("loci.plugins.ome.OMEPlugin", "");
return STATUS_FINISHED;
}
idType = "OME address";
return STATUS_OK;
}
public int promptMergeOption(int[] nums, boolean spectral) {
if (windowless) return STATUS_OK;
GenericDialog gd = new GenericDialog("Merging Options...");
String[] options = new String[spectral ? 8 : 7];
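    // slots 0-5 offer groupings of 2-7 channels per plane (filled in below);
    // slot 6 is "do not merge"; slot 7 (spectral data only) is a projection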
options[6] = MERGE_DEFAULT;
if (spectral) options[7] = MERGE_PROJECTION;
for (int i=0; i<6; i++) {
options[i] = nums[i] + " planes, " + (i + 2) + " channels per plane";
}
gd.addMessage("How would you like to merge this data?");
gd.addChoice(LABEL_MERGE_OPTION, options, MERGE_DEFAULT);
gd.showDialog();
if (gd.wasCanceled()) return STATUS_CANCELED;
mergeOption = options[gd.getNextChoiceIndex()];
return STATUS_OK;
}
/**
* Gets option values from macro options, or user prompt if necessary.
* @return status of operation
*/
public int promptOptions() {
Vector stackTypes = new Vector();
stackTypes.add(VIEW_NONE);
stackTypes.add(VIEW_STANDARD);
if (IJ.getVersion().compareTo("1.39l") >= 0) {
stackTypes.add(VIEW_HYPERSTACK);
stackTypes.add(VIEW_BROWSER);
}
if (Checker.checkClass(CLASS_VISBIO)) stackTypes.add(VIEW_VISBIO);
if (Checker.checkClass(CLASS_IMAGE_5D)) stackTypes.add(VIEW_IMAGE_5D);
if (Checker.checkClass(CLASS_VIEW_5D)) stackTypes.add(VIEW_VIEW_5D);
final String[] stackFormats = new String[stackTypes.size()];
stackTypes.copyInto(stackFormats);
String[] stackOrders = new String[] {
ORDER_DEFAULT, ORDER_XYZCT, ORDER_XYZTC, ORDER_XYCZT, ORDER_XYCTZ,
ORDER_XYTZC, ORDER_XYTCZ
};
// prompt user for parameters (or grab from macro options)
GenericDialog gd = new GenericDialog("Bio-Formats Import Options");
gd.addChoice(LABEL_STACK, stackFormats, stackFormat);
gd.addChoice(LABEL_ORDER, stackOrders, stackOrder);
gd.addCheckbox(LABEL_MERGE, mergeChannels);
gd.addCheckbox(LABEL_COLORIZE, colorize);
gd.addCheckbox(LABEL_C, splitChannels);
gd.addCheckbox(LABEL_Z, splitFocalPlanes);
gd.addCheckbox(LABEL_T, splitTimepoints);
gd.addCheckbox(LABEL_CROP, crop);
gd.addCheckbox(LABEL_METADATA, showMetadata);
gd.addCheckbox(LABEL_OME_XML, showOMEXML);
gd.addCheckbox(LABEL_GROUP, groupFiles);
gd.addCheckbox(LABEL_CONCATENATE, concatenate);
gd.addCheckbox(LABEL_RANGE, specifyRanges);
gd.addCheckbox(LABEL_AUTOSCALE, autoscale);
gd.addCheckbox(LABEL_VIRTUAL, virtual);
gd.addCheckbox(LABEL_RECORD, record);
gd.addCheckbox(LABEL_ALL_SERIES, openAllSeries);
gd.addCheckbox(LABEL_SWAP, swapDimensions);
// extract GUI components from dialog and add listeners
Vector labels = null;
Label stackLabel = null, orderLabel = null;
Component[] c = gd.getComponents();
if (c != null) {
labels = new Vector();
for (int i=0; i<c.length; i++) {
if (c[i] instanceof Label) {
Label item = (Label) c[i];
labels.add(item);
}
}
stackLabel = (Label) labels.get(0);
orderLabel = (Label) labels.get(1);
}
Vector choices = gd.getChoices();
if (choices != null) {
stackChoice = (Choice) choices.get(0);
orderChoice = (Choice) choices.get(1);
for (int i=0; i<choices.size(); i++) {
Choice item = (Choice) choices.get(i);
item.addFocusListener(this);
item.addItemListener(this);
item.addMouseListener(this);
}
}
Vector boxes = gd.getCheckboxes();
if (boxes != null) {
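      // NB: these indices must mirror the order of the addCheckbox calls above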
mergeBox = (Checkbox) boxes.get(0);
colorizeBox = (Checkbox) boxes.get(1);
splitCBox = (Checkbox) boxes.get(2);
splitZBox = (Checkbox) boxes.get(3);
splitTBox = (Checkbox) boxes.get(4);
cropBox = (Checkbox) boxes.get(5);
metadataBox = (Checkbox) boxes.get(6);
omexmlBox = (Checkbox) boxes.get(7);
groupBox = (Checkbox) boxes.get(8);
concatenateBox = (Checkbox) boxes.get(9);
rangeBox = (Checkbox) boxes.get(10);
autoscaleBox = (Checkbox) boxes.get(11);
virtualBox = (Checkbox) boxes.get(12);
recordBox = (Checkbox) boxes.get(13);
allSeriesBox = (Checkbox) boxes.get(14);
swapBox = (Checkbox) boxes.get(15);
for (int i=0; i<boxes.size(); i++) {
Checkbox item = (Checkbox) boxes.get(i);
item.addFocusListener(this);
item.addItemListener(this);
item.addMouseListener(this);
}
}
verifyOptions(null);
// associate information for each option
infoTable = new Hashtable();
infoTable.put(stackLabel, INFO_STACK);
infoTable.put(stackChoice, INFO_STACK);
infoTable.put(orderLabel, INFO_ORDER);
infoTable.put(orderChoice, INFO_ORDER);
infoTable.put(mergeBox, INFO_MERGE);
infoTable.put(colorizeBox, INFO_COLORIZE);
infoTable.put(splitCBox, INFO_C);
infoTable.put(splitZBox, INFO_Z);
infoTable.put(splitTBox, INFO_T);
infoTable.put(cropBox, INFO_CROP);
infoTable.put(metadataBox, INFO_METADATA);
infoTable.put(omexmlBox, INFO_OME_XML);
infoTable.put(groupBox, INFO_GROUP);
infoTable.put(concatenateBox, INFO_CONCATENATE);
infoTable.put(rangeBox, INFO_RANGE);
infoTable.put(autoscaleBox, INFO_AUTOSCALE);
infoTable.put(virtualBox, INFO_VIRTUAL);
infoTable.put(recordBox, INFO_RECORD);
infoTable.put(allSeriesBox, INFO_ALL_SERIES);
infoTable.put(swapBox, INFO_SWAP);
// rebuild dialog using FormLayout to organize things more nicely
String cols =
// first column
"pref, 3dlu, pref:grow, " +
// second column
"10dlu, pref";
String rows =
// Stack viewing | Metadata viewing
"pref, 3dlu, pref, 3dlu, pref, " +
// Dataset organization | Memory management
"9dlu, pref, 3dlu, pref, 3dlu, pref, 3dlu, pref, 3dlu, pref, " +
// Color options | Split into separate windows
"9dlu, pref, 3dlu, pref, 3dlu, pref, 3dlu, pref, " +
// Information
"9dlu, pref, 3dlu, fill:40dlu";
// TODO: change "Merge channels into RGB" checkbox to
// "Channel merging" choice with options:
// "Default", "Merge channels" or "Separate channels"
// TODO: change "Use virtual stack" and "Record modifications to virtual
// stack" checkboxes to "Stack type" choice with options:
// "Normal", "Virtual" or "Smart virtual"
PanelBuilder builder = new PanelBuilder(new FormLayout(cols, rows));
CellConstraints cc = new CellConstraints();
// populate 1st column
int row = 1;
builder.addSeparator("Stack viewing", cc.xyw(1, row, 3));
row += 2;
builder.add(stackLabel, cc.xy(1, row));
builder.add(stackChoice, cc.xy(3, row));
row += 2;
builder.add(orderLabel, cc.xy(1, row));
builder.add(orderChoice, cc.xy(3, row));
row += 2;
builder.addSeparator("Dataset organization", cc.xyw(1, row, 3));
row += 2;
builder.add(groupBox, xyw(cc, 1, row, 3));
row += 2;
builder.add(swapBox, xyw(cc, 1, row, 3));
row += 2;
builder.add(allSeriesBox, xyw(cc, 1, row, 3));
row += 2;
builder.add(concatenateBox, xyw(cc, 1, row, 3));
row += 2;
builder.addSeparator("Color options", cc.xyw(1, row, 3));
row += 2;
builder.add(mergeBox, xyw(cc, 1, row, 3));
row += 2;
builder.add(colorizeBox, xyw(cc, 1, row, 3));
row += 2;
builder.add(autoscaleBox, xyw(cc, 1, row, 3));
row += 2;
// populate 2nd column
row = 1;
builder.addSeparator("Metadata viewing", cc.xy(5, row));
row += 2;
builder.add(metadataBox, xyw(cc, 5, row, 1));
row += 2;
builder.add(omexmlBox, xyw(cc, 5, row, 1));
row += 2;
builder.addSeparator("Memory management", cc.xy(5, row));
row += 2;
builder.add(virtualBox, xyw(cc, 5, row, 1));
row += 2;
builder.add(recordBox, xyw(cc, 5, row, 1));
row += 2;
builder.add(rangeBox, xyw(cc, 5, row, 1));
row += 2;
builder.add(cropBox, xyw(cc, 5, row, 1));
row += 2;
builder.addSeparator("Split into separate windows", cc.xy(5, row));
row += 2;
builder.add(splitCBox, xyw(cc, 5, row, 1));
row += 2;
builder.add(splitZBox, xyw(cc, 5, row, 1));
row += 2;
builder.add(splitTBox, xyw(cc, 5, row, 1));
row += 2;
// information section
builder.addSeparator("Information", cc.xyw(1, row, 5));
row += 2;
infoPane = new JEditorPane();
infoPane.setContentType("text/html");
infoPane.setEditable(false);
infoPane.setText("<html>" + INFO_DEFAULT);
builder.add(new JScrollPane(infoPane), cc.xyw(1, row, 5));
row += 2;
gd.removeAll();
gd.add(builder.getPanel());
// display dialog to user and harvest results
gd.showDialog();
if (gd.wasCanceled()) return STATUS_CANCELED;
stackFormat = stackFormats[gd.getNextChoiceIndex()];
stackOrder = stackOrders[gd.getNextChoiceIndex()];
mergeChannels = gd.getNextBoolean();
colorize = gd.getNextBoolean();
splitChannels = gd.getNextBoolean();
splitFocalPlanes = gd.getNextBoolean();
splitTimepoints = gd.getNextBoolean();
crop = gd.getNextBoolean();
showMetadata = gd.getNextBoolean();
showOMEXML = gd.getNextBoolean();
groupFiles = gd.getNextBoolean();
concatenate = gd.getNextBoolean();
specifyRanges = gd.getNextBoolean();
autoscale = gd.getNextBoolean();
virtual = gd.getNextBoolean();
record = gd.getNextBoolean();
openAllSeries = gd.getNextBoolean();
swapDimensions = gd.getNextBoolean();
return STATUS_OK;
}
/**
* Gets file pattern from id, macro options, or user prompt if necessary.
* @return status of operation
*/
public int promptFilePattern() {
if (windowless) return STATUS_OK;
id = FilePattern.findPattern(idLoc);
if (id == null) {
IJ.showMessage("Bio-Formats",
"Warning: Bio-Formats was unable to determine a grouping that\n" +
"includes the file you chose. The most common reason for this\n" +
"situation is that the folder contains extraneous files with " +
"similar\n" +
"names and numbers that confuse the detection algorithm.\n" +
" \n" +
"For example, if you have multiple datasets in the same folder\n" +
"named series1_z*_c*.tif, series2_z*_c*.tif, etc., Bio-Formats\n" +
"may try to group all such files into a single series.\n" +
" \n" +
"For best results, put each image series's files in their own " +
"folder,\n" +
"or type in a file pattern manually.\n");
id = idLoc.getAbsolutePath();
}
// prompt user to confirm file pattern (or grab from macro options)
GenericDialog gd = new GenericDialog("Bio-Formats File Stitching");
int len = id.length() + 1;
if (len > 80) len = 80;
gd.addStringField("Pattern: ", id, len);
gd.showDialog();
if (gd.wasCanceled()) return STATUS_CANCELED;
id = gd.getNextString();
return STATUS_OK;
}
/**
* Gets which series to open from macro options, or user prompt if necessary.
* @param r The reader to use for extracting details of each series.
* @param seriesLabels Label to display to user identifying each series.
* @param series Boolean array indicating which series to include
* (populated by this method).
* @return status of operation
*/
public int promptSeries(IFormatReader r,
String[] seriesLabels, boolean[] series)
{
if (windowless) {
if (seriesString != null) {
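        // seriesString holds the space-separated series indices to open,
        // saved as e.g. "[0 2 5 ]"; strip the brackets before tokenizing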
if (seriesString.startsWith("[")) {
seriesString = seriesString.substring(1, seriesString.length() - 2);
}
Arrays.fill(series, false);
StringTokenizer tokens = new StringTokenizer(seriesString, " ");
while (tokens.hasMoreTokens()) {
String token = tokens.nextToken().trim();
int n = Integer.parseInt(token);
if (n < series.length) series[n] = true;
}
}
return STATUS_OK;
}
int seriesCount = r.getSeriesCount();
// prompt user to specify series inclusion (or grab from macro options)
GenericDialog gd = new GenericDialog("Bio-Formats Series Options") {
public void actionPerformed(ActionEvent e) {
String cmd = e.getActionCommand();
if ("select".equals(cmd)) {
Checkbox[] boxes =
(Checkbox[]) getCheckboxes().toArray(new Checkbox[0]);
for (int i=0; i<boxes.length; i++) {
boxes[i].setState(true);
}
}
else if ("deselect".equals(cmd)) {
Checkbox[] boxes =
(Checkbox[]) getCheckboxes().toArray(new Checkbox[0]);
for (int i=0; i<boxes.length; i++) {
boxes[i].setState(false);
}
}
else {
super.actionPerformed(e);
}
}
};
GridBagLayout gdl = (GridBagLayout) gd.getLayout();
GridBagConstraints gbc = new GridBagConstraints();
gbc.gridx = 2;
gbc.gridwidth = GridBagConstraints.REMAINDER;
Panel[] p = new Panel[seriesCount];
for (int i=0; i<seriesCount; i++) {
gd.addCheckbox(seriesLabels[i], series[i]);
r.setSeries(i);
int sx = r.getThumbSizeX() + 10; // a little extra padding
int sy = r.getThumbSizeY();
p[i] = new Panel();
p[i].add(Box.createRigidArea(new Dimension(sx, sy)));
gbc.gridy = i;
if (forceThumbnails) {
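        // read a representative plane (middle Z and T, first channel)
        // to show as this series' thumbnail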
IJ.showStatus("Reading thumbnail for series #" + (i + 1));
int z = r.getSizeZ() / 2;
int t = r.getSizeT() / 2;
int ndx = r.getIndex(z, 0, t);
try {
BufferedImage img = r.openThumbImage(ndx);
if (isAutoscale() && r.getPixelType() != FormatTools.FLOAT) {
img = AWTImageTools.autoscale(img);
}
ImageIcon icon = new ImageIcon(img);
p[i].removeAll();
p[i].add(new JLabel(icon));
}
        catch (Exception e) {
          // thumbnail loading is best-effort; keep the blank placeholder
        }
}
gdl.setConstraints(p[i], gbc);
gd.add(p[i]);
}
Util.addScrollBars(gd);
Panel buttons = new Panel();
Button select = new Button("Select All");
select.setActionCommand("select");
select.addActionListener(gd);
Button deselect = new Button("Deselect All");
deselect.setActionCommand("deselect");
deselect.addActionListener(gd);
buttons.add(select);
buttons.add(deselect);
gbc.gridx = 0;
gbc.gridy = seriesCount;
gdl.setConstraints(buttons, gbc);
gd.add(buttons);
if (forceThumbnails) gd.showDialog();
else {
ThumbLoader loader = new ThumbLoader(r, p, gd, isAutoscale());
gd.showDialog();
loader.stop();
}
if (gd.wasCanceled()) return STATUS_CANCELED;
seriesString = "[";
for (int i=0; i<seriesCount; i++) {
series[i] = gd.getNextBoolean();
if (series[i]) {
seriesString += i + " ";
}
}
seriesString += "]";
if (concatenate) {
// toggle on compatible series
// CTR FIXME -- why are we doing this?
for (int i=0; i<seriesCount; i++) {
if (series[i]) continue;
r.setSeries(i);
int sizeX = r.getSizeX();
int sizeY = r.getSizeY();
int pixelType = r.getPixelType();
int sizeC = r.getSizeC();
for (int j=0; j<seriesCount; j++) {
if (j == i || !series[j]) continue;
r.setSeries(j);
if (sizeX == r.getSizeX() && sizeY == r.getSizeY() &&
pixelType == r.getPixelType() && sizeC == r.getSizeC())
{
series[i] = true;
break;
}
}
}
}
return STATUS_OK;
}
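/**
* Gets the crop region for each included series from macro options,
* or user prompt if necessary.
* @param r The reader to use for extracting image dimensions.
* @param labels Label to display to user identifying each series.
* @param series Boolean array indicating which series are included.
* @param box Crop region for each series (populated by this method).
* @return status of operation
*/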
public int promptCropSize(IFormatReader r, String[] labels, boolean[] series,
Rectangle[] box)
{
GenericDialog gd = new GenericDialog("Bio-Formats Crop Options");
for (int i=0; i<series.length; i++) {
if (!series[i]) continue;
gd.addMessage(labels[i].replaceAll("_", " "));
gd.addNumericField("X_Coordinate_" + i, 0, 0);
gd.addNumericField("Y_Coordinate_" + i, 0, 0);
gd.addNumericField("Width_" + i, 0, 0);
gd.addNumericField("Height_" + i, 0, 0);
}
Util.addScrollBars(gd);
gd.showDialog();
if (gd.wasCanceled()) return STATUS_CANCELED;
for (int i=0; i<series.length; i++) {
if (!series[i]) continue;
r.setSeries(i);
box[i].x = (int) gd.getNextNumber();
box[i].y = (int) gd.getNextNumber();
box[i].width = (int) gd.getNextNumber();
box[i].height = (int) gd.getNextNumber();
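// clamp the crop region so it stays within the image bounds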
if (box[i].x < 0) box[i].x = 0;
if (box[i].y < 0) box[i].y = 0;
if (box[i].x >= r.getSizeX()) box[i].x = r.getSizeX() - box[i].width - 1;
if (box[i].y >= r.getSizeY()) box[i].y = r.getSizeY() - box[i].height - 1;
if (box[i].width < 1) box[i].width = 1;
if (box[i].height < 1) box[i].height = 1;
if (box[i].width + box[i].x > r.getSizeX()) {
box[i].width = r.getSizeX() - box[i].x;
}
if (box[i].height + box[i].y > r.getSizeY()) {
box[i].height = r.getSizeY() - box[i].y;
}
}
return STATUS_OK;
}
/**
* Gets the range of image planes to open from macro options,
* or user prompt if necessary.
* @param r The reader to use for extracting details of each series.
* @param series Boolean array indicating the series
* for which ranges should be determined.
* @param seriesLabels Label to display to user identifying each series
* @param cBegin First C index to include (populated by this method).
* @param cEnd Last C index to include (populated by this method).
* @param cStep C dimension step size (populated by this method).
* @param zBegin First Z index to include (populated by this method).
* @param zEnd Last Z index to include (populated by this method).
* @param zStep Z dimension step size (populated by this method).
* @param tBegin First T index to include (populated by this method).
* @param tEnd Last T index to include (populated by this method).
* @param tStep T dimension step size (populated by this method).
* @return status of operation
*/
public int promptRange(IFormatReader r,
boolean[] series, String[] seriesLabels,
int[] cBegin, int[] cEnd, int[] cStep,
int[] zBegin, int[] zEnd, int[] zStep,
int[] tBegin, int[] tEnd, int[] tStep)
{
int seriesCount = r.getSeriesCount();
// prompt user to specify series ranges (or grab from macro options)
GenericDialog gd = new GenericDialog("Bio-Formats Range Options");
for (int i=0; i<seriesCount; i++) {
if (!series[i]) continue;
r.setSeries(i);
gd.addMessage(seriesLabels[i].replaceAll("_", " "));
String s = seriesCount > 1 ? "_" + (i + 1) : "";
if (r.isOrderCertain()) {
if (r.getEffectiveSizeC() > 1) {
gd.addNumericField("C_Begin" + s, cBegin[i] + 1, 0);
gd.addNumericField("C_End" + s, cEnd[i] + 1, 0);
gd.addNumericField("C_Step" + s, cStep[i], 0);
}
if (r.getSizeZ() > 1) {
gd.addNumericField("Z_Begin" + s, zBegin[i] + 1, 0);
gd.addNumericField("Z_End" + s, zEnd[i] + 1, 0);
gd.addNumericField("Z_Step" + s, zStep[i], 0);
}
if (r.getSizeT() > 1) {
gd.addNumericField("T_Begin" + s, tBegin[i] + 1, 0);
gd.addNumericField("T_End" + s, tEnd[i] + 1, 0);
gd.addNumericField("T_Step" + s, tStep[i], 0);
}
}
else {
gd.addNumericField("Begin" + s, cBegin[i] + 1, 0);
gd.addNumericField("End" + s, cEnd[i] + 1, 0);
gd.addNumericField("Step" + s, cStep[i], 0);
}
}
Util.addScrollBars(gd);
gd.showDialog();
if (gd.wasCanceled()) return STATUS_CANCELED;
for (int i=0; i<seriesCount; i++) {
if (!series[i]) continue;
r.setSeries(i);
int sizeC = r.getEffectiveSizeC();
int sizeZ = r.getSizeZ();
int sizeT = r.getSizeT();
boolean certain = r.isOrderCertain();
if (certain) {
if (r.getEffectiveSizeC() > 1) {
cBegin[i] = (int) gd.getNextNumber() - 1;
cEnd[i] = (int) gd.getNextNumber() - 1;
cStep[i] = (int) gd.getNextNumber();
}
if (r.getSizeZ() > 1) {
zBegin[i] = (int) gd.getNextNumber() - 1;
zEnd[i] = (int) gd.getNextNumber() - 1;
zStep[i] = (int) gd.getNextNumber();
}
if (r.getSizeT() > 1) {
tBegin[i] = (int) gd.getNextNumber() - 1;
tEnd[i] = (int) gd.getNextNumber() - 1;
tStep[i] = (int) gd.getNextNumber();
}
}
else {
cBegin[i] = (int) gd.getNextNumber() - 1;
cEnd[i] = (int) gd.getNextNumber() - 1;
cStep[i] = (int) gd.getNextNumber();
}
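// clamp the requested ranges to the valid index space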
int maxC = certain ? sizeC : r.getImageCount();
if (cBegin[i] < 0) cBegin[i] = 0;
if (cBegin[i] >= maxC) cBegin[i] = maxC - 1;
if (cEnd[i] < cBegin[i]) cEnd[i] = cBegin[i];
if (cEnd[i] >= maxC) cEnd[i] = maxC - 1;
if (cStep[i] < 1) cStep[i] = 1;
if (zBegin[i] < 0) zBegin[i] = 0;
if (zBegin[i] >= sizeZ) zBegin[i] = sizeZ - 1;
if (zEnd[i] < zBegin[i]) zEnd[i] = zBegin[i];
if (zEnd[i] >= sizeZ) zEnd[i] = sizeZ - 1;
if (zStep[i] < 1) zStep[i] = 1;
if (tBegin[i] < 0) tBegin[i] = 0;
if (tBegin[i] >= sizeT) tBegin[i] = sizeT - 1;
if (tEnd[i] < tBegin[i]) tEnd[i] = tBegin[i];
if (tEnd[i] >= sizeT) tEnd[i] = sizeT - 1;
if (tStep[i] < 1) tStep[i] = 1;
}
return STATUS_OK;
}
/** Prompt for dimension swapping options. */
public int promptSwap(DimensionSwapper r, boolean[] series) {
GenericDialog gd = new GenericDialog("Dimension swapping options");
int oldSeries = r.getSeries();
String[] labels = new String[] {"Z", "C", "T"};
for (int n=0; n<r.getSeriesCount(); n++) {
if (!series[n]) continue;
r.setSeries(n);
gd.addMessage("Series " + n + ":\n");
int[] axisSizes = new int[] {r.getSizeZ(), r.getSizeC(), r.getSizeT()};
for (int i=0; i<labels.length; i++) {
gd.addChoice(axisSizes[i] + "_planes", labels, labels[i]);
}
}
Util.addScrollBars(gd);
gd.showDialog();
if (gd.wasCanceled()) return STATUS_CANCELED;
for (int n=0; n<r.getSeriesCount(); n++) {
r.setSeries(n);
String z = gd.getNextChoice();
String c = gd.getNextChoice();
String t = gd.getNextChoice();
if (z.equals(t) || z.equals(c) || c.equals(t)) {
IJ.error("Invalid swapping options - each axis can be used only once.");
r.setSeries(oldSeries); // restore the original series before reprompting
return promptSwap(r, series);
}
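// rebuild the dimension order string, substituting the user's axis choices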
String originalOrder = r.getDimensionOrder();
StringBuffer sb = new StringBuffer();
sb.append("XY");
for (int i=2; i<originalOrder.length(); i++) {
if (originalOrder.charAt(i) == 'Z') sb.append(z);
else if (originalOrder.charAt(i) == 'C') sb.append(c);
else if (originalOrder.charAt(i) == 'T') sb.append(t);
}
r.swapDimensions(sb.toString());
}
r.setSeries(oldSeries);
return STATUS_OK;
}
// -- FocusListener methods --
/** Handles information pane updates when component focus changes. */
public void focusGained(FocusEvent e) {
Object src = e.getSource();
String text = (String) infoTable.get(src);
// components without an info entry fall back to the default blurb
if (text == null) text = INFO_DEFAULT;
infoPane.setText("<html>" + text);
}
public void focusLost(FocusEvent e) { }
// -- ItemListener methods --
/** Handles toggling of mutually exclusive options. */
public void itemStateChanged(ItemEvent e) {
verifyOptions(e.getSource());
}
// -- MouseListener methods --
/** Focuses the component upon mouseover. */
public void mouseEntered(MouseEvent e) {
Object src = e.getSource();
if (src instanceof Component) {
((Component) src).requestFocusInWindow();
}
}
public void mouseClicked(MouseEvent e) { }
public void mouseExited(MouseEvent e) { }
public void mousePressed(MouseEvent e) { }
public void mouseReleased(MouseEvent e) { }
// -- Static helper methods --
private static boolean getMacroValue(String options,
String key, boolean defaultValue)
{
String s = Macro.getValue(options, key, null);
return s == null ? defaultValue : s.equalsIgnoreCase("true");
}
private static String info(String label) {
return "<b>" + label.replaceAll("[_:]", " ").trim() + "</b> - ";
}
private static CellConstraints xyw(CellConstraints cc, int x, int y, int w) {
return cc.xyw(x, y, w, CellConstraints.LEFT, CellConstraints.CENTER);
}
// -- Helper methods --
/** Ensures that the options dialog has no mutually exclusive options. */
private void verifyOptions(Object src) {
// record GUI state
//boolean stackEnabled = stackChoice.isEnabled();
boolean orderEnabled = orderChoice.isEnabled();
boolean mergeEnabled = mergeBox.isEnabled();
boolean colorizeEnabled = colorizeBox.isEnabled();
boolean splitCEnabled = splitCBox.isEnabled();
boolean splitZEnabled = splitZBox.isEnabled();
boolean splitTEnabled = splitTBox.isEnabled();
boolean metadataEnabled = metadataBox.isEnabled();
boolean omexmlEnabled = omexmlBox.isEnabled();
boolean groupEnabled = groupBox.isEnabled();
boolean concatenateEnabled = concatenateBox.isEnabled();
boolean rangeEnabled = rangeBox.isEnabled();
boolean autoscaleEnabled = autoscaleBox.isEnabled();
boolean virtualEnabled = virtualBox.isEnabled();
boolean recordEnabled = recordBox.isEnabled();
boolean allSeriesEnabled = allSeriesBox.isEnabled();
boolean cropEnabled = cropBox.isEnabled();
boolean swapEnabled = swapBox.isEnabled();
boolean isStackNone = false;
boolean isStackStandard = false;
boolean isStackHyperstack = false;
boolean isStackBrowser = false;
boolean isStackVisBio = false;
boolean isStackImage5D = false;
boolean isStackView5D = false;
String stackValue = stackChoice.getSelectedItem();
if (stackValue.equals(VIEW_NONE)) isStackNone = true;
else if (stackValue.equals(VIEW_STANDARD)) isStackStandard = true;
else if (stackValue.equals(VIEW_HYPERSTACK)) isStackHyperstack = true;
else if (stackValue.equals(VIEW_BROWSER)) isStackBrowser = true;
else if (stackValue.equals(VIEW_VISBIO)) isStackVisBio = true;
else if (stackValue.equals(VIEW_IMAGE_5D)) isStackImage5D = true;
else if (stackValue.equals(VIEW_VIEW_5D)) isStackView5D = true;
String orderValue = orderChoice.getSelectedItem();
boolean isMerge = mergeBox.getState();
boolean isColorize = colorizeBox.getState();
boolean isSplitC = splitCBox.getState();
boolean isSplitZ = splitZBox.getState();
boolean isSplitT = splitTBox.getState();
boolean isMetadata = metadataBox.getState();
boolean isOMEXML = omexmlBox.getState();
boolean isGroup = groupBox.getState();
boolean isConcatenate = concatenateBox.getState();
boolean isRange = rangeBox.getState();
boolean isAutoscale = autoscaleBox.getState();
boolean isVirtual = virtualBox.getState();
boolean isRecord = recordBox.getState();
boolean isAllSeries = allSeriesBox.getState();
boolean isCrop = cropBox.getState();
boolean isSwap = swapBox.getState();
// toggle availability of each option based on state of earlier options
// NB: The order the options are examined here defines their order of
// precedence. This ordering is necessary because it affects which
// component states are capable of graying out other components.
// For example, we want to disable autoscaleBox when virtualBox is checked,
// so the virtualBox logic must appear before the autoscaleBox logic.
// To make it more intuitive for the user, the order of precedence should
// match the component layout from left to right, top to bottom, according
// to subsection.
// == Stack viewing ==
// orderChoice
orderEnabled = !isStackNone && !isStackHyperstack && !isStackBrowser;
if (src == stackChoice) {
orderValue = isStackHyperstack || isStackBrowser ?
ORDER_XYCZT : ORDER_DEFAULT;
}
// == Metadata viewing ==
// metadataBox
metadataEnabled = !isStackNone;
if (!metadataEnabled) isMetadata = true;
// omexmlBox
// NB: no other options affect omexmlBox
// == Dataset organization ==
// groupBox
// NB: no other options affect groupBox
groupEnabled = !isOME() && !isOMERO();
if (!groupEnabled) isGroup = false;
else if (src == stackChoice && isStackBrowser) isGroup = true;
// swapBox
// NB: no other options affect swapBox
// allSeriesBox
// NB: no other options affect allSeriesBox
// concatenateBox
// NB: no other options affect concatenateBox
// == Memory management ==
// virtualBox
virtualEnabled = !isStackNone;
if (!virtualEnabled) isVirtual = false;
else if (src == stackChoice && isStackBrowser) isVirtual = true;
// recordBox
recordEnabled = isVirtual;
if (!recordEnabled) isRecord = false;
// rangeBox
rangeEnabled = !isStackNone;
if (!rangeEnabled) isRange = false;
// cropBox
cropEnabled = !isStackNone;
if (!cropEnabled) isCrop = false;
// == Color options ==
// mergeBox
mergeEnabled = !isStackImage5D;
if (!mergeEnabled) isMerge = false;
// colorizeBox
colorizeEnabled = !isMerge;
if (!colorizeEnabled) isColorize = false;
// autoscaleBox
autoscaleEnabled = !isVirtual;
if (!autoscaleEnabled) isAutoscale = false;
// == Split into separate windows ==
boolean splitEnabled = !isStackNone && !isStackBrowser && !isVirtual;
// TODO: make splitting work with Data Browser & virtual stacks
// splitCBox
splitCEnabled = splitEnabled && !isMerge;
if (!splitCEnabled) isSplitC = false;
// splitZBox
splitZEnabled = splitEnabled;
if (!splitZEnabled) isSplitZ = false;
// splitTBox
splitTEnabled = splitEnabled;
if (!splitTEnabled) isSplitT = false;
// update state of each option, in case anything changed
//stackChoice.setEnabled(stackEnabled);
orderChoice.setEnabled(orderEnabled);
mergeBox.setEnabled(mergeEnabled);
colorizeBox.setEnabled(colorizeEnabled);
splitCBox.setEnabled(splitCEnabled);
splitZBox.setEnabled(splitZEnabled);
splitTBox.setEnabled(splitTEnabled);
metadataBox.setEnabled(metadataEnabled);
omexmlBox.setEnabled(omexmlEnabled);
groupBox.setEnabled(groupEnabled);
concatenateBox.setEnabled(concatenateEnabled);
rangeBox.setEnabled(rangeEnabled);
autoscaleBox.setEnabled(autoscaleEnabled);
virtualBox.setEnabled(virtualEnabled);
recordBox.setEnabled(recordEnabled);
allSeriesBox.setEnabled(allSeriesEnabled);
cropBox.setEnabled(cropEnabled);
swapBox.setEnabled(swapEnabled);
//stackChoice.select(stackValue);
orderChoice.select(orderValue);
mergeBox.setState(isMerge);
colorizeBox.setState(isColorize);
splitCBox.setState(isSplitC);
splitZBox.setState(isSplitZ);
splitTBox.setState(isSplitT);
metadataBox.setState(isMetadata);
omexmlBox.setState(isOMEXML);
groupBox.setState(isGroup);
concatenateBox.setState(isConcatenate);
rangeBox.setState(isRange);
autoscaleBox.setState(isAutoscale);
virtualBox.setState(isVirtual);
recordBox.setState(isRecord);
allSeriesBox.setState(isAllSeries);
cropBox.setState(isCrop);
swapBox.setState(isSwap);
// HACK - workaround a Mac OS X bug where GUI components do not update
// This trick works by changing each affected component's background color
// as subtly as possible, then changing it back after a brief delay.
// On an afflicted system the background color will end up "out of sync"
// but it is very difficult to tell because the difference is minimal.
// list of affected components
Component[] c = {
stackChoice,
orderChoice,
mergeBox,
colorizeBox,
splitCBox,
splitZBox,
splitTBox,
metadataBox,
omexmlBox,
groupBox,
concatenateBox,
rangeBox,
autoscaleBox,
virtualBox,
recordBox,
allSeriesBox,
cropBox,
swapBox
};
// record original background colors and change subtly
Color[] bgColor = new Color[c.length];
for (int i=0; i<c.length; i++) {
Color bg = c[i].getBackground();
bgColor[i] = c[i].isBackgroundSet() ? bg : null;
if (bg == null) continue; // no background color available to tweak
int red = bg.getRed();
if (red < 255) red++;
else red--;
c[i].setBackground(new Color(red, bg.getGreen(), bg.getBlue()));
}
// brief delay
try {
Thread.sleep(10);
}
catch (InterruptedException exc) { }
// change backgrounds back
for (int i=0; i<c.length; i++) c[i].setBackground(bgColor[i]);
}
}
| components/loci-plugins/src/loci/plugins/ImporterOptions.java | //
// ImporterOptions.java
//
/*
LOCI Plugins for ImageJ: a collection of ImageJ plugins including the
Bio-Formats Importer, Bio-Formats Exporter, Bio-Formats Macro Extensions,
Data Browser, Stack Colorizer and Stack Slicer. Copyright (C) 2005-@year@
Melissa Linkert, Curtis Rueden and Christopher Peterson.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
package loci.plugins;
import com.jgoodies.forms.builder.PanelBuilder;
import com.jgoodies.forms.layout.CellConstraints;
import com.jgoodies.forms.layout.FormLayout;
import ij.*;
import ij.gui.GenericDialog;
import ij.io.OpenDialog;
import java.awt.*;
import java.awt.event.*;
import java.awt.image.BufferedImage;
import java.util.*;
import javax.swing.*;
import loci.common.*;
import loci.formats.*;
/**
* Helper class for managing Bio-Formats Importer options.
* Gets parameter values through a variety of means, including
* preferences from IJ_Prefs.txt, plugin argument string, macro options,
* and user input from dialog boxes.
*
* <dl><dt><b>Source code:</b></dt>
* <dd><a href="https://skyking.microscopy.wisc.edu/trac/java/browser/trunk/components/loci-plugins/src/loci/plugins/ImporterOptions.java">Trac</a>,
* <a href="https://skyking.microscopy.wisc.edu/svn/java/trunk/components/loci-plugins/src/loci/plugins/ImporterOptions.java">SVN</a></dd></dl>
*/
public class ImporterOptions
implements FocusListener, ItemListener, MouseListener
{
// -- Constants --
// enumeration for status
public static final int STATUS_OK = 0;
public static final int STATUS_CANCELED = 1;
public static final int STATUS_FINISHED = 2;
// enumeration for stackFormat
public static final String VIEW_NONE = "Metadata only";
public static final String VIEW_STANDARD = "Standard ImageJ";
public static final String VIEW_HYPERSTACK = "Hyperstack";
public static final String VIEW_BROWSER = "Data Browser";
public static final String VIEW_VISBIO = "VisBio";
public static final String VIEW_IMAGE_5D = "Image5D";
public static final String VIEW_VIEW_5D = "View5D";
// enumeration for stackOrder
public static final String ORDER_DEFAULT = "Default";
public static final String ORDER_XYZCT = "XYZCT";
public static final String ORDER_XYZTC = "XYZTC";
public static final String ORDER_XYCZT = "XYCZT";
public static final String ORDER_XYTCZ = "XYTCZ";
public static final String ORDER_XYCTZ = "XYCTZ";
public static final String ORDER_XYTZC = "XYTZC";
// merging options
public static final String MERGE_DEFAULT = "Do not merge";
public static final String MERGE_PROJECTION = "Spectral projection";
// class to check for each viewing option
private static final String CLASS_VISBIO = "loci.visbio.VisBio";
private static final String CLASS_IMAGE_5D = "i5d.Image5D";
private static final String CLASS_VIEW_5D = "View5D_";
// enumeration for location
public static final String LOCATION_LOCAL = "Local machine";
public static final String LOCATION_HTTP = "Internet";
public static final String LOCATION_OME = "OME server";
public static final String LOCATION_OMERO = "OMERO server";
public static final String[] LOCATIONS = {
LOCATION_LOCAL, LOCATION_HTTP, LOCATION_OME, LOCATION_OMERO
};
// keys for use in IJ_Prefs.txt
public static final String PREF_STACK = "bioformats.stackFormat";
public static final String PREF_ORDER = "bioformats.stackOrder";
public static final String PREF_MERGE = "bioformats.mergeChannels";
public static final String PREF_COLORIZE = "bioformats.colorize";
public static final String PREF_C = "bioformats.splitWindows";
public static final String PREF_Z = "bioformats.splitFocalPlanes";
public static final String PREF_T = "bioformats.splitTimepoints";
public static final String PREF_CROP = "bioformats.crop";
public static final String PREF_METADATA = "bioformats.showMetadata";
public static final String PREF_OME_XML = "bioformats.showOMEXML";
public static final String PREF_GROUP = "bioformats.groupFiles";
public static final String PREF_CONCATENATE = "bioformats.concatenate";
public static final String PREF_RANGE = "bioformats.specifyRanges";
public static final String PREF_AUTOSCALE = "bioformats.autoscale";
public static final String PREF_VIRTUAL = "bioformats.virtual";
public static final String PREF_RECORD = "bioformats.record";
public static final String PREF_ALL_SERIES = "bioformats.openAllSeries";
public static final String PREF_MERGE_OPTION = "bioformats.mergeOption";
public static final String PREF_WINDOWLESS = "bioformats.windowless";
public static final String PREF_SERIES = "bioformats.series";
public static final String PREF_FIRST = "bioformats.firstTime";
public static final String PREF_THUMBNAIL = "bioformats.forceThumbnails";
public static final String PREF_SWAP = "bioformats.swapDimensions";
// labels for user dialog; when trimmed these double as argument & macro keys
public static final String LABEL_STACK = "View stack with: ";
public static final String LABEL_ORDER = "Stack_order: ";
public static final String LABEL_MERGE = "Merge_channels to RGB";
public static final String LABEL_COLORIZE = "Colorize channels";
public static final String LABEL_C = "Split_channels";
public static final String LABEL_Z = "Split_focal planes";
public static final String LABEL_T = "Split_timepoints";
public static final String LABEL_CROP = "Crop on import";
public static final String LABEL_METADATA =
"Display_metadata in results window";
public static final String LABEL_OME_XML = "Display_OME-XML metadata";
public static final String LABEL_GROUP = "Group_files with similar names";
public static final String LABEL_CONCATENATE =
"Concatenate_series when compatible";
public static final String LABEL_RANGE = "Specify_range for each series";
public static final String LABEL_AUTOSCALE = "Autoscale images";
public static final String LABEL_VIRTUAL = "Use_virtual_stack";
public static final String LABEL_RECORD =
"Record_modifications_to_virtual_stack";
public static final String LABEL_ALL_SERIES = "Open_all_series";
public static final String LABEL_SWAP = "Swap_dimensions";
public static final String LABEL_MERGE_OPTION = "Merging Options";
public static final String LABEL_WINDOWLESS = "windowless";
public static final String LABEL_SERIES = "series";
public static final String LABEL_LOCATION = "Location: ";
public static final String LABEL_ID = "Open";
// informative description of each option
public static final String INFO_STACK =
info(LABEL_STACK) + " Description to go here.";
public static final String INFO_ORDER =
info(LABEL_ORDER) + " Description to go here.";
public static final String INFO_MERGE =
info(LABEL_MERGE) + " Description to go here.";
public static final String INFO_COLORIZE =
info(LABEL_COLORIZE) + " Description to go here.";
public static final String INFO_C =
info(LABEL_C) + " Description to go here.";
public static final String INFO_Z =
info(LABEL_Z) + " Description to go here.";
public static final String INFO_T =
info(LABEL_T) + " Description to go here.";
public static final String INFO_CROP =
info(LABEL_CROP) + " Description to go here.";
public static final String INFO_METADATA =
info(LABEL_METADATA) + " Description to go here.";
public static final String INFO_OME_XML =
info(LABEL_OME_XML) + " Description to go here.";
public static final String INFO_GROUP =
info(LABEL_GROUP) + " Description to go here.";
public static final String INFO_CONCATENATE =
info(LABEL_CONCATENATE) + " Description to go here.";
public static final String INFO_RANGE =
info(LABEL_RANGE) + " Description to go here.";
public static final String INFO_AUTOSCALE =
info(LABEL_AUTOSCALE) + " Description to go here.";
public static final String INFO_VIRTUAL =
info(LABEL_VIRTUAL) + " Description to go here.";
public static final String INFO_RECORD =
info(LABEL_RECORD) + " Description to go here.";
public static final String INFO_ALL_SERIES =
info(LABEL_ALL_SERIES) + " Description to go here.";
public static final String INFO_SWAP =
info(LABEL_SWAP) + " Description to go here.";
public static final String INFO_DEFAULT =
"<i>Mouse over an option for a description.</i>";
// -- Fields - GUI components --
private Choice stackChoice;
private Choice orderChoice;
private Checkbox mergeBox;
private Checkbox colorizeBox;
private Checkbox splitCBox;
private Checkbox splitZBox;
private Checkbox splitTBox;
private Checkbox metadataBox;
private Checkbox omexmlBox;
private Checkbox groupBox;
private Checkbox concatenateBox;
private Checkbox rangeBox;
private Checkbox autoscaleBox;
private Checkbox virtualBox;
private Checkbox recordBox;
private Checkbox allSeriesBox;
private Checkbox cropBox;
private Checkbox swapBox;
private Hashtable infoTable;
private JEditorPane infoPane;
private Choice mergeChoice;
// -- Fields - core options --
private boolean firstTime;
private String stackFormat;
private String stackOrder;
private boolean mergeChannels;
private boolean colorize;
private boolean splitChannels;
private boolean splitFocalPlanes;
private boolean splitTimepoints;
private boolean crop;
private boolean showMetadata;
private boolean showOMEXML;
private boolean groupFiles;
private boolean concatenate;
private boolean specifyRanges;
private boolean autoscale;
private boolean virtual;
private boolean record;
private boolean openAllSeries;
private boolean swapDimensions;
private String mergeOption;
private boolean windowless;
private String seriesString;
private boolean forceThumbnails;
private String location;
private String id;
private boolean quiet;
private Location idLoc;
private String idName;
private String idType;
// -- ImporterOptions methods - accessors --
public boolean isFirstTime() { return firstTime; }
public String getStackFormat() { return stackFormat; }
public String getStackOrder() { return stackOrder; }
public boolean isMergeChannels() { return mergeChannels; }
public boolean isColorize() { return colorize; }
public boolean isSplitChannels() { return splitChannels; }
public boolean isSplitFocalPlanes() { return splitFocalPlanes; }
public boolean isSplitTimepoints() { return splitTimepoints; }
public boolean isShowMetadata() { return showMetadata; }
public boolean isShowOMEXML() { return showOMEXML; }
public boolean isGroupFiles() { return groupFiles; }
public boolean isConcatenate() { return concatenate; }
public boolean isSpecifyRanges() { return specifyRanges; }
public boolean isForceThumbnails() { return forceThumbnails; }
public boolean isAutoscale() { return autoscale; }
public boolean isWindowless() { return windowless; }
public boolean isVirtual() { return virtual; }
public boolean isRecord() { return record; }
public boolean openAllSeries() { return openAllSeries; }
public boolean doCrop() { return crop; }
public boolean isSwapDimensions() { return swapDimensions; }
public String getMergeOption() { return mergeOption; }
public boolean isViewNone() { return VIEW_NONE.equals(stackFormat); }
public boolean isViewStandard() { return VIEW_STANDARD.equals(stackFormat); }
public boolean isViewHyperstack() {
return VIEW_HYPERSTACK.equals(stackFormat);
}
public boolean isViewBrowser() { return VIEW_BROWSER.equals(stackFormat); }
public boolean isViewVisBio() { return VIEW_VISBIO.equals(stackFormat); }
public boolean isViewImage5D() { return VIEW_IMAGE_5D.equals(stackFormat); }
public boolean isViewView5D() { return VIEW_VIEW_5D.equals(stackFormat); }
public String getLocation() { return location; }
public String getId() { return id; }
public boolean isQuiet() { return quiet; }
public boolean isLocal() { return LOCATION_LOCAL.equals(location); }
public boolean isHTTP() { return LOCATION_HTTP.equals(location); }
public boolean isOME() { return LOCATION_OME.equals(location); }
public boolean isOMERO() { return LOCATION_OMERO.equals(location); }
public Location getIdLocation() { return idLoc; }
public String getIdName() { return idName; }
public String getIdType() { return idType; }
// -- ImporterOptions methods - mutators --
public void setStackFormat(String s) { stackFormat = s; }
public void setStackOrder(String s) { stackOrder = s; }
public void setMergeChannels(boolean b) { mergeChannels = b; }
public void setColorize(boolean b) { colorize = b; }
public void setSplitChannels(boolean b) { splitChannels = b; }
public void setSplitFocalPlanes(boolean b) { splitFocalPlanes = b; }
public void setSplitTimepoints(boolean b) { splitTimepoints = b; }
public void setShowMetadata(boolean b) { showMetadata = b; }
public void setShowOMEXML(boolean b) { showOMEXML = b; }
public void setGroupFiles(boolean b) { groupFiles = b; }
public void setConcatenate(boolean b) { concatenate = b; }
public void setSpecifyRanges(boolean b) { specifyRanges = b; }
public void setForceThumbnails(boolean b) { forceThumbnails = b; }
public void setAutoscale(boolean b) { autoscale = b; }
public void setWindowless(boolean b) { windowless = b; }
public void setVirtual(boolean b) { virtual = b; }
public void setRecord(boolean b) { record = b; }
public void setOpenAllSeries(boolean b) { openAllSeries = b; }
public void setCrop(boolean b) { crop = b; }
public void setSwapDimensions(boolean b) { swapDimensions = b; }
// -- ImporterOptions methods --
/** Loads default option values from IJ_Prefs.txt. */
public void loadPreferences() {
stackFormat = Prefs.get(PREF_STACK, VIEW_STANDARD);
stackOrder = Prefs.get(PREF_ORDER, ORDER_DEFAULT);
mergeChannels = Prefs.get(PREF_MERGE, false);
colorize = Prefs.get(PREF_COLORIZE, true);
splitChannels = Prefs.get(PREF_C, true);
splitFocalPlanes = Prefs.get(PREF_Z, false);
splitTimepoints = Prefs.get(PREF_T, false);
crop = Prefs.get(PREF_CROP, false);
showMetadata = Prefs.get(PREF_METADATA, false);
showOMEXML = Prefs.get(PREF_OME_XML, false);
groupFiles = Prefs.get(PREF_GROUP, false);
concatenate = Prefs.get(PREF_CONCATENATE, false);
specifyRanges = Prefs.get(PREF_RANGE, false);
autoscale = Prefs.get(PREF_AUTOSCALE, true);
virtual = Prefs.get(PREF_VIRTUAL, false);
record = Prefs.get(PREF_RECORD, true);
openAllSeries = Prefs.get(PREF_ALL_SERIES, false);
swapDimensions = Prefs.get(PREF_SWAP, false);
mergeOption = Prefs.get(PREF_MERGE_OPTION, MERGE_DEFAULT);
windowless = Prefs.get(PREF_WINDOWLESS, false);
seriesString = Prefs.get(PREF_SERIES, "0");
firstTime = Prefs.get(PREF_FIRST, true);
forceThumbnails = Prefs.get(PREF_THUMBNAIL, false);
}
/** Saves option values to IJ_Prefs.txt as the new defaults. */
public void savePreferences() {
Prefs.set(PREF_STACK, stackFormat);
Prefs.set(PREF_ORDER, stackOrder);
Prefs.set(PREF_MERGE, mergeChannels);
Prefs.set(PREF_COLORIZE, colorize);
Prefs.set(PREF_C, splitChannels);
Prefs.set(PREF_Z, splitFocalPlanes);
Prefs.set(PREF_T, splitTimepoints);
Prefs.set(PREF_CROP, crop);
Prefs.set(PREF_METADATA, showMetadata);
Prefs.set(PREF_OME_XML, showOMEXML);
Prefs.set(PREF_GROUP, groupFiles);
Prefs.set(PREF_CONCATENATE, concatenate);
Prefs.set(PREF_RANGE, specifyRanges);
Prefs.set(PREF_AUTOSCALE, autoscale);
Prefs.set(PREF_VIRTUAL, virtual);
Prefs.set(PREF_RECORD, record);
Prefs.set(PREF_ALL_SERIES, openAllSeries);
Prefs.set(PREF_SWAP, swapDimensions);
Prefs.set(PREF_MERGE_OPTION, mergeOption);
Prefs.set(PREF_WINDOWLESS, windowless);
Prefs.set(PREF_SERIES, seriesString);
Prefs.set(PREF_FIRST, false);
//Prefs.set(PREF_THUMBNAIL, forceThumbnails);
}
/** Parses the plugin argument for parameter values. */
public void parseArg(String arg) {
if (arg == null || arg.length() == 0) return;
if (new Location(arg).exists()) {
// old style arg: entire argument is a file path
// this style is used by the HandleExtraFileTypes plugin
// NB: This functionality must not be removed, or the plugin
// will stop working correctly with HandleExtraFileTypes.
location = LOCATION_LOCAL;
id = arg;
quiet = true; // suppress obnoxious error messages and such
}
else {
// new style arg: split up similar to a macro options string, but
// slightly different than macro options, in that boolean arguments
// must be of the form "key=true" rather than just "key"
// only the core options are supported for now
// NB: This functionality enables multiple plugin entries to achieve
// distinct behavior by calling the LociImporter plugin differently.
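// example of the new style (illustrative values only; keys derive from
// the labels above): "merge_channels=true view=[Data Browser] open=/path/to/data"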
stackFormat = Macro.getValue(arg, LABEL_STACK, stackFormat);
stackOrder = Macro.getValue(arg, LABEL_ORDER, stackOrder);
mergeChannels = getMacroValue(arg, LABEL_MERGE, mergeChannels);
colorize = getMacroValue(arg, LABEL_COLORIZE, colorize);
splitChannels = getMacroValue(arg, LABEL_C, splitChannels);
splitFocalPlanes = getMacroValue(arg, LABEL_Z, splitFocalPlanes);
splitTimepoints = getMacroValue(arg, LABEL_T, splitTimepoints);
crop = getMacroValue(arg, LABEL_CROP, crop);
showMetadata = getMacroValue(arg, LABEL_METADATA, showMetadata);
showOMEXML = getMacroValue(arg, LABEL_OME_XML, showOMEXML);
groupFiles = getMacroValue(arg, LABEL_GROUP, groupFiles);
concatenate = getMacroValue(arg, LABEL_CONCATENATE, concatenate);
specifyRanges = getMacroValue(arg, LABEL_RANGE, specifyRanges);
autoscale = getMacroValue(arg, LABEL_AUTOSCALE, autoscale);
virtual = getMacroValue(arg, LABEL_VIRTUAL, virtual);
record = getMacroValue(arg, LABEL_RECORD, record);
openAllSeries = getMacroValue(arg, LABEL_ALL_SERIES, openAllSeries);
swapDimensions = getMacroValue(arg, LABEL_SWAP, swapDimensions);
mergeOption = Macro.getValue(arg, LABEL_MERGE_OPTION, mergeOption);
windowless = getMacroValue(arg, LABEL_WINDOWLESS, windowless);
seriesString = Macro.getValue(arg, LABEL_SERIES, "0");
location = Macro.getValue(arg, LABEL_LOCATION, location);
id = Macro.getValue(arg, LABEL_ID, id);
}
}
/**
* Gets the location (type of data source) from macro options,
* or user prompt if necessary.
* @return status of operation
*/
public int promptLocation() {
if (location == null) {
// Open a dialog asking the user what kind of dataset to handle.
// Ask only if the location was not already specified somehow.
// ImageJ will grab the value from the macro options, when possible.
GenericDialog gd = new GenericDialog("Bio-Formats Dataset Location");
gd.addChoice(LABEL_LOCATION, LOCATIONS, LOCATION_LOCAL);
gd.showDialog();
if (gd.wasCanceled()) return STATUS_CANCELED;
location = gd.getNextChoice();
}
// verify that location is valid
boolean isLocal = LOCATION_LOCAL.equals(location);
boolean isHTTP = LOCATION_HTTP.equals(location);
boolean isOME = LOCATION_OME.equals(location);
boolean isOMERO = LOCATION_OMERO.equals(location);
if (!isLocal && !isHTTP && !isOME && !isOMERO) {
if (!quiet) IJ.error("Bio-Formats", "Invalid location: " + location);
return STATUS_FINISHED;
}
return STATUS_OK;
}
/**
* Gets the id (e.g., filename or URL) to open from macro options,
* or user prompt if necessary.
* @return status of operation
*/
public int promptId() {
if (isLocal()) return promptIdLocal();
else if (isHTTP()) return promptIdHTTP();
else return promptIdOME(); // isOME
}
/**
* Gets the filename (id) to open from macro options,
* or user prompt if necessary.
* @return status of operation
*/
public int promptIdLocal() {
if (firstTime && IJ.isMacOSX()) {
String osVersion = System.getProperty("os.version");
if (osVersion == null ||
osVersion.startsWith("10.4.") ||
osVersion.startsWith("10.3.") ||
osVersion.startsWith("10.2."))
{
// present user with one-time dialog box
IJ.showMessage("Bio-Formats",
"One-time warning: There is a bug in Java on Mac OS X with the\n" +
"native file chooser that crashes ImageJ if you click on a file\n" +
"in cxd, ipw, oib or zvi format while in column view mode.\n" +
"You can work around the problem by switching to list view\n" +
"(press Command+2) or by checking the \"Use JFileChooser to\n" +
"Open/Save\" option in the Edit>Options>Input/Output... dialog.");
}
}
String ijVersion = IJ.getVersion();
if (firstTime && (ijVersion == null || ijVersion.compareTo("1.39u") < 0)) {
// present user with one-time dialog box
if (ijVersion == null) ijVersion = "unknown";
IJ.showMessage("Bio-Formats",
"One-time warning: Some features of Bio-Formats, such as the\n" +
"Data Browser and some color handling options, require ImageJ\n" +
"v1.39u or later. Your version is " + ijVersion +
"; you will need to upgrade\n" +
"if you wish to take advantage of these features.");
}
if (id == null) {
// prompt user for the filename (or grab from macro options)
OpenDialog od = new OpenDialog(LABEL_ID, id);
String dir = od.getDirectory();
String name = od.getFileName();
if (dir == null || name == null) return STATUS_CANCELED;
id = dir + name;
}
// verify that id is valid
if (id != null) idLoc = new Location(id);
if (idLoc == null || !idLoc.exists()) {
if (!quiet) {
IJ.error("Bio-Formats", idLoc == null ?
"No file was specified." :
"The specified file (" + id + ") does not exist.");
}
return STATUS_FINISHED;
}
idName = idLoc.getName();
idType = "Filename";
return STATUS_OK;
}
/**
* Gets the URL (id) to open from macro options,
* or user prompt if necessary.
* @return status of operation
*/
public int promptIdHTTP() {
if (id == null) {
// prompt user for the URL (or grab from macro options)
GenericDialog gd = new GenericDialog("Bio-Formats URL");
gd.addStringField("URL: ", "http://", 30);
gd.showDialog();
if (gd.wasCanceled()) return STATUS_CANCELED;
id = gd.getNextString();
}
// verify that id is valid
if (id == null) {
if (!quiet) IJ.error("Bio-Formats", "No URL was specified.");
return STATUS_FINISHED;
}
idName = id;
idType = "URL";
return STATUS_OK;
}
/**
* Gets the OME server and image (id) to open from macro options,
* or user prompt if necessary.
* @return status of operation
*/
public int promptIdOME() {
if (id == null) {
// CTR FIXME -- eliminate this kludge
IJ.runPlugIn("loci.plugins.ome.OMEPlugin", "");
return STATUS_FINISHED;
}
idType = "OME address";
return STATUS_OK;
}
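/**
* Gets the channel merging strategy from macro options,
* or user prompt if necessary.
* @param nums Resulting plane count for each possible merge; indices 0-5
*   correspond to 2-7 channels per plane.
* @param spectral Whether to offer a spectral projection option.
* @return status of operation
*/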
public int promptMergeOption(int[] nums, boolean spectral) {
if (windowless) return STATUS_OK;
GenericDialog gd = new GenericDialog("Merging Options...");
String[] options = new String[spectral ? 8 : 7];
options[6] = MERGE_DEFAULT;
if (spectral) options[7] = MERGE_PROJECTION;
for (int i=0; i<6; i++) {
options[i] = nums[i] + " planes, " + (i + 2) + " channels per plane";
}
gd.addMessage("How would you like to merge this data?");
gd.addChoice(LABEL_MERGE_OPTION, options, MERGE_DEFAULT);
gd.showDialog();
if (gd.wasCanceled()) return STATUS_CANCELED;
mergeOption = options[gd.getNextChoiceIndex()];
return STATUS_OK;
}
/**
* Gets option values from macro options, or user prompt if necessary.
* @return status of operation
*/
public int promptOptions() {
Vector stackTypes = new Vector();
stackTypes.add(VIEW_NONE);
stackTypes.add(VIEW_STANDARD);
if (IJ.getVersion().compareTo("1.39l") >= 0) {
stackTypes.add(VIEW_HYPERSTACK);
stackTypes.add(VIEW_BROWSER);
}
if (Checker.checkClass(CLASS_VISBIO)) stackTypes.add(VIEW_VISBIO);
if (Checker.checkClass(CLASS_IMAGE_5D)) stackTypes.add(VIEW_IMAGE_5D);
if (Checker.checkClass(CLASS_VIEW_5D)) stackTypes.add(VIEW_VIEW_5D);
final String[] stackFormats = new String[stackTypes.size()];
stackTypes.copyInto(stackFormats);
String[] stackOrders = new String[] {
ORDER_DEFAULT, ORDER_XYZCT, ORDER_XYZTC, ORDER_XYCZT, ORDER_XYCTZ,
ORDER_XYTZC, ORDER_XYTCZ
};
// prompt user for parameters (or grab from macro options)
GenericDialog gd = new GenericDialog("Bio-Formats Import Options");
gd.addChoice(LABEL_STACK, stackFormats, stackFormat);
gd.addChoice(LABEL_ORDER, stackOrders, stackOrder);
gd.addCheckbox(LABEL_MERGE, mergeChannels);
gd.addCheckbox(LABEL_COLORIZE, colorize);
gd.addCheckbox(LABEL_C, splitChannels);
gd.addCheckbox(LABEL_Z, splitFocalPlanes);
gd.addCheckbox(LABEL_T, splitTimepoints);
gd.addCheckbox(LABEL_CROP, crop);
gd.addCheckbox(LABEL_METADATA, showMetadata);
gd.addCheckbox(LABEL_OME_XML, showOMEXML);
gd.addCheckbox(LABEL_GROUP, groupFiles);
gd.addCheckbox(LABEL_CONCATENATE, concatenate);
gd.addCheckbox(LABEL_RANGE, specifyRanges);
gd.addCheckbox(LABEL_AUTOSCALE, autoscale);
gd.addCheckbox(LABEL_VIRTUAL, virtual);
gd.addCheckbox(LABEL_RECORD, record);
gd.addCheckbox(LABEL_ALL_SERIES, openAllSeries);
gd.addCheckbox(LABEL_SWAP, swapDimensions);
// extract GUI components from dialog and add listeners
Vector labels = null;
Label stackLabel = null, orderLabel = null;
Component[] c = gd.getComponents();
if (c != null) {
labels = new Vector();
for (int i=0; i<c.length; i++) {
if (c[i] instanceof Label) {
Label item = (Label) c[i];
labels.add(item);
}
}
stackLabel = (Label) labels.get(0);
orderLabel = (Label) labels.get(1);
}
Vector choices = gd.getChoices();
if (choices != null) {
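// NB: indices correspond to the order of the addChoice calls above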
stackChoice = (Choice) choices.get(0);
orderChoice = (Choice) choices.get(1);
for (int i=0; i<choices.size(); i++) {
Choice item = (Choice) choices.get(i);
item.addFocusListener(this);
item.addItemListener(this);
item.addMouseListener(this);
}
}
Vector boxes = gd.getCheckboxes();
if (boxes != null) {
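// NB: indices must match the order of the addCheckbox calls above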
mergeBox = (Checkbox) boxes.get(0);
colorizeBox = (Checkbox) boxes.get(1);
splitCBox = (Checkbox) boxes.get(2);
splitZBox = (Checkbox) boxes.get(3);
splitTBox = (Checkbox) boxes.get(4);
cropBox = (Checkbox) boxes.get(5);
metadataBox = (Checkbox) boxes.get(6);
omexmlBox = (Checkbox) boxes.get(7);
groupBox = (Checkbox) boxes.get(8);
concatenateBox = (Checkbox) boxes.get(9);
rangeBox = (Checkbox) boxes.get(10);
autoscaleBox = (Checkbox) boxes.get(11);
virtualBox = (Checkbox) boxes.get(12);
recordBox = (Checkbox) boxes.get(13);
allSeriesBox = (Checkbox) boxes.get(14);
swapBox = (Checkbox) boxes.get(15);
for (int i=0; i<boxes.size(); i++) {
Checkbox item = (Checkbox) boxes.get(i);
item.addFocusListener(this);
item.addItemListener(this);
item.addMouseListener(this);
}
}
verifyOptions(null);
// associate information for each option
infoTable = new Hashtable();
infoTable.put(stackLabel, INFO_STACK);
infoTable.put(stackChoice, INFO_STACK);
infoTable.put(orderLabel, INFO_ORDER);
infoTable.put(orderChoice, INFO_ORDER);
infoTable.put(mergeBox, INFO_MERGE);
infoTable.put(colorizeBox, INFO_COLORIZE);
infoTable.put(splitCBox, INFO_C);
infoTable.put(splitZBox, INFO_Z);
infoTable.put(splitTBox, INFO_T);
infoTable.put(cropBox, INFO_CROP);
infoTable.put(metadataBox, INFO_METADATA);
infoTable.put(omexmlBox, INFO_OME_XML);
infoTable.put(groupBox, INFO_GROUP);
infoTable.put(concatenateBox, INFO_CONCATENATE);
infoTable.put(rangeBox, INFO_RANGE);
infoTable.put(autoscaleBox, INFO_AUTOSCALE);
infoTable.put(virtualBox, INFO_VIRTUAL);
infoTable.put(recordBox, INFO_RECORD);
infoTable.put(allSeriesBox, INFO_ALL_SERIES);
infoTable.put(swapBox, INFO_SWAP);
// rebuild dialog using FormLayout to organize things more nicely
String cols =
// first column
"pref, 3dlu, pref:grow, " +
// second column
"10dlu, pref";
String rows =
// Stack viewing | Metadata viewing
"pref, 3dlu, pref, 3dlu, pref, " +
// Dataset organization | Memory management
"9dlu, pref, 3dlu, pref, 3dlu, pref, 3dlu, pref, 3dlu, pref, " +
// Color options | Split into separate windows
"9dlu, pref, 3dlu, pref, 3dlu, pref, 3dlu, pref, " +
// Information
"9dlu, pref, 3dlu, fill:40dlu";
// TODO: change "Merge channels into RGB" checkbox to
// "Channel merging" choice with options:
// "Default", "Merge channels" or "Separate channels"
// TODO: change "Use virtual stack" and "Record modifications to virtual
// stack" checkboxes to "Stack type" choice with options:
// "Normal", "Virtual" or "Smart virtual"
PanelBuilder builder = new PanelBuilder(new FormLayout(cols, rows));
CellConstraints cc = new CellConstraints();
// populate 1st column
int row = 1;
builder.addSeparator("Stack viewing", cc.xyw(1, row, 3));
row += 2;
builder.add(stackLabel, cc.xy(1, row));
builder.add(stackChoice, cc.xy(3, row));
row += 2;
builder.add(orderLabel, cc.xy(1, row));
builder.add(orderChoice, cc.xy(3, row));
row += 2;
builder.addSeparator("Dataset organization", cc.xyw(1, row, 3));
row += 2;
builder.add(groupBox, xyw(cc, 1, row, 3));
row += 2;
builder.add(swapBox, xyw(cc, 1, row, 3));
row += 2;
builder.add(allSeriesBox, xyw(cc, 1, row, 3));
row += 2;
builder.add(concatenateBox, xyw(cc, 1, row, 3));
row += 2;
builder.addSeparator("Color options", cc.xyw(1, row, 3));
row += 2;
builder.add(mergeBox, xyw(cc, 1, row, 3));
row += 2;
builder.add(colorizeBox, xyw(cc, 1, row, 3));
row += 2;
builder.add(autoscaleBox, xyw(cc, 1, row, 3));
row += 2;
// populate 2nd column
row = 1;
builder.addSeparator("Metadata viewing", cc.xy(5, row));
row += 2;
builder.add(metadataBox, xyw(cc, 5, row, 1));
row += 2;
builder.add(omexmlBox, xyw(cc, 5, row, 1));
row += 2;
builder.addSeparator("Memory management", cc.xy(5, row));
row += 2;
builder.add(virtualBox, xyw(cc, 5, row, 1));
row += 2;
builder.add(recordBox, xyw(cc, 5, row, 1));
row += 2;
builder.add(rangeBox, xyw(cc, 5, row, 1));
row += 2;
builder.add(cropBox, xyw(cc, 5, row, 1));
row += 2;
builder.addSeparator("Split into separate windows", cc.xy(5, row));
row += 2;
builder.add(splitCBox, xyw(cc, 5, row, 1));
row += 2;
builder.add(splitZBox, xyw(cc, 5, row, 1));
row += 2;
builder.add(splitTBox, xyw(cc, 5, row, 1));
row += 2;
// information section
builder.addSeparator("Information", cc.xyw(1, row, 5));
row += 2;
infoPane = new JEditorPane();
infoPane.setContentType("text/html");
infoPane.setEditable(false);
infoPane.setText("<html>" + INFO_DEFAULT);
builder.add(new JScrollPane(infoPane), cc.xyw(1, row, 5));
row += 2;
gd.removeAll();
gd.add(builder.getPanel());
// display dialog to user and harvest results
gd.showDialog();
if (gd.wasCanceled()) return STATUS_CANCELED;
stackFormat = stackFormats[gd.getNextChoiceIndex()];
stackOrder = stackOrders[gd.getNextChoiceIndex()];
mergeChannels = gd.getNextBoolean();
colorize = gd.getNextBoolean();
splitChannels = gd.getNextBoolean();
splitFocalPlanes = gd.getNextBoolean();
splitTimepoints = gd.getNextBoolean();
crop = gd.getNextBoolean();
showMetadata = gd.getNextBoolean();
showOMEXML = gd.getNextBoolean();
groupFiles = gd.getNextBoolean();
concatenate = gd.getNextBoolean();
specifyRanges = gd.getNextBoolean();
autoscale = gd.getNextBoolean();
virtual = gd.getNextBoolean();
record = gd.getNextBoolean();
openAllSeries = gd.getNextBoolean();
swapDimensions = gd.getNextBoolean();
return STATUS_OK;
}
/**
* Gets file pattern from id, macro options, or user prompt if necessary.
* @return status of operation
*/
public int promptFilePattern() {
if (windowless) return STATUS_OK;
id = FilePattern.findPattern(idLoc);
if (id == null) {
IJ.showMessage("Bio-Formats",
"Warning: Bio-Formats was unable to determine a grouping that\n" +
"includes the file you chose. The most common reason for this\n" +
"situation is that the folder contains extraneous files with " +
"similar\n" +
"names and numbers that confuse the detection algorithm.\n" +
" \n" +
"For example, if you have multiple datasets in the same folder\n" +
"named series1_z*_c*.tif, series2_z*_c*.tif, etc., Bio-Formats\n" +
"may try to group all such files into a single series.\n" +
" \n" +
"For best results, put each image series's files in their own " +
"folder,\n" +
"or type in a file pattern manually.\n");
id = idLoc.getAbsolutePath();
}
// prompt user to confirm file pattern (or grab from macro options)
GenericDialog gd = new GenericDialog("Bio-Formats File Stitching");
int len = id.length() + 1;
if (len > 80) len = 80;
gd.addStringField("Pattern: ", id, len);
gd.showDialog();
if (gd.wasCanceled()) return STATUS_CANCELED;
id = gd.getNextString();
return STATUS_OK;
}
/**
* Gets which series to open from macro options, or user prompt if necessary.
* @param r The reader to use for extracting details of each series.
* @param seriesLabels Label to display to user identifying each series.
* @param series Boolean array indicating which series to include
* (populated by this method).
* @return status of operation
*/
public int promptSeries(IFormatReader r,
String[] seriesLabels, boolean[] series)
{
if (windowless) {
if (seriesString != null) {
if (seriesString.startsWith("[")) {
// strip the enclosing brackets; the string is built below as e.g. "[0 2 ]",
// so trim() also removes the trailing space (and handles an empty "[]")
seriesString = seriesString.substring(1, seriesString.length() - 1).trim();
}
Arrays.fill(series, false);
StringTokenizer tokens = new StringTokenizer(seriesString, " ");
while (tokens.hasMoreTokens()) {
String token = tokens.nextToken().trim();
int n = Integer.parseInt(token);
if (n >= 0 && n < series.length) series[n] = true;
}
}
return STATUS_OK;
}
int seriesCount = r.getSeriesCount();
// prompt user to specify series inclusion (or grab from macro options)
GenericDialog gd = new GenericDialog("Bio-Formats Series Options") {
public void actionPerformed(ActionEvent e) {
String cmd = e.getActionCommand();
if ("select".equals(cmd)) {
Checkbox[] boxes =
(Checkbox[]) getCheckboxes().toArray(new Checkbox[0]);
for (int i=0; i<boxes.length; i++) {
boxes[i].setState(true);
}
}
else if ("deselect".equals(cmd)) {
Checkbox[] boxes =
(Checkbox[]) getCheckboxes().toArray(new Checkbox[0]);
for (int i=0; i<boxes.length; i++) {
boxes[i].setState(false);
}
}
else {
super.actionPerformed(e);
}
}
};
GridBagLayout gdl = (GridBagLayout) gd.getLayout();
GridBagConstraints gbc = new GridBagConstraints();
gbc.gridx = 2;
gbc.gridwidth = GridBagConstraints.REMAINDER;
Panel[] p = new Panel[seriesCount];
for (int i=0; i<seriesCount; i++) {
gd.addCheckbox(seriesLabels[i], series[i]);
r.setSeries(i);
int sx = r.getThumbSizeX() + 10; // a little extra padding
int sy = r.getThumbSizeY();
p[i] = new Panel();
p[i].add(Box.createRigidArea(new Dimension(sx, sy)));
gbc.gridy = i;
if (forceThumbnails) {
IJ.showStatus("Reading thumbnail for series #" + (i + 1));
int z = r.getSizeZ() / 2;
int t = r.getSizeT() / 2;
int ndx = r.getIndex(z, 0, t);
try {
BufferedImage img = r.openThumbImage(ndx);
if (isAutoscale() && r.getPixelType() != FormatTools.FLOAT) {
img = AWTImageTools.autoscale(img);
}
ImageIcon icon = new ImageIcon(img);
p[i].removeAll();
p[i].add(new JLabel(icon));
}
catch (Exception e) { }
}
gdl.setConstraints(p[i], gbc);
gd.add(p[i]);
}
Util.addScrollBars(gd);
Panel buttons = new Panel();
Button select = new Button("Select All");
select.setActionCommand("select");
select.addActionListener(gd);
Button deselect = new Button("Deselect All");
deselect.setActionCommand("deselect");
deselect.addActionListener(gd);
buttons.add(select);
buttons.add(deselect);
gbc.gridx = 0;
gbc.gridy = seriesCount;
gdl.setConstraints(buttons, gbc);
gd.add(buttons);
if (forceThumbnails) gd.showDialog();
else {
ThumbLoader loader = new ThumbLoader(r, p, gd, isAutoscale());
gd.showDialog();
loader.stop();
}
if (gd.wasCanceled()) return STATUS_CANCELED;
seriesString = "[";
for (int i=0; i<seriesCount; i++) {
series[i] = gd.getNextBoolean();
if (series[i]) {
seriesString += i + " ";
}
}
seriesString += "]";
if (concatenate) {
// toggle on compatible series
// CTR FIXME -- why are we doing this?
for (int i=0; i<seriesCount; i++) {
if (series[i]) continue;
r.setSeries(i);
int sizeX = r.getSizeX();
int sizeY = r.getSizeY();
int pixelType = r.getPixelType();
int sizeC = r.getSizeC();
for (int j=0; j<seriesCount; j++) {
if (j == i || !series[j]) continue;
r.setSeries(j);
if (sizeX == r.getSizeX() && sizeY == r.getSizeY() &&
pixelType == r.getPixelType() && sizeC == r.getSizeC())
{
series[i] = true;
break;
}
}
}
}
return STATUS_OK;
}
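/**
* Gets the crop region for each included series from macro options,
* or user prompt if necessary.
* @param r The reader to use for extracting image dimensions.
* @param labels Label to display to user identifying each series.
* @param series Boolean array indicating which series are included.
* @param box Crop region for each series (populated by this method).
* @return status of operation
*/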
public int promptCropSize(IFormatReader r, String[] labels, boolean[] series,
Rectangle[] box)
{
GenericDialog gd = new GenericDialog("Bio-Formats Crop Options");
for (int i=0; i<series.length; i++) {
if (!series[i]) continue;
gd.addMessage(labels[i].replaceAll("_", " "));
gd.addNumericField("X_Coordinate_" + i, 0, 0);
gd.addNumericField("Y_Coordinate_" + i, 0, 0);
gd.addNumericField("Width_" + i, 0, 0);
gd.addNumericField("Height_" + i, 0, 0);
}
Util.addScrollBars(gd);
gd.showDialog();
if (gd.wasCanceled()) return STATUS_CANCELED;
for (int i=0; i<series.length; i++) {
if (!series[i]) continue;
r.setSeries(i);
box[i].x = (int) gd.getNextNumber();
box[i].y = (int) gd.getNextNumber();
box[i].width = (int) gd.getNextNumber();
box[i].height = (int) gd.getNextNumber();
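// clamp the crop region so it stays within the image bounds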
if (box[i].x < 0) box[i].x = 0;
if (box[i].y < 0) box[i].y = 0;
if (box[i].x >= r.getSizeX()) box[i].x = r.getSizeX() - box[i].width - 1;
if (box[i].y >= r.getSizeY()) box[i].y = r.getSizeY() - box[i].height - 1;
if (box[i].width < 1) box[i].width = 1;
if (box[i].height < 1) box[i].height = 1;
if (box[i].width + box[i].x > r.getSizeX()) {
box[i].width = r.getSizeX() - box[i].x;
}
if (box[i].height + box[i].y > r.getSizeY()) {
box[i].height = r.getSizeY() - box[i].y;
}
}
return STATUS_OK;
}
/**
* Gets the range of image planes to open from macro options,
* or user prompt if necessary.
* @param r The reader to use for extracting details of each series.
* @param series Boolean array indicating the series
* for which ranges should be determined.
* @param seriesLabels Label to display to user identifying each series
* @param cBegin First C index to include (populated by this method).
* @param cEnd Last C index to include (populated by this method).
* @param cStep C dimension step size (populated by this method).
* @param zBegin First Z index to include (populated by this method).
* @param zEnd Last Z index to include (populated by this method).
* @param zStep Z dimension step size (populated by this method).
* @param tBegin First T index to include (populated by this method).
* @param tEnd Last T index to include (populated by this method).
* @param tStep T dimension step size (populated by this method).
* @return status of operation
*/
public int promptRange(IFormatReader r,
boolean[] series, String[] seriesLabels,
int[] cBegin, int[] cEnd, int[] cStep,
int[] zBegin, int[] zEnd, int[] zStep,
int[] tBegin, int[] tEnd, int[] tStep)
{
int seriesCount = r.getSeriesCount();
// prompt user to specify series ranges (or grab from macro options)
GenericDialog gd = new GenericDialog("Bio-Formats Range Options");
for (int i=0; i<seriesCount; i++) {
if (!series[i]) continue;
r.setSeries(i);
gd.addMessage(seriesLabels[i].replaceAll("_", " "));
String s = seriesCount > 1 ? "_" + (i + 1) : "";
if (r.isOrderCertain()) {
if (r.getEffectiveSizeC() > 1) {
gd.addNumericField("C_Begin" + s, cBegin[i] + 1, 0);
gd.addNumericField("C_End" + s, cEnd[i] + 1, 0);
gd.addNumericField("C_Step" + s, cStep[i], 0);
}
if (r.getSizeZ() > 1) {
gd.addNumericField("Z_Begin" + s, zBegin[i] + 1, 0);
gd.addNumericField("Z_End" + s, zEnd[i] + 1, 0);
gd.addNumericField("Z_Step" + s, zStep[i], 0);
}
if (r.getSizeT() > 1) {
gd.addNumericField("T_Begin" + s, tBegin[i] + 1, 0);
gd.addNumericField("T_End" + s, tEnd[i] + 1, 0);
gd.addNumericField("T_Step" + s, tStep[i], 0);
}
}
else {
gd.addNumericField("Begin" + s, cBegin[i] + 1, 0);
gd.addNumericField("End" + s, cEnd[i] + 1, 0);
gd.addNumericField("Step" + s, cStep[i], 0);
}
}
Util.addScrollBars(gd);
gd.showDialog();
if (gd.wasCanceled()) return STATUS_CANCELED;
for (int i=0; i<seriesCount; i++) {
if (!series[i]) continue;
r.setSeries(i);
int sizeC = r.getEffectiveSizeC();
int sizeZ = r.getSizeZ();
int sizeT = r.getSizeT();
boolean certain = r.isOrderCertain();
if (certain) {
if (r.getEffectiveSizeC() > 1) {
cBegin[i] = (int) gd.getNextNumber() - 1;
cEnd[i] = (int) gd.getNextNumber() - 1;
cStep[i] = (int) gd.getNextNumber();
}
if (r.getSizeZ() > 1) {
zBegin[i] = (int) gd.getNextNumber() - 1;
zEnd[i] = (int) gd.getNextNumber() - 1;
zStep[i] = (int) gd.getNextNumber();
}
if (r.getSizeT() > 1) {
tBegin[i] = (int) gd.getNextNumber() - 1;
tEnd[i] = (int) gd.getNextNumber() - 1;
tStep[i] = (int) gd.getNextNumber();
}
}
else {
cBegin[i] = (int) gd.getNextNumber() - 1;
cEnd[i] = (int) gd.getNextNumber() - 1;
cStep[i] = (int) gd.getNextNumber();
}
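// clamp the requested ranges to the valid index space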
int maxC = certain ? sizeC : r.getImageCount();
if (cBegin[i] < 0) cBegin[i] = 0;
if (cBegin[i] >= maxC) cBegin[i] = maxC - 1;
if (cEnd[i] < cBegin[i]) cEnd[i] = cBegin[i];
if (cEnd[i] >= maxC) cEnd[i] = maxC - 1;
if (cStep[i] < 1) cStep[i] = 1;
if (zBegin[i] < 0) zBegin[i] = 0;
if (zBegin[i] >= sizeZ) zBegin[i] = sizeZ - 1;
if (zEnd[i] < zBegin[i]) zEnd[i] = zBegin[i];
if (zEnd[i] >= sizeZ) zEnd[i] = sizeZ - 1;
if (zStep[i] < 1) zStep[i] = 1;
if (tBegin[i] < 0) tBegin[i] = 0;
if (tBegin[i] >= sizeT) tBegin[i] = sizeT - 1;
if (tEnd[i] < tBegin[i]) tEnd[i] = tBegin[i];
if (tEnd[i] >= sizeT) tEnd[i] = sizeT - 1;
if (tStep[i] < 1) tStep[i] = 1;
}
return STATUS_OK;
}
/** Prompt for dimension swapping options. */
public int promptSwap(DimensionSwapper r, boolean[] series) {
GenericDialog gd = new GenericDialog("Dimension swapping options");
int oldSeries = r.getSeries();
String[] labels = new String[] {"Z", "C", "T"};
for (int n=0; n<r.getSeriesCount(); n++) {
if (!series[n]) continue;
r.setSeries(n);
gd.addMessage("Series " + n + ":\n");
int[] axisSizes = new int[] {r.getSizeZ(), r.getSizeC(), r.getSizeT()};
for (int i=0; i<labels.length; i++) {
gd.addChoice(axisSizes[i] + "_planes", labels, labels[i]);
}
}
Util.addScrollBars(gd);
gd.showDialog();
if (gd.wasCanceled()) return STATUS_CANCELED;
for (int n=0; n<r.getSeriesCount(); n++) {
if (!series[n]) continue; // skip deselected series: choices were only added for selected ones
r.setSeries(n);
String z = gd.getNextChoice();
String c = gd.getNextChoice();
String t = gd.getNextChoice();
if (z.equals(t) || z.equals(c) || c.equals(t)) {
IJ.error("Invalid swapping options - each axis can be used only once.");
return promptSwap(r, series);
}
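// Rebuild the dimension order string, substituting the user's choice
// for each of the original Z, C and T positions while keeping XY fixed.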
String originalOrder = r.getDimensionOrder();
StringBuffer sb = new StringBuffer();
sb.append("XY");
for (int i=2; i<originalOrder.length(); i++) {
if (originalOrder.charAt(i) == 'Z') sb.append(z);
else if (originalOrder.charAt(i) == 'C') sb.append(c);
else if (originalOrder.charAt(i) == 'T') sb.append(t);
}
r.swapDimensions(sb.toString());
}
r.setSeries(oldSeries);
return STATUS_OK;
}
// -- FocusListener methods --
/** Handles information pane updates when component focus changes. */
public void focusGained(FocusEvent e) {
Object src = e.getSource();
String text = (String) infoTable.get(src);
infoPane.setText("<html>" + text);
}
public void focusLost(FocusEvent e) { }
// -- ItemListener methods --
/** Handles toggling of mutually exclusive options. */
public void itemStateChanged(ItemEvent e) {
verifyOptions(e.getSource());
}
// -- MouseListener methods --
/** Focuses the component upon mouseover. */
public void mouseEntered(MouseEvent e) {
Object src = e.getSource();
if (src instanceof Component) {
((Component) src).requestFocus();
}
}
public void mouseClicked(MouseEvent e) { }
public void mouseExited(MouseEvent e) { }
public void mousePressed(MouseEvent e) { }
public void mouseReleased(MouseEvent e) { }
// -- Static helper methods --
private static boolean getMacroValue(String options,
String key, boolean defaultValue)
{
String s = Macro.getValue(options, key, null);
return s == null ? defaultValue : s.equalsIgnoreCase("true");
}
/**
* Besides flashing the components' backgrounds red for a split second,
* this method also acts as a workaround for a Mac OS X bug that causes
* components not to be redrawn after programmatic state changes.
*/
private static void flash(Vector v) {
Color[] bg = new Color[v.size()];
for (int i=0; i<bg.length; i++) {
Component c = (Component) v.get(i);
bg[i] = c.isBackgroundSet() ? c.getBackground() : null;
c.setBackground(Color.red);
}
try {
Thread.sleep(100);
}
catch (InterruptedException exc) { }
for (int i=0; i<bg.length; i++) {
Component c = (Component) v.get(i);
c.setBackground(bg[i]);
}
}
private static String info(String label) {
return "<b>" + label.replaceAll("[_:]", " ").trim() + "</b> - ";
}
private static CellConstraints xyw(CellConstraints cc, int x, int y, int w) {
return cc.xyw(x, y, w, CellConstraints.LEFT, CellConstraints.CENTER);
}
// -- Helper methods --
/** Ensures that mutually exclusive options in the dialog are never simultaneously selected. */
private void verifyOptions(Object src) {
// record GUI state
//boolean stackEnabled = stackChoice.isEnabled();
boolean orderEnabled = orderChoice.isEnabled();
boolean mergeEnabled = mergeBox.isEnabled();
boolean colorizeEnabled = colorizeBox.isEnabled();
boolean splitCEnabled = splitCBox.isEnabled();
boolean splitZEnabled = splitZBox.isEnabled();
boolean splitTEnabled = splitTBox.isEnabled();
boolean metadataEnabled = metadataBox.isEnabled();
boolean omexmlEnabled = omexmlBox.isEnabled();
boolean groupEnabled = groupBox.isEnabled();
boolean concatenateEnabled = concatenateBox.isEnabled();
boolean rangeEnabled = rangeBox.isEnabled();
boolean autoscaleEnabled = autoscaleBox.isEnabled();
boolean virtualEnabled = virtualBox.isEnabled();
boolean recordEnabled = recordBox.isEnabled();
boolean allSeriesEnabled = allSeriesBox.isEnabled();
boolean cropEnabled = cropBox.isEnabled();
boolean swapEnabled = swapBox.isEnabled();
boolean isStackNone = false;
boolean isStackStandard = false;
boolean isStackHyperstack = false;
boolean isStackBrowser = false;
boolean isStackVisBio = false;
boolean isStackImage5D = false;
boolean isStackView5D = false;
String stackValue = stackChoice.getSelectedItem();
if (stackValue.equals(VIEW_NONE)) isStackNone = true;
else if (stackValue.equals(VIEW_STANDARD)) isStackStandard = true;
else if (stackValue.equals(VIEW_HYPERSTACK)) isStackHyperstack = true;
else if (stackValue.equals(VIEW_BROWSER)) isStackBrowser = true;
else if (stackValue.equals(VIEW_VISBIO)) isStackVisBio = true;
else if (stackValue.equals(VIEW_IMAGE_5D)) isStackImage5D = true;
else if (stackValue.equals(VIEW_VIEW_5D)) isStackView5D = true;
String orderValue = orderChoice.getSelectedItem();
boolean isMerge = mergeBox.getState();
boolean isColorize = colorizeBox.getState();
boolean isSplitC = splitCBox.getState();
boolean isSplitZ = splitZBox.getState();
boolean isSplitT = splitTBox.getState();
boolean isMetadata = metadataBox.getState();
boolean isOMEXML = omexmlBox.getState();
boolean isGroup = groupBox.getState();
boolean isConcatenate = concatenateBox.getState();
boolean isRange = rangeBox.getState();
boolean isAutoscale = autoscaleBox.getState();
boolean isVirtual = virtualBox.getState();
boolean isRecord = recordBox.getState();
boolean isAllSeries = allSeriesBox.getState();
boolean isCrop = cropBox.getState();
boolean isSwap = swapBox.getState();
// toggle availability of each option based on state of earlier options
// NB: The order the options are examined here defines their order of
// precedence. This ordering is necessary because it affects which
// component states are capable of graying out other components.
// For example, when virtualBox is enabled, autoscaleBox is grayed out,
// so the virtualBox logic must appear before the autoscaleBox logic.
// == Stack viewing ==
// orderChoice
orderEnabled = !isStackNone && !isStackHyperstack && !isStackBrowser;
if (src == stackChoice) {
orderValue = isStackHyperstack || isStackBrowser ?
ORDER_XYCZT : ORDER_DEFAULT;
}
// == Metadata viewing ==
// metadataBox
metadataEnabled = !isStackNone;
if (!metadataEnabled) isMetadata = true;
// omexmlBox
// NB: no other options affect omexmlBox
// == Dataset organization ==
// groupBox
// NB: no other options affect groupBox
groupEnabled = !isOME() && !isOMERO();
if (!groupEnabled) isGroup = false;
else if (src == stackChoice && isStackBrowser) isGroup = true;
// swapBox
// NB: no other options affect swapBox
// allSeriesBox
// NB: no other options affect allSeriesBox
// concatenateBox
// NB: no other options affect concatenateBox
// == Memory management ==
// virtualBox
virtualEnabled = !isStackNone;
if (!virtualEnabled) isVirtual = false;
else if (src == stackChoice && isStackBrowser) isVirtual = true;
// recordBox
recordEnabled = isVirtual;
if (!recordEnabled) isRecord = false;
// rangeBox
rangeEnabled = !isStackNone;
if (!rangeEnabled) isRange = false;
// cropBox
cropEnabled = !isStackNone;
if (!cropEnabled) isCrop = false;
// == Color options ==
// mergeBox
mergeEnabled = !isStackImage5D;
if (!mergeEnabled) isMerge = false;
// colorizeBox
colorizeEnabled = !isMerge;
if (!colorizeEnabled) isColorize = false;
// autoscaleBox
autoscaleEnabled = !isVirtual;
if (!autoscaleEnabled) isAutoscale = false;
// == Split into separate windows ==
boolean splitEnabled = !isStackNone && !isStackBrowser && !isVirtual;
// TODO: make splitting work with Data Browser/virtual stacks
// splitCBox
splitCEnabled = splitEnabled && !isMerge;
if (!splitCEnabled) isSplitC = false;
// splitZBox
splitZEnabled = splitEnabled;
if (!splitZEnabled) isSplitZ = false;
// splitTBox
splitTEnabled = splitEnabled;
if (!splitTEnabled) isSplitT = false;
// update state of each option, in case anything changed
//stackChoice.setEnabled(stackEnabled);
orderChoice.setEnabled(orderEnabled);
mergeBox.setEnabled(mergeEnabled);
colorizeBox.setEnabled(colorizeEnabled);
splitCBox.setEnabled(splitCEnabled);
splitZBox.setEnabled(splitZEnabled);
splitTBox.setEnabled(splitTEnabled);
metadataBox.setEnabled(metadataEnabled);
omexmlBox.setEnabled(omexmlEnabled);
groupBox.setEnabled(groupEnabled);
concatenateBox.setEnabled(concatenateEnabled);
rangeBox.setEnabled(rangeEnabled);
autoscaleBox.setEnabled(autoscaleEnabled);
virtualBox.setEnabled(virtualEnabled);
recordBox.setEnabled(recordEnabled);
allSeriesBox.setEnabled(allSeriesEnabled);
cropBox.setEnabled(cropEnabled);
swapBox.setEnabled(swapEnabled);
//stackChoice.select(stackValue);
orderChoice.select(orderValue);
mergeBox.setState(isMerge);
colorizeBox.setState(isColorize);
splitCBox.setState(isSplitC);
splitZBox.setState(isSplitZ);
splitTBox.setState(isSplitT);
metadataBox.setState(isMetadata);
omexmlBox.setState(isOMEXML);
groupBox.setState(isGroup);
concatenateBox.setState(isConcatenate);
rangeBox.setState(isRange);
autoscaleBox.setState(isAutoscale);
virtualBox.setState(isVirtual);
recordBox.setState(isRecord);
allSeriesBox.setState(isAllSeries);
cropBox.setState(isCrop);
swapBox.setState(isSwap);
// TODO: find better workaround for Mac OS X GUI update bug
//if (changed.size() > 0) flash(changed);
}
}
| Use a better workaround for Mac OS X component refresh bug.
| components/loci-plugins/src/loci/plugins/ImporterOptions.java | Use a better workaround for Mac OS X component refresh bug. |
|
Java | bsd-3-clause | 7387ccdf7a324adbf54f96e8f5b07a7dc33aca8a | 0 | forcedotcom/phoenix,forcedotcom/phoenix | /*******************************************************************************
* Copyright (c) 2013, Salesforce.com, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of Salesforce.com nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
******************************************************************************/
package com.salesforce.phoenix.compile;
import static com.salesforce.phoenix.util.TestUtil.ATABLE_NAME;
import static com.salesforce.phoenix.util.TestUtil.TEST_PROPERTIES;
import static com.salesforce.phoenix.util.TestUtil.and;
import static com.salesforce.phoenix.util.TestUtil.assertDegenerate;
import static com.salesforce.phoenix.util.TestUtil.columnComparison;
import static com.salesforce.phoenix.util.TestUtil.constantComparison;
import static com.salesforce.phoenix.util.TestUtil.in;
import static com.salesforce.phoenix.util.TestUtil.kvColumn;
import static com.salesforce.phoenix.util.TestUtil.multiKVFilter;
import static com.salesforce.phoenix.util.TestUtil.not;
import static com.salesforce.phoenix.util.TestUtil.or;
import static com.salesforce.phoenix.util.TestUtil.singleKVFilter;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.math.BigDecimal;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.text.Format;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Ignore;
import org.junit.Test;
import com.google.common.collect.ImmutableList;
import com.salesforce.phoenix.expression.Expression;
import com.salesforce.phoenix.expression.LiteralExpression;
import com.salesforce.phoenix.expression.RowKeyColumnExpression;
import com.salesforce.phoenix.expression.function.SubstrFunction;
import com.salesforce.phoenix.filter.RowKeyComparisonFilter;
import com.salesforce.phoenix.filter.SkipScanFilter;
import com.salesforce.phoenix.jdbc.PhoenixConnection;
import com.salesforce.phoenix.jdbc.PhoenixPreparedStatement;
import com.salesforce.phoenix.query.BaseConnectionlessQueryTest;
import com.salesforce.phoenix.query.KeyRange;
import com.salesforce.phoenix.query.QueryConstants;
import com.salesforce.phoenix.schema.PDataType;
import com.salesforce.phoenix.schema.RowKeyValueAccessor;
import com.salesforce.phoenix.util.ByteUtil;
import com.salesforce.phoenix.util.DateUtil;
import com.salesforce.phoenix.util.NumberUtil;
import com.salesforce.phoenix.util.StringUtil;
public class WhereClauseCompileTest extends BaseConnectionlessQueryTest {
@Test
public void testSingleEqualFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and a_integer=0";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
singleKVFilter(constantComparison(
CompareOp.EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
0)),
filter);
}
@Test
public void testMultiColumnEqualFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and a_string=b_string";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
multiKVFilter(columnComparison(
CompareOp.EQUAL,
BaseConnectionlessQueryTest.A_STRING,
BaseConnectionlessQueryTest.B_STRING)),
filter);
}
@Test
public void testCollapseFunctionToNull() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and substr(entity_id,null) = 'foo'";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertNull(filter);
assertArrayEquals(scan.getStartRow(),KeyRange.EMPTY_RANGE.getLowerRange());
assertArrayEquals(scan.getStopRow(),KeyRange.EMPTY_RANGE.getUpperRange());
}
private static void bindParams(PhoenixPreparedStatement stmt, List<Object> binds) throws SQLException {
for (int i = 0; i < binds.size(); i++) {
stmt.setObject(i+1, binds.get(i));
}
}
@Test
public void testAndFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id=? and a_integer=0 and a_string='foo'";
List<Object> binds = Arrays.<Object>asList(tenantId);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
bindParams(pstmt, binds);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
multiKVFilter(and(
constantComparison(
CompareOp.EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
0),
constantComparison(
CompareOp.EQUAL,
BaseConnectionlessQueryTest.A_STRING,
"foo"))),
filter);
}
@Test
public void testRHSLiteral() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and 0 >= a_integer";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
singleKVFilter(constantComparison(
CompareOp.LESS_OR_EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
0)),
filter);
}
@Test
public void testToDateFilter() throws Exception {
String tenantId = "000000000000001";
String dateStr = "2012-01-01 12:00:00";
String query = "select * from atable where organization_id='" + tenantId + "' and a_date >= to_date('" + dateStr + "')";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
Format format = DateUtil.getDateParser(DateUtil.DEFAULT_DATE_FORMAT);
Object date = format.parseObject(dateStr);
assertEquals(
singleKVFilter(constantComparison(
CompareOp.GREATER_OR_EQUAL,
BaseConnectionlessQueryTest.A_DATE,
date)),
filter);
}
private void helpTestToNumberFilter(String toNumberClause, BigDecimal expectedDecimal) throws Exception {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and x_decimal >= " + toNumberClause;
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
singleKVFilter(constantComparison(
CompareOp.GREATER_OR_EQUAL,
BaseConnectionlessQueryTest.X_DECIMAL,
expectedDecimal)),
filter);
}
private void helpTestToNumberFilterWithNoPattern(String stringValue) throws Exception {
String toNumberClause = "to_number('" + stringValue + "')";
BigDecimal expectedDecimal = NumberUtil.normalize(new BigDecimal(stringValue));
helpTestToNumberFilter(toNumberClause, expectedDecimal);
}
@Test
public void testToNumberFilterWithInteger() throws Exception {
String stringValue = "123";
helpTestToNumberFilterWithNoPattern(stringValue);
}
@Test
public void testToNumberFilterWithDecimal() throws Exception {
String stringValue = "123.33";
helpTestToNumberFilterWithNoPattern(stringValue);
}
@Test
public void testToNumberFilterWithNegativeDecimal() throws Exception {
String stringValue = "-123.33";
helpTestToNumberFilterWithNoPattern(stringValue);
}
@Test
public void testToNumberFilterWithPatternParam() throws Exception {
String toNumberClause = "to_number('$1.23333E2', '\u00A40.00000E0')";
BigDecimal expectedDecimal = NumberUtil.normalize(new BigDecimal("123.333"));
helpTestToNumberFilter(toNumberClause, expectedDecimal);
}
@Test(expected=AssertionError.class) // compileStatement() fails because zero rows are found by to_number()
public void testToNumberFilterWithPatternParamNegativeTest() throws Exception {
String toNumberClause = "to_number('$123.33', '000.00')"; // no currency sign in pattern param
BigDecimal expectedDecimal = NumberUtil.normalize(new BigDecimal("123.33"));
helpTestToNumberFilter(toNumberClause, expectedDecimal);
}
@Test
public void testRowKeyFilter() throws SQLException {
String keyPrefix = "foo";
String query = "select * from atable where substr(entity_id,1,3)=?";
List<Object> binds = Arrays.<Object>asList(keyPrefix);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
bindParams(pstmt, binds);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
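// With the leading organization_id column unconstrained, the substr
// condition on entity_id is evaluated by a RowKeyComparisonFilter
// against each row key instead of being folded into the scan range.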
assertEquals(
new RowKeyComparisonFilter(
constantComparison(CompareOp.EQUAL,
new SubstrFunction(
Arrays.<Expression>asList(
new RowKeyColumnExpression(BaseConnectionlessQueryTest.ENTITY_ID,new RowKeyValueAccessor(BaseConnectionlessQueryTest.ATABLE.getPKColumns(),1)),
LiteralExpression.newConstant(1),
LiteralExpression.newConstant(3))
),
keyPrefix), QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES),
filter);
}
@Test
public void testPaddedRowKeyFilter() throws SQLException {
String keyPrefix = "fo";
String query = "select * from atable where entity_id=?";
List<Object> binds = Arrays.<Object>asList(keyPrefix);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
bindParams(pstmt, binds);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
assertEquals(0,scan.getStartRow().length);
assertEquals(0,scan.getStopRow().length);
assertNotNull(scan.getFilter());
}
@Test
public void testPaddedStartStopKey() throws SQLException {
String tenantId = "000000000000001";
String keyPrefix = "fo";
String query = "select * from atable where organization_id=? AND entity_id=?";
List<Object> binds = Arrays.<Object>asList(tenantId,keyPrefix);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
bindParams(pstmt, binds);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
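// The CHAR(15) entity_id is padded to its full fixed width when forming
// the start row; the stop row is the next key after it because HBase
// stop rows are exclusive.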
assertArrayEquals(ByteUtil.concat(Bytes.toBytes(tenantId), StringUtil.padChar(Bytes.toBytes(keyPrefix), 15)),scan.getStartRow());
assertArrayEquals(ByteUtil.nextKey(scan.getStartRow()),scan.getStopRow());
}
@Test
public void testDegenerateRowKeyFilter() throws SQLException {
String keyPrefix = "foobar";
String query = "select * from atable where substr(entity_id,1,3)=?";
List<Object> binds = Arrays.<Object>asList(keyPrefix);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
bindParams(pstmt, binds);
QueryPlan plan = pstmt.optimizeQuery();
// Degenerate b/c "foobar" is more than 3 characters
assertDegenerate(plan.getContext());
}
@Test
public void testDegenerateBiggerThanMaxLengthVarchar() throws SQLException {
byte[] tooBigValue = new byte[101];
Arrays.fill(tooBigValue, (byte)50);
String aString = (String)PDataType.VARCHAR.toObject(tooBigValue);
String query = "select * from atable where a_string=?";
List<Object> binds = Arrays.<Object>asList(aString);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
bindParams(pstmt, binds);
QueryPlan plan = pstmt.optimizeQuery();
// Degenerate b/c a_string length is 100
assertDegenerate(plan.getContext());
}
@Test
public void testOrFilter() throws SQLException {
String tenantId = "000000000000001";
String keyPrefix = "foo";
int aInt = 2;
String query = "select * from atable where organization_id=? and (substr(entity_id,1,3)=? or a_integer=?)";
List<Object> binds = Arrays.<Object>asList(tenantId, keyPrefix, aInt);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
bindParams(pstmt, binds);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
singleKVFilter( // single b/c one column is a row key column
or(
constantComparison(
CompareOp.EQUAL,
new SubstrFunction(Arrays.<Expression> asList(
new RowKeyColumnExpression(
BaseConnectionlessQueryTest.ENTITY_ID,
new RowKeyValueAccessor(BaseConnectionlessQueryTest.ATABLE.getPKColumns(), 1)),
LiteralExpression.newConstant(1),
LiteralExpression.newConstant(3))),
keyPrefix),
constantComparison(
CompareOp.EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
aInt))),
filter);
}
@Test
public void testTypeMismatch() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and a_integer > 'foo'";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
try {
pstmt.optimizeQuery();
fail();
} catch (SQLException e) {
assertTrue(e.getMessage().contains("Type mismatch"));
}
}
@Test
public void testAndFalseFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and a_integer=0 and 2=3";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
assertDegenerate(plan.getContext());
}
@Test
public void testFalseFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and 2=3";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
assertDegenerate(plan.getContext());
}
@Test
public void testTrueFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and 2<=2";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
assertNull(scan.getFilter());
byte[] startRow = PDataType.VARCHAR.toBytes(tenantId);
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = startRow;
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
}
@Test
public void testAndTrueFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and a_integer=0 and 2<3";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
singleKVFilter(constantComparison(
CompareOp.EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
0)),
filter);
byte[] startRow = PDataType.VARCHAR.toBytes(tenantId);
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = startRow;
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
}
@Test
public void testOrFalseFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and (a_integer=0 or 3!=3)";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
singleKVFilter(constantComparison(
CompareOp.EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
0)),
filter);
byte[] startRow = PDataType.VARCHAR.toBytes(tenantId);
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = startRow;
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
}
@Test
public void testOrTrueFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and (a_integer=0 or 3>2)";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertNull(filter);
byte[] startRow = PDataType.VARCHAR.toBytes(tenantId);
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = startRow;
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
}
@Test
public void testInFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and a_string IN ('a','b')";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
byte[] startRow = PDataType.VARCHAR.toBytes(tenantId);
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = startRow;
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
Filter filter = scan.getFilter();
assertEquals(
singleKVFilter(in(
kvColumn(BaseConnectionlessQueryTest.A_STRING),
"a",
"b")),
filter);
}
@Test
public void testInListFilter() throws SQLException {
String tenantId1 = "000000000000001";
String tenantId2 = "000000000000002";
String tenantId3 = "000000000000003";
String query = String.format("select * from %s where organization_id IN ('%s','%s','%s')",
ATABLE_NAME, tenantId1, tenantId3, tenantId2);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
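// An IN list over the leading row key column compiles to a skip scan
// over sorted point ranges, with the scan bounded by the smallest and
// largest keys in the list.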
byte[] startRow = PDataType.VARCHAR.toBytes(tenantId1);
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = PDataType.VARCHAR.toBytes(tenantId3);
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
Filter filter = scan.getFilter();
assertEquals(
new SkipScanFilter(
ImmutableList.of(Arrays.asList(
pointRange(tenantId1),
pointRange(tenantId2),
pointRange(tenantId3))),
plan.getContext().getResolver().getTables().get(0).getTable().getRowKeySchema()),
filter);
}
@Test @Ignore("OR not yet optimized")
public void testOr2InFilter() throws SQLException {
String tenantId1 = "000000000000001";
String tenantId2 = "000000000000002";
String tenantId3 = "000000000000003";
String query = String.format("select * from %s where organization_id='%s' OR organization_id='%s' OR organization_id='%s'",
ATABLE_NAME, tenantId1, tenantId3, tenantId2);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
new SkipScanFilter(
ImmutableList.of(Arrays.asList(
pointRange(tenantId1),
pointRange(tenantId2),
pointRange(tenantId3))),
plan.getContext().getResolver().getTables().get(0).getTable().getRowKeySchema()),
filter);
byte[] startRow = PDataType.VARCHAR.toBytes(tenantId1);
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = PDataType.VARCHAR.toBytes(tenantId3);
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
}
@Test
public void testSecondPkColInListFilter() throws SQLException {
String tenantId = "000000000000001";
String entityId1 = "00000000000000X";
String entityId2 = "00000000000000Y";
String query = String.format("select * from %s where organization_id='%s' AND entity_id IN ('%s','%s')",
ATABLE_NAME, tenantId, entityId1, entityId2);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
byte[] startRow = PDataType.VARCHAR.toBytes(tenantId + entityId1);
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = PDataType.VARCHAR.toBytes(tenantId + entityId2);
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
Filter filter = scan.getFilter();
assertEquals(
new SkipScanFilter(
ImmutableList.of(
Arrays.asList(pointRange(tenantId)),
Arrays.asList(
pointRange(entityId1),
pointRange(entityId2))),
plan.getContext().getResolver().getTables().get(0).getTable().getRowKeySchema()),
filter);
}
@Test
public void testInListWithAnd1GTEFilter() throws SQLException {
String tenantId1 = "000000000000001";
String tenantId2 = "000000000000002";
String tenantId3 = "000000000000003";
String entityId1 = "00000000000000X";
String entityId2 = "00000000000000Y";
String query = String.format("select * from %s where organization_id IN ('%s','%s','%s') AND entity_id>='%s' AND entity_id<='%s'",
ATABLE_NAME, tenantId1, tenantId3, tenantId2, entityId1, entityId2);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
new SkipScanFilter(
ImmutableList.of(
Arrays.asList(
pointRange(tenantId1),
pointRange(tenantId2),
pointRange(tenantId3)),
Arrays.asList(PDataType.CHAR.getKeyRange(
Bytes.toBytes(entityId1),
true,
Bytes.toBytes(entityId2),
true))),
plan.getContext().getResolver().getTables().get(0).getTable().getRowKeySchema()),
filter);
}
@Test
public void testInListWithAnd1Filter() throws SQLException {
String tenantId1 = "000000000000001";
String tenantId2 = "000000000000002";
String tenantId3 = "000000000000003";
String entityId = "00000000000000X";
String query = String.format("select * from %s where organization_id IN ('%s','%s','%s') AND entity_id='%s'",
ATABLE_NAME, tenantId1, tenantId3, tenantId2, entityId);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
new SkipScanFilter(
ImmutableList.of(
Arrays.asList(
pointRange(tenantId1),
pointRange(tenantId2),
pointRange(tenantId3)),
Arrays.asList(pointRange(entityId))),
plan.getContext().getResolver().getTables().get(0).getTable().getRowKeySchema()),
filter);
}
@Test
public void testInListWithAnd1FilterScankey() throws SQLException {
String tenantId1 = "000000000000001";
String tenantId2 = "000000000000002";
String tenantId3 = "000000000000003";
String entityId = "00000000000000X";
String query = String.format("select * from %s where organization_id IN ('%s','%s','%s') AND entity_id='%s'",
ATABLE_NAME, tenantId1, tenantId3, tenantId2, entityId);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
byte[] startRow = ByteUtil.concat(PDataType.VARCHAR.toBytes(tenantId1), PDataType.VARCHAR.toBytes(entityId));
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = ByteUtil.concat(PDataType.VARCHAR.toBytes(tenantId3), PDataType.VARCHAR.toBytes(entityId));
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
// TODO: validate scan ranges
}
private static KeyRange pointRange(String id) {
return pointRange(Bytes.toBytes(id));
}
private static KeyRange pointRange(byte[] bytes) {
return KeyRange.POINT.apply(bytes);
}
@Test
public void testInListWithAnd2Filter() throws SQLException {
String tenantId1 = "000000000000001";
String tenantId2 = "000000000000002";
String tenantId3 = "000000000000003";
String entityId1 = "00000000000000X";
String entityId2 = "00000000000000Y";
String query = String.format("select * from %s where organization_id IN ('%s','%s','%s') AND entity_id IN ('%s', '%s')",
ATABLE_NAME, tenantId1, tenantId3, tenantId2, entityId1, entityId2);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
new SkipScanFilter(
ImmutableList.of(
Arrays.asList(
pointRange(tenantId1),
pointRange(tenantId2),
pointRange(tenantId3)),
Arrays.asList(
pointRange(entityId1),
pointRange(entityId2))),
plan.getContext().getResolver().getTables().get(0).getTable().getRowKeySchema()),
filter);
}
@Test
public void testPartialRangeFilter() throws SQLException {
// I know these IDs are ridiculous, but users can write queries that look like this
String tenantId1 = "001";
String tenantId2 = "02";
String query = String.format("select * from %s where organization_id > '%s' AND organization_id < '%s'",
ATABLE_NAME, tenantId1, tenantId2);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
assertNull(scan.getFilter());
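// Both partial keys are padded to the full CHAR(15) width: the lower
// bound is the key just past the padded '001' (strict >), the upper
// bound the padded '02' (strict <), and no extra filter is needed.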
byte[] wideLower = ByteUtil.nextKey(StringUtil.padChar(Bytes.toBytes(tenantId1), 15));
byte[] wideUpper = StringUtil.padChar(Bytes.toBytes(tenantId2), 15);
assertArrayEquals(wideLower, scan.getStartRow());
assertArrayEquals(wideUpper, scan.getStopRow());
}
@Test
public void testInListWithAnd2FilterScanKey() throws SQLException {
String tenantId1 = "000000000000001";
String tenantId2 = "000000000000002";
String tenantId3 = "000000000000003";
String entityId1 = "00000000000000X";
String entityId2 = "00000000000000Y";
String query = String.format("select * from %s where organization_id IN ('%s','%s','%s') AND entity_id IN ('%s', '%s')",
ATABLE_NAME, tenantId1, tenantId3, tenantId2, entityId1, entityId2);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
byte[] startRow = ByteUtil.concat(PDataType.VARCHAR.toBytes(tenantId1),PDataType.VARCHAR.toBytes(entityId1));
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = ByteUtil.concat(PDataType.VARCHAR.toBytes(tenantId3),PDataType.VARCHAR.toBytes(entityId2));
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
// TODO: validate scan ranges
}
@Test
public void testBetweenFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and a_integer between 0 and 10";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
singleKVFilter(and(
constantComparison(
CompareOp.GREATER_OR_EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
0),
constantComparison(
CompareOp.LESS_OR_EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
10))),
filter);
}
@Test
public void testNotBetweenFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and a_integer not between 0 and 10";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
singleKVFilter(not(and(
constantComparison(
CompareOp.GREATER_OR_EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
0),
constantComparison(
CompareOp.LESS_OR_EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
10)))).toString(),
filter.toString());
}
@Test
@Ignore // FIXME: broken because we prune multi-tenant tables from PMetaData on tenant-specific connection
public void testTenantConstraintsAddedToScan() throws SQLException {
String tenantTypeId = "5678";
String tenantId = "000000000000123";
String url = getUrl(tenantId);
createTestTable(getUrl(), "create table base_table_for_tenant_filter_test (tenant_id char(15) not null, type_id char(4) not null, " +
"id char(5) not null, a_integer integer, a_string varchar(100) constraint pk primary key (tenant_id, type_id, id)) multi_tenant=true");
createTestTable(url, "create view tenant_filter_test (tenant_col integer) AS SELECT * FROM BASE_TABLE_FOR_TENANT_FILTER_TEST WHERE type_id= '" + tenantTypeId + "'");
String query = "select * from tenant_filter_test where a_integer=0 and a_string='foo'";
PhoenixConnection pconn = DriverManager.getConnection(url, TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
multiKVFilter(and(
constantComparison(
CompareOp.EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
0),
constantComparison(
CompareOp.EQUAL,
BaseConnectionlessQueryTest.A_STRING,
"foo"))),
filter);
byte[] startRow = PDataType.VARCHAR.toBytes(tenantId + tenantTypeId);
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = startRow;
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
}
@Test
@Ignore // FIXME: broken because we prune multi-tenant tables from PMetaData on tenant-specific connection
public void testTenantConstraintsAddedToScanWithNullTenantTypeId() throws SQLException {
String tenantId = "000000000000123";
createTestTable(getUrl(), "create table base_table_for_tenant_filter_test (tenant_id char(15) not null, " +
"id char(5) not null, a_integer integer, a_string varchar(100) constraint pk primary key (tenant_id, id)) multi_tenant=true");
createTestTable(getUrl(tenantId), "create view tenant_filter_test (tenant_col integer) AS SELECT * FROM BASE_TABLE_FOR_TENANT_FILTER_TEST");
String query = "select * from tenant_filter_test where a_integer=0 and a_string='foo'";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(tenantId), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
multiKVFilter(and(
constantComparison(
CompareOp.EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
0),
constantComparison(
CompareOp.EQUAL,
BaseConnectionlessQueryTest.A_STRING,
"foo"))),
filter);
byte[] startRow = PDataType.VARCHAR.toBytes(tenantId);
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = startRow;
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
}
}
| phoenix-core/src/test/java/com/salesforce/phoenix/compile/WhereClauseCompileTest.java | /*******************************************************************************
* Copyright (c) 2013, Salesforce.com, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of Salesforce.com nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
******************************************************************************/
package com.salesforce.phoenix.compile;
import static com.salesforce.phoenix.util.TestUtil.ATABLE_NAME;
import static com.salesforce.phoenix.util.TestUtil.TEST_PROPERTIES;
import static com.salesforce.phoenix.util.TestUtil.and;
import static com.salesforce.phoenix.util.TestUtil.assertDegenerate;
import static com.salesforce.phoenix.util.TestUtil.columnComparison;
import static com.salesforce.phoenix.util.TestUtil.constantComparison;
import static com.salesforce.phoenix.util.TestUtil.in;
import static com.salesforce.phoenix.util.TestUtil.kvColumn;
import static com.salesforce.phoenix.util.TestUtil.multiKVFilter;
import static com.salesforce.phoenix.util.TestUtil.not;
import static com.salesforce.phoenix.util.TestUtil.or;
import static com.salesforce.phoenix.util.TestUtil.singleKVFilter;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.math.BigDecimal;
import java.sql.DriverManager;
import java.sql.SQLException;
import java.text.Format;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Ignore;
import org.junit.Test;
import com.google.common.collect.ImmutableList;
import com.salesforce.phoenix.expression.Expression;
import com.salesforce.phoenix.expression.LiteralExpression;
import com.salesforce.phoenix.expression.RowKeyColumnExpression;
import com.salesforce.phoenix.expression.function.SubstrFunction;
import com.salesforce.phoenix.filter.RowKeyComparisonFilter;
import com.salesforce.phoenix.filter.SkipScanFilter;
import com.salesforce.phoenix.jdbc.PhoenixConnection;
import com.salesforce.phoenix.jdbc.PhoenixPreparedStatement;
import com.salesforce.phoenix.query.BaseConnectionlessQueryTest;
import com.salesforce.phoenix.query.KeyRange;
import com.salesforce.phoenix.query.QueryConstants;
import com.salesforce.phoenix.schema.PDataType;
import com.salesforce.phoenix.schema.RowKeyValueAccessor;
import com.salesforce.phoenix.util.ByteUtil;
import com.salesforce.phoenix.util.DateUtil;
import com.salesforce.phoenix.util.NumberUtil;
import com.salesforce.phoenix.util.StringUtil;
public class WhereClauseCompileTest extends BaseConnectionlessQueryTest {
@Test
public void testSingleEqualFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and a_integer=0";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
singleKVFilter(constantComparison(
CompareOp.EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
0)),
filter);
}
@Test
public void testMultiColumnEqualFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and a_string=b_string";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
multiKVFilter(columnComparison(
CompareOp.EQUAL,
BaseConnectionlessQueryTest.A_STRING,
BaseConnectionlessQueryTest.B_STRING)),
filter);
}
@Test
public void testCollapseFunctionToNull() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and substr(entity_id,null) = 'foo'";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertNull(filter);
assertArrayEquals(scan.getStartRow(),KeyRange.EMPTY_RANGE.getLowerRange());
assertArrayEquals(scan.getStopRow(),KeyRange.EMPTY_RANGE.getUpperRange());
}
private static void bindParams(PhoenixPreparedStatement stmt, List<Object> binds) throws SQLException {
for (int i = 0; i < binds.size(); i++) {
stmt.setObject(i+1, binds.get(i));
}
}
@Test
public void testAndFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id=? and a_integer=0 and a_string='foo'";
List<Object> binds = Arrays.<Object>asList(tenantId);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
bindParams(pstmt, binds);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
multiKVFilter(and(
constantComparison(
CompareOp.EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
0),
constantComparison(
CompareOp.EQUAL,
BaseConnectionlessQueryTest.A_STRING,
"foo"))),
filter);
}
@Test
public void testRHSLiteral() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and 0 >= a_integer";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
singleKVFilter(constantComparison(
CompareOp.LESS_OR_EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
0)),
filter);
}
@Test
public void testToDateFilter() throws Exception {
String tenantId = "000000000000001";
String dateStr = "2012-01-01 12:00:00";
String query = "select * from atable where organization_id='" + tenantId + "' and a_date >= to_date('" + dateStr + "')";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
Format format = DateUtil.getDateParser(DateUtil.DEFAULT_DATE_FORMAT);
Object date = format.parseObject(dateStr);
assertEquals(
singleKVFilter(constantComparison(
CompareOp.GREATER_OR_EQUAL,
BaseConnectionlessQueryTest.A_DATE,
date)),
filter);
}
private void helpTestToNumberFilter(String toNumberClause, BigDecimal expectedDecimal) throws Exception {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and x_decimal >= " + toNumberClause;
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
singleKVFilter(constantComparison(
CompareOp.GREATER_OR_EQUAL,
BaseConnectionlessQueryTest.X_DECIMAL,
expectedDecimal)),
filter);
}
private void helpTestToNumberFilterWithNoPattern(String stringValue) throws Exception {
String toNumberClause = "to_number('" + stringValue + "')";
BigDecimal expectedDecimal = NumberUtil.normalize(new BigDecimal(stringValue));
helpTestToNumberFilter(toNumberClause, expectedDecimal);
}
@Test
public void testToNumberFilterWithInteger() throws Exception {
String stringValue = "123";
helpTestToNumberFilterWithNoPattern(stringValue);
}
@Test
public void testToNumberFilterWithDecimal() throws Exception {
String stringValue = "123.33";
helpTestToNumberFilterWithNoPattern(stringValue);
}
@Test
public void testToNumberFilterWithNegativeDecimal() throws Exception {
String stringValue = "-123.33";
helpTestToNumberFilterWithNoPattern(stringValue);
}
@Test
public void testToNumberFilterWithPatternParam() throws Exception {
String toNumberClause = "to_number('$1.23333E2', '\u00A40.00000E0')";
BigDecimal expectedDecimal = NumberUtil.normalize(new BigDecimal("123.333"));
helpTestToNumberFilter(toNumberClause, expectedDecimal);
}
@Test(expected=AssertionError.class) // compileStatement() fails because zero rows are found by to_number()
public void testToNumberFilterWithPatternParamNegativeTest() throws Exception {
String toNumberClause = "to_number('$123.33', '000.00')"; // no currency sign in pattern param
BigDecimal expectedDecimal = NumberUtil.normalize(new BigDecimal("123.33"));
helpTestToNumberFilter(toNumberClause, expectedDecimal);
}
@Test
public void testRowKeyFilter() throws SQLException {
String keyPrefix = "foo";
String query = "select * from atable where substr(entity_id,1,3)=?";
List<Object> binds = Arrays.<Object>asList(keyPrefix);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
bindParams(pstmt, binds);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
new RowKeyComparisonFilter(
constantComparison(CompareOp.EQUAL,
new SubstrFunction(
Arrays.<Expression>asList(
new RowKeyColumnExpression(BaseConnectionlessQueryTest.ENTITY_ID,new RowKeyValueAccessor(BaseConnectionlessQueryTest.ATABLE.getPKColumns(),1)),
LiteralExpression.newConstant(1),
LiteralExpression.newConstant(3))
),
keyPrefix), QueryConstants.DEFAULT_COLUMN_FAMILY_BYTES),
filter);
}
@Test
public void testPaddedRowKeyFilter() throws SQLException {
String keyPrefix = "fo";
String query = "select * from atable where entity_id=?";
List<Object> binds = Arrays.<Object>asList(keyPrefix);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
bindParams(pstmt, binds);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
assertEquals(0,scan.getStartRow().length);
assertEquals(0,scan.getStopRow().length);
assertNotNull(scan.getFilter());
}
@Test
public void testPaddedStartStopKey() throws SQLException {
String tenantId = "000000000000001";
String keyPrefix = "fo";
String query = "select * from atable where organization_id=? AND entity_id=?";
List<Object> binds = Arrays.<Object>asList(tenantId,keyPrefix);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
bindParams(pstmt, binds);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
assertArrayEquals(ByteUtil.concat(Bytes.toBytes(tenantId), StringUtil.padChar(Bytes.toBytes(keyPrefix), 15)),scan.getStartRow());
assertArrayEquals(ByteUtil.nextKey(scan.getStartRow()),scan.getStopRow());
}
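    /*
        Editor's note (an inference from the padding above, not in the original
        test): entity_id is evidently a fixed-width CHAR(15) key column, so the
        two-character prefix "fo" must be right-padded to 15 bytes with
        StringUtil.padChar before being concatenated with the 15-byte
        organization_id to form the scan's start key; the stop key is then
        simply the next key after it.
    */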
@Test
public void testDegenerateRowKeyFilter() throws SQLException {
String keyPrefix = "foobar";
String query = "select * from atable where substr(entity_id,1,3)=?";
List<Object> binds = Arrays.<Object>asList(keyPrefix);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
bindParams(pstmt, binds);
QueryPlan plan = pstmt.optimizeQuery();
// Degenerate b/c "foobar" is more than 3 characters
assertDegenerate(plan.getContext());
}
@Test
public void testDegenerateBiggerThanMaxLengthVarchar() throws SQLException {
byte[] tooBigValue = new byte[101];
Arrays.fill(tooBigValue, (byte)50);
String aString = (String)PDataType.VARCHAR.toObject(tooBigValue);
String query = "select * from atable where a_string=?";
List<Object> binds = Arrays.<Object>asList(aString);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
bindParams(pstmt, binds);
QueryPlan plan = pstmt.optimizeQuery();
// Degenerate b/c a_string length is 100
assertDegenerate(plan.getContext());
}
@Test
public void testOrFilter() throws SQLException {
String tenantId = "000000000000001";
String keyPrefix = "foo";
int aInt = 2;
String query = "select * from atable where organization_id=? and (substr(entity_id,1,3)=? or a_integer=?)";
List<Object> binds = Arrays.<Object>asList(tenantId, keyPrefix, aInt);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
bindParams(pstmt, binds);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
singleKVFilter( // single b/c one column is a row key column
or(
constantComparison(
CompareOp.EQUAL,
new SubstrFunction(Arrays.<Expression> asList(
new RowKeyColumnExpression(
BaseConnectionlessQueryTest.ENTITY_ID,
new RowKeyValueAccessor(BaseConnectionlessQueryTest.ATABLE.getPKColumns(), 1)),
LiteralExpression.newConstant(1),
LiteralExpression.newConstant(3))),
keyPrefix),
constantComparison(
CompareOp.EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
aInt))),
filter);
}
@Test
public void testTypeMismatch() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and a_integer > 'foo'";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
try {
pstmt.optimizeQuery();
fail();
} catch (SQLException e) {
assertTrue(e.getMessage().contains("Type mismatch"));
}
}
@Test
public void testAndFalseFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and a_integer=0 and 2=3";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
assertDegenerate(plan.getContext());
}
@Test
public void testFalseFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and 2=3";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
assertDegenerate(plan.getContext());
}
@Test
public void testTrueFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and 2<=2";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
assertNull(scan.getFilter());
byte[] startRow = PDataType.VARCHAR.toBytes(tenantId);
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = startRow;
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
}
@Test
public void testAndTrueFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and a_integer=0 and 2<3";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
singleKVFilter(constantComparison(
CompareOp.EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
0)),
filter);
byte[] startRow = PDataType.VARCHAR.toBytes(tenantId);
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = startRow;
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
}
@Test
public void testOrFalseFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and (a_integer=0 or 3!=3)";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
singleKVFilter(constantComparison(
CompareOp.EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
0)),
filter);
byte[] startRow = PDataType.VARCHAR.toBytes(tenantId);
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = startRow;
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
}
@Test
public void testOrTrueFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and (a_integer=0 or 3>2)";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertNull(filter);
byte[] startRow = PDataType.VARCHAR.toBytes(tenantId);
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = startRow;
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
}
@Test
public void testInFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and a_string IN ('a','b')";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
byte[] startRow = PDataType.VARCHAR.toBytes(tenantId);
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = startRow;
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
Filter filter = scan.getFilter();
assertEquals(
singleKVFilter(in(
kvColumn(BaseConnectionlessQueryTest.A_STRING),
"a",
"b")),
filter);
}
@Test
public void testInListFilter() throws SQLException {
String tenantId1 = "000000000000001";
String tenantId2 = "000000000000002";
String tenantId3 = "000000000000003";
String query = String.format("select * from %s where organization_id IN ('%s','%s','%s')",
ATABLE_NAME, tenantId1, tenantId3, tenantId2);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
byte[] startRow = PDataType.VARCHAR.toBytes(tenantId1);
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = PDataType.VARCHAR.toBytes(tenantId3);
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
Filter filter = scan.getFilter();
assertEquals(
new SkipScanFilter(
ImmutableList.of(Arrays.asList(
pointRange(tenantId1),
pointRange(tenantId2),
pointRange(tenantId3))),
plan.getContext().getResolver().getTables().get(0).getTable().getRowKeySchema()),
filter);
}
@Test @Ignore("OR not yet optimized")
public void testOr2InFilter() throws SQLException {
String tenantId1 = "000000000000001";
String tenantId2 = "000000000000002";
String tenantId3 = "000000000000003";
String query = String.format("select * from %s where organization_id='%s' OR organization_id='%s' OR organization_id='%s'",
ATABLE_NAME, tenantId1, tenantId3, tenantId2);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
new SkipScanFilter(
ImmutableList.of(Arrays.asList(
pointRange(tenantId1),
pointRange(tenantId2),
pointRange(tenantId3))),
plan.getContext().getResolver().getTables().get(0).getTable().getRowKeySchema()),
filter);
byte[] startRow = PDataType.VARCHAR.toBytes(tenantId1);
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = PDataType.VARCHAR.toBytes(tenantId3);
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
}
@Test
public void testSecondPkColInListFilter() throws SQLException {
String tenantId = "000000000000001";
String entityId1 = "00000000000000X";
String entityId2 = "00000000000000Y";
String query = String.format("select * from %s where organization_id='%s' AND entity_id IN ('%s','%s')",
ATABLE_NAME, tenantId, entityId1, entityId2);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
byte[] startRow = PDataType.VARCHAR.toBytes(tenantId + entityId1);
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = PDataType.VARCHAR.toBytes(tenantId + entityId2);
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
Filter filter = scan.getFilter();
assertEquals(
new SkipScanFilter(
ImmutableList.of(
Arrays.asList(pointRange(tenantId)),
Arrays.asList(
pointRange(entityId1),
pointRange(entityId2))),
plan.getContext().getResolver().getTables().get(0).getTable().getRowKeySchema()),
filter);
}
@Test
public void testInListWithAnd1GTEFilter() throws SQLException {
String tenantId1 = "000000000000001";
String tenantId2 = "000000000000002";
String tenantId3 = "000000000000003";
String entityId1 = "00000000000000X";
String entityId2 = "00000000000000Y";
String query = String.format("select * from %s where organization_id IN ('%s','%s','%s') AND entity_id>='%s' AND entity_id<='%s'",
ATABLE_NAME, tenantId1, tenantId3, tenantId2, entityId1, entityId2);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
new SkipScanFilter(
ImmutableList.of(
Arrays.asList(
pointRange(tenantId1),
pointRange(tenantId2),
pointRange(tenantId3)),
Arrays.asList(PDataType.CHAR.getKeyRange(
Bytes.toBytes(entityId1),
true,
Bytes.toBytes(entityId2),
true))),
plan.getContext().getResolver().getTables().get(0).getTable().getRowKeySchema()),
filter);
}
@Test
public void testInListWithAnd1Filter() throws SQLException {
String tenantId1 = "000000000000001";
String tenantId2 = "000000000000002";
String tenantId3 = "000000000000003";
String entityId = "00000000000000X";
String query = String.format("select * from %s where organization_id IN ('%s','%s','%s') AND entity_id='%s'",
ATABLE_NAME, tenantId1, tenantId3, tenantId2, entityId);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
new SkipScanFilter(
ImmutableList.of(
Arrays.asList(
pointRange(tenantId1),
pointRange(tenantId2),
pointRange(tenantId3)),
Arrays.asList(pointRange(entityId))),
plan.getContext().getResolver().getTables().get(0).getTable().getRowKeySchema()),
filter);
}
@Test
public void testInListWithAnd1FilterScankey() throws SQLException {
String tenantId1 = "000000000000001";
String tenantId2 = "000000000000002";
String tenantId3 = "000000000000003";
String entityId = "00000000000000X";
String query = String.format("select * from %s where organization_id IN ('%s','%s','%s') AND entity_id='%s'",
ATABLE_NAME, tenantId1, tenantId3, tenantId2, entityId);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
byte[] startRow = ByteUtil.concat(PDataType.VARCHAR.toBytes(tenantId1), PDataType.VARCHAR.toBytes(entityId));
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = ByteUtil.concat(PDataType.VARCHAR.toBytes(tenantId3), PDataType.VARCHAR.toBytes(entityId));
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
// TODO: validate scan ranges
}
private static KeyRange pointRange(String id) {
return pointRange(Bytes.toBytes(id));
}
private static KeyRange pointRange(byte[] bytes) {
return KeyRange.POINT.apply(bytes);
}
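    /*
        Editor's note (added for clarity): KeyRange.POINT.apply(bytes) builds a
        degenerate range whose lower and upper bounds are the same key, which is
        how the SkipScanFilter expectations in these tests encode exact-match
        slots; contrast with the inclusive bounded range built via
        PDataType.CHAR.getKeyRange(...) in testInListWithAnd1GTEFilter.
    */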
@Test
public void testInListWithAnd2Filter() throws SQLException {
String tenantId1 = "000000000000001";
String tenantId2 = "000000000000002";
String tenantId3 = "000000000000003";
String entityId1 = "00000000000000X";
String entityId2 = "00000000000000Y";
String query = String.format("select * from %s where organization_id IN ('%s','%s','%s') AND entity_id IN ('%s', '%s')",
ATABLE_NAME, tenantId1, tenantId3, tenantId2, entityId1, entityId2);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
new SkipScanFilter(
ImmutableList.of(
Arrays.asList(
pointRange(tenantId1),
pointRange(tenantId2),
pointRange(tenantId3)),
Arrays.asList(
pointRange(entityId1),
pointRange(entityId2))),
plan.getContext().getResolver().getTables().get(0).getTable().getRowKeySchema()),
filter);
}
@Test
public void testPartialRangeFilter() throws SQLException {
        // I know these ids are ridiculous, but users can write queries that look like this
String tenantId1 = "001";
String tenantId2 = "02";
String query = String.format("select * from %s where organization_id > '%s' AND organization_id < '%s'",
ATABLE_NAME, tenantId1, tenantId2);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
assertNull(scan.getFilter());
byte[] wideLower = ByteUtil.nextKey(StringUtil.padChar(Bytes.toBytes(tenantId1), 15));
byte[] wideUpper = StringUtil.padChar(Bytes.toBytes(tenantId2), 15);
assertArrayEquals(wideLower, scan.getStartRow());
assertArrayEquals(wideUpper, scan.getStopRow());
}
@Test
public void testInListWithAnd2FilterScanKey() throws SQLException {
String tenantId1 = "000000000000001";
String tenantId2 = "000000000000002";
String tenantId3 = "000000000000003";
String entityId1 = "00000000000000X";
String entityId2 = "00000000000000Y";
String query = String.format("select * from %s where organization_id IN ('%s','%s','%s') AND entity_id IN ('%s', '%s')",
ATABLE_NAME, tenantId1, tenantId3, tenantId2, entityId1, entityId2);
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
byte[] startRow = ByteUtil.concat(PDataType.VARCHAR.toBytes(tenantId1),PDataType.VARCHAR.toBytes(entityId1));
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = ByteUtil.concat(PDataType.VARCHAR.toBytes(tenantId3),PDataType.VARCHAR.toBytes(entityId2));
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
// TODO: validate scan ranges
}
@Test
public void testBetweenFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and a_integer between 0 and 10";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
singleKVFilter(and(
constantComparison(
CompareOp.GREATER_OR_EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
0),
constantComparison(
CompareOp.LESS_OR_EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
10))),
filter);
}
@Test
public void testNotBetweenFilter() throws SQLException {
String tenantId = "000000000000001";
String query = "select * from atable where organization_id='" + tenantId + "' and a_integer not between 0 and 10";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
singleKVFilter(not(and(
constantComparison(
CompareOp.GREATER_OR_EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
0),
constantComparison(
CompareOp.LESS_OR_EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
10)))).toString(),
filter.toString());
}
@Test
public void testTenantConstraintsAddedToScan() throws SQLException {
String tenantTypeId = "5678";
String tenantId = "000000000000123";
String url = getUrl(tenantId);
createTestTable(getUrl(), "create table base_table_for_tenant_filter_test (tenant_id char(15) not null, type_id char(4) not null, " +
"id char(5) not null, a_integer integer, a_string varchar(100) constraint pk primary key (tenant_id, type_id, id)) multi_tenant=true");
createTestTable(url, "create view tenant_filter_test (tenant_col integer) AS SELECT * FROM BASE_TABLE_FOR_TENANT_FILTER_TEST WHERE type_id= '" + tenantTypeId + "'");
String query = "select * from tenant_filter_test where a_integer=0 and a_string='foo'";
PhoenixConnection pconn = DriverManager.getConnection(url, TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
multiKVFilter(and(
constantComparison(
CompareOp.EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
0),
constantComparison(
CompareOp.EQUAL,
BaseConnectionlessQueryTest.A_STRING,
"foo"))),
filter);
byte[] startRow = PDataType.VARCHAR.toBytes(tenantId + tenantTypeId);
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = startRow;
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
}
@Test
public void testTenantConstraintsAddedToScanWithNullTenantTypeId() throws SQLException {
String tenantId = "000000000000123";
createTestTable(getUrl(), "create table base_table_for_tenant_filter_test (tenant_id char(15) not null, " +
"id char(5) not null, a_integer integer, a_string varchar(100) constraint pk primary key (tenant_id, id)) multi_tenant=true");
createTestTable(getUrl(tenantId), "create view tenant_filter_test (tenant_col integer) AS SELECT * FROM BASE_TABLE_FOR_TENANT_FILTER_TEST");
String query = "select * from tenant_filter_test where a_integer=0 and a_string='foo'";
PhoenixConnection pconn = DriverManager.getConnection(getUrl(tenantId), TEST_PROPERTIES).unwrap(PhoenixConnection.class);
PhoenixPreparedStatement pstmt = new PhoenixPreparedStatement(pconn, query);
QueryPlan plan = pstmt.optimizeQuery();
Scan scan = plan.getContext().getScan();
Filter filter = scan.getFilter();
assertEquals(
multiKVFilter(and(
constantComparison(
CompareOp.EQUAL,
BaseConnectionlessQueryTest.A_INTEGER,
0),
constantComparison(
CompareOp.EQUAL,
BaseConnectionlessQueryTest.A_STRING,
"foo"))),
filter);
byte[] startRow = PDataType.VARCHAR.toBytes(tenantId);
assertArrayEquals(startRow, scan.getStartRow());
byte[] stopRow = startRow;
assertArrayEquals(ByteUtil.nextKey(stopRow), scan.getStopRow());
}
}
| Adding ignore test to two WhereClauseCompileTest broken due to caching changes (which are correct, but cause tables to remain in cache as required)
| phoenix-core/src/test/java/com/salesforce/phoenix/compile/WhereClauseCompileTest.java | Adding ignore test to two WhereClauseCompileTest broken due to caching changes (which are correct, but cause tables to remain in cache as required) |
|
Java | bsd-3-clause | de963c180507064e78a129b2472f3ed331bff802 | 0 | Unidata/netcdf-java,Unidata/netcdf-java,Unidata/netcdf-java,Unidata/netcdf-java,Unidata/netcdf-java,Unidata/netcdf-java,Unidata/netcdf-java | /*
* Copyright (c) 1998 - 2010. University Corporation for Atmospheric Research/Unidata
* Portions of this software were developed by the Unidata Program at the
* University Corporation for Atmospheric Research.
*
* Access and use of this software shall impose the following obligations
* and understandings on the user. The user is granted the right, without
* any fee or cost, to use, copy, modify, alter, enhance and distribute
* this software, and any derivative works thereof, and its supporting
* documentation for any purpose whatsoever, provided that this entire
* notice appears in all copies of the software, derivative works and
* supporting documentation. Further, UCAR requests that the user credit
* UCAR/Unidata in any publications that result from the use of this
* software or in any product that includes this software. The names UCAR
* and/or Unidata, however, may not be used in any advertising or publicity
* to endorse or promote any products or commercial entity unless specific
* written permission is obtained from UCAR/Unidata. The user also
* understands that UCAR/Unidata is not obligated to provide the user with
* any support, consulting, training or assistance of any kind with regard
* to the use, operation and performance of this software nor to provide
* the user with any updates, revisions, new versions or "bug fixes."
*
* THIS SOFTWARE IS PROVIDED BY UCAR/UNIDATA "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL UCAR/UNIDATA BE LIABLE FOR ANY SPECIAL,
* INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
* FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
* NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
* WITH THE ACCESS, USE OR PERFORMANCE OF THIS SOFTWARE.
*/
package ucar.nc2.ft.point;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import ucar.ma2.DataType;
import ucar.ma2.StructureData;
import ucar.ma2.StructureMembers;
import ucar.nc2.VariableSimpleIF;
import ucar.nc2.constants.FeatureType;
import ucar.nc2.dataset.NetcdfDataset;
import ucar.nc2.ft.*;
import ucar.nc2.time.CalendarDateRange;
import ucar.nc2.units.DateRange;
import ucar.unidata.geoloc.LatLonPointImpl;
import ucar.unidata.geoloc.LatLonRect;
import ucar.unidata.geoloc.Station;
import ucar.unidata.test.util.NeedsCdmUnitTest;
import ucar.unidata.test.util.TestDir;
import java.io.IOException;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
public class TestMiscPointFeature {
@Test
public void testProblem() throws IOException {
String location = TestDir.cdmLocalTestDataDir + "point/trajMultidimJoinTime.ncml";
assert 20 == TestPointDatasets.checkPointDataset(location, FeatureType.TRAJECTORY, true);
}
@Test
public void testProblem2() throws IOException {
String location = TestDir.cdmLocalTestDataDir + "point/stationMultidimTimeJoin.ncml";
assert 15 == TestPointDatasets.checkPointDataset(location, FeatureType.STATION, true);
}
@Test
public void testProblem3() throws IOException {
String location = TestDir.cdmUnitTestDir + "ft/stationProfile/PROFILER_RASS_01hr_20091027_1500.nc";
assert 198 == TestPointDatasets.checkPointDataset(location, FeatureType.STATION_PROFILE, true);
}
@Test
public void testStationVarLevels() throws Exception {
String file = TestDir.cdmLocalTestDataDir + "point/stationData2Levels.ncml";
Formatter buf = new Formatter();
try (FeatureDatasetPoint pods = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(ucar.nc2.constants.FeatureType.STATION, file, null, buf)) {
List<FeatureCollection> collectionList = pods.getPointFeatureCollectionList();
assert (collectionList.size() == 1) : "Can't handle point data with multiple collections";
FeatureCollection fc = collectionList.get(0);
assert fc instanceof StationCollection;
assert fc instanceof StationTimeSeriesFeatureCollection;
StationTimeSeriesFeatureCollection sc = (StationTimeSeriesFeatureCollection) fc;
List<StationFeature> stations = sc.getStationFeatures();
for (StationFeature s : stations) {
StructureData sdata = s.getFeatureData();
StructureMembers.Member m = sdata.findMember("stnInfo");
assert m != null : "missing stnInfo";
assert m.getDataType() == DataType.STRING : "stnInfo not a string";
System.out.printf("stnInfo=%s%n", sdata.getScalarString(m));
}
PointFeatureCollectionIterator iter = sc.getPointFeatureCollectionIterator(-1);
while (iter.hasNext()) {
PointFeatureCollection pfc = iter.next();
assert pfc instanceof StationTimeSeriesFeatureImpl : pfc.getClass().getName();
StationTimeSeriesFeature s = (StationTimeSeriesFeature) pfc;
StructureData sdata = s.getFeatureData();
StructureMembers.Member m = sdata.findMember("stnInfo");
assert m != null : "missing stnInfo";
assert m.getDataType() == DataType.STRING : "stnInfo not a string";
System.out.printf("stnInfo=%s%n", sdata.getScalarString(m));
}
PointFeatureCollection pfc = sc.flatten(null, (CalendarDateRange) null, null);
PointFeatureIterator iter2 = pfc.getPointFeatureIterator(-1);
while (iter2.hasNext()) {
PointFeature pf = iter2.next();
assert pf instanceof StationPointFeature;
StationPointFeature s = (StationPointFeature) pf;
StructureData sdata = s.getFeatureData();
StructureMembers.Member m = sdata.findMember("stnInfo");
assert m == null : "stnInfo in leaf";
StructureData sdata2 = s.getDataAll();
m = sdata2.findMember("stnInfo");
assert m != null : "missing stnInfo";
assert m.getDataType() == DataType.STRING : "stnInfo not a string";
System.out.printf("stnInfo=%s%n", sdata2.getScalarString(m));
}
}
}
@Test
public void testStationVarSingle() throws Exception {
String file = TestDir.cdmLocalTestDataDir + "point/stationSingle.ncml";
Formatter buf = new Formatter();
try (FeatureDatasetPoint pods = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(ucar.nc2.constants.FeatureType.STATION, file, null, buf)) {
List<FeatureCollection> collectionList = pods.getPointFeatureCollectionList();
assert (collectionList.size() == 1) : "Can't handle point data with multiple collections";
FeatureCollection fc = collectionList.get(0);
assert fc instanceof StationCollection;
StationCollection sc = (StationCollection) fc;
List<Station> stations = sc.getStations();
assert (stations.size() > 0) : "No stations";
Station s = stations.get(0);
assert s.getName().equals("666") : "name should be '666'";
assert !Double.isNaN(s.getAltitude()) : "No altitude on station";
assert s.getDescription() != null : "No description on station";
assert s.getDescription().equalsIgnoreCase("flabulous") : "description should equal 'flabulous'";
assert s.getWmoId() != null : "No wmoId on station";
assert s.getWmoId().equalsIgnoreCase("whoa") : "wmoId should equal 'whoa' but ='"+s.getWmoId()+"'";
}
}
@Test
public void testStationVarRagged() throws Exception {
String file = TestDir.cdmLocalTestDataDir + "point/stationRaggedContig.ncml";
Formatter buf = new Formatter();
try (FeatureDatasetPoint pods = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(ucar.nc2.constants.FeatureType.STATION, file, null, buf)) {
List<FeatureCollection> collectionList = pods.getPointFeatureCollectionList();
assert (collectionList.size() == 1) : "Can't handle point data with multiple collections";
FeatureCollection fc = collectionList.get(0);
assert fc instanceof StationCollection;
StationCollection sc = (StationCollection) fc;
List<Station> stations = sc.getStations();
assert (stations.size() == 3) : "Should be 3 stations";
for (Station s : stations) {
System.out.printf("%s%n", s);
assert !Double.isNaN(s.getAltitude()) : "No altitude on station";
assert s.getDescription() != null && !s.getDescription().isEmpty() : "No description on station";
assert s.getWmoId() != null && !s.getWmoId().isEmpty() : "No wmoId on station";
}
}
}
@Test
public void testProfileSingleId() throws Exception {
String file = TestDir.cdmLocalTestDataDir + "point/profileSingle.ncml";
Formatter buf = new Formatter();
try (FeatureDatasetPoint pods = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(ucar.nc2.constants.FeatureType.PROFILE, file, null, buf)) {
List<FeatureCollection> collectionList = pods.getPointFeatureCollectionList();
assert (collectionList.size() == 1) : "Can't handle point data with multiple collections";
FeatureCollection fc = collectionList.get(0);
assert fc instanceof ProfileFeatureCollection;
ProfileFeatureCollection pc = (ProfileFeatureCollection) fc;
int count = 0;
pc.resetIteration();
while (pc.hasNext()) {
ProfileFeature pf = pc.next();
assert pf.getName().equals("666") : pf.getName() + " should be '666'";
count++;
}
assert count == 1;
}
}
@Test
public void testStationVarMulti() throws Exception {
String file = TestDir.cdmLocalTestDataDir + "point/stationMultidim.ncml";
Formatter buf = new Formatter();
try (FeatureDatasetPoint pods = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(ucar.nc2.constants.FeatureType.STATION, file, null, buf)) {
List<FeatureCollection> collectionList = pods.getPointFeatureCollectionList();
assert (collectionList.size() == 1) : "Can't handle point data with multiple collections";
FeatureCollection fc = collectionList.get(0);
assert fc instanceof StationCollection;
StationCollection sc = (StationCollection) fc;
List<Station> stations = sc.getStations();
assert (stations.size() == 5) : "Should be 5 stations";
for (Station s : stations) {
System.out.printf("%s%n", s);
assert !Double.isNaN(s.getAltitude()) : "No altitude on station";
assert s.getDescription() != null && !s.getDescription().isEmpty() : "No description on station";
assert s.getWmoId() != null && !s.getWmoId().isEmpty() : "No wmoId on station";
}
}
}
@Test
public void testDataVars() throws Exception {
String file = TestDir.cdmLocalTestDataDir + "point/stationSingle.ncml";
Formatter buf = new Formatter();
try (FeatureDatasetPoint pods = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(ucar.nc2.constants.FeatureType.STATION, file, null, buf)) {
List<VariableSimpleIF> dataVars = pods.getDataVariables();
for (VariableSimpleIF dv : dataVars) System.out.printf(" %s%n", dv );
assert (dataVars.size() == 1) : "Should only be one data var";
VariableSimpleIF data = dataVars.get(0);
assert data.getShortName().equalsIgnoreCase("data");
}
}
@Test
public void testAltUnits() throws Exception {
String file = TestDir.cdmLocalTestDataDir + "point/stationRaggedContig.ncml";
Formatter buf = new Formatter();
try (FeatureDatasetPoint pods = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(ucar.nc2.constants.FeatureType.STATION, file, null, buf)) {
List<FeatureCollection> collectionList = pods.getPointFeatureCollectionList();
assert (collectionList.size() == 1) : "Can't handle point data with multiple collections";
NestedPointFeatureCollection fc = (NestedPointFeatureCollection) collectionList.get(0);
assert fc.getAltUnits() != null : "no Alt Units";
assert fc.getAltUnits().equalsIgnoreCase("m") : "Alt Units should be 'm'";
}
}
    // make sure that try-with-resources tolerates a null return from FeatureDatasetFactoryManager
@Test
public void testTryWith() throws IOException {
String location = TestDir.cdmLocalTestDataDir + "testWrite.nc";
Formatter errlog = new Formatter();
try (FeatureDataset fdataset = FeatureDatasetFactoryManager.open(null, location, null, errlog)) {
assert (fdataset == null);
}
}
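    // Editor's note (added for clarity): this passes because try-with-resources
    // only calls close() on non-null resources, so a null FeatureDataset is
    // tolerated rather than causing a NullPointerException.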
@Test
public void testTryWithWrap() throws IOException {
String location = TestDir.cdmLocalTestDataDir + "testWrite.nc";
NetcdfDataset ncd = NetcdfDataset.openDataset(location);
Formatter errlog = new Formatter();
try (FeatureDataset fdataset = FeatureDatasetFactoryManager.wrap(null, ncd, null, errlog)) {
assert (fdataset == null);
}
ncd.close();
}
// This is a regression test for TDS-513: https://bugtracking.unidata.ucar.edu/browse/TDS-513
@Test
public void testStationProfileMultidim1dTime() throws IOException {
FeatureType type = FeatureType.STATION_PROFILE;
String location = TestCFPointDatasets.CFpointObs_topdir + "stationProfileMultidim1dTime.ncml";
ucar.nc2.util.CancelTask task = null;
Formatter out = new Formatter();
FeatureDataset featDset = FeatureDatasetFactoryManager.open(type, location, task, out);
assert featDset != null && featDset instanceof FeatureDatasetPoint;
FeatureDatasetPoint featDsetPoint = (FeatureDatasetPoint) featDset;
List<FeatureCollection> featCols = featDsetPoint.getPointFeatureCollectionList();
assert !featCols.isEmpty();
FeatureCollection featCol = featCols.get(0); // We only care about the first one.
assert featCol instanceof StationProfileFeatureCollection;
StationProfileFeatureCollection stationProfileFeatCol = (StationProfileFeatureCollection) featCol;
assert stationProfileFeatCol.hasNext();
StationProfileFeature stationProfileFeat = stationProfileFeatCol.next(); // We only care about the first one.
List<Date> timesList = stationProfileFeat.getTimes();
Set<Date> timesSet = new TreeSet<Date>(stationProfileFeat.getTimes()); // Nukes dupes.
Assert.assertEquals(timesList.size(), timesSet.size()); // Assert that the times are unique.
}
@Test
public void testIterator() { // kunicki
DateFormat df = new SimpleDateFormat("yyyy-MM-dd");
Formatter formatter = new Formatter(System.err);
try (FeatureDataset fd = FeatureDatasetFactoryManager.open(FeatureType.STATION, TestDir.cdmLocalTestDataDir + "pointPre1.6/StandardPointFeatureIteratorIssue.ncml", null, formatter)) {
if (fd != null && fd instanceof FeatureDatasetPoint) {
FeatureDatasetPoint fdp = (FeatureDatasetPoint) fd;
FeatureCollection fc = fdp.getPointFeatureCollectionList().get(0);
if (fc != null && fc instanceof StationTimeSeriesFeatureCollection) {
StationTimeSeriesFeatureCollection stsfc = (StationTimeSeriesFeatureCollection) fc;
// subset criteria not important, just want to get data
// into flattened representation
PointFeatureCollection pfc = stsfc.flatten(
new LatLonRect(new LatLonPointImpl(-90, -180), new LatLonPointImpl(90, 180)),
new DateRange(df.parse("1900-01-01"), df.parse("2100-01-01")));
PointFeatureIterator pfi = pfc.getPointFeatureIterator(-1);
try {
while (pfi.hasNext()) {
PointFeature pf = pfi.next();
// the call to cursor.getParentStructure() in
// in StandardPointFeatureIterator.makeStation()
// is returning the observation structure, not the
// station structure since Cursor.currentIndex = 0
Station s = stsfc.getStation(pf);
System.out.println("stn= " + s);
}
} finally {
pfi.finish();
}
}
}
} catch (IOException | ParseException e) {
e.printStackTrace();
assert false;
}
}
@Test
@Category(NeedsCdmUnitTest.class)
public void testGempak() throws Exception {
String file = TestDir.cdmUnitTestDir + "formats/gempak/surface/09052812.sf"; // Q:/cdmUnitTest/formats/gempak/surface/09052812.sf
Formatter buf = new Formatter();
FeatureDatasetPoint pods = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(ucar.nc2.constants.FeatureType.POINT, file, null, buf);
if (pods == null) { // try as ANY_POINT
pods = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(ucar.nc2.constants.FeatureType.ANY_POINT, file, null, buf);
}
if (pods == null) {
System.out.printf("can't open file=%s%n error=%s%n", file, buf);
throw new Exception("can't open file " + file);
}
List<FeatureCollection> collectionList = pods.getPointFeatureCollectionList();
if (collectionList.size() > 1) {
throw new IllegalArgumentException("Can't handle point data with multiple collections");
}
boolean sample;
for (int time = 0; time < 2; time++) {
sample = time < 1;
FeatureCollection fc = collectionList.get(0);
PointFeatureCollection collection = null;
LatLonRect llr = new LatLonRect(new LatLonPointImpl(33.4, -92.2), new LatLonPointImpl(47.9, -75.89));
System.out.println("llr = " + llr);
if (fc instanceof PointFeatureCollection) {
collection = (PointFeatureCollection) fc;
collection = collection.subset(llr, (CalendarDateRange) null);
} else if (fc instanceof NestedPointFeatureCollection) {
NestedPointFeatureCollection npfc = (NestedPointFeatureCollection) fc;
npfc = npfc.subset(llr);
collection = npfc.flatten(llr, (CalendarDateRange) null);
} else {
throw new IllegalArgumentException("Can't handle collection of type " + fc.getClass().getName());
}
List<PointFeature> pos = new ArrayList<>(100000);
List<Date> times = new ArrayList<>(100000);
PointFeatureIterator dataIterator = collection.getPointFeatureIterator(16384);
while (dataIterator.hasNext()) {
PointFeature po = dataIterator.next();
pos.add(po);
times.add(po.getNominalTimeAsDate());
// System.out.println("po = " + po);
if (sample) {
break;
}
}
int size = pos.size();
for (PointFeature po : pos) {
ucar.unidata.geoloc.EarthLocation el = po.getLocation();
System.out.println("el = " + el);
}
}
pods.close();
}
}
| cdm-test/src/test/java/ucar/nc2/ft/point/TestMiscPointFeature.java | /*
* Copyright (c) 1998 - 2010. University Corporation for Atmospheric Research/Unidata
* Portions of this software were developed by the Unidata Program at the
* University Corporation for Atmospheric Research.
*
* Access and use of this software shall impose the following obligations
* and understandings on the user. The user is granted the right, without
* any fee or cost, to use, copy, modify, alter, enhance and distribute
* this software, and any derivative works thereof, and its supporting
* documentation for any purpose whatsoever, provided that this entire
* notice appears in all copies of the software, derivative works and
* supporting documentation. Further, UCAR requests that the user credit
* UCAR/Unidata in any publications that result from the use of this
* software or in any product that includes this software. The names UCAR
* and/or Unidata, however, may not be used in any advertising or publicity
* to endorse or promote any products or commercial entity unless specific
* written permission is obtained from UCAR/Unidata. The user also
* understands that UCAR/Unidata is not obligated to provide the user with
* any support, consulting, training or assistance of any kind with regard
* to the use, operation and performance of this software nor to provide
* the user with any updates, revisions, new versions or "bug fixes."
*
* THIS SOFTWARE IS PROVIDED BY UCAR/UNIDATA "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL UCAR/UNIDATA BE LIABLE FOR ANY SPECIAL,
* INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
* FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
* NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
* WITH THE ACCESS, USE OR PERFORMANCE OF THIS SOFTWARE.
*/
package ucar.nc2.ft.point;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import ucar.ma2.DataType;
import ucar.ma2.StructureData;
import ucar.ma2.StructureMembers;
import ucar.nc2.VariableSimpleIF;
import ucar.nc2.constants.FeatureType;
import ucar.nc2.dataset.NetcdfDataset;
import ucar.nc2.ft.*;
import ucar.nc2.time.CalendarDateRange;
import ucar.nc2.units.DateRange;
import ucar.unidata.geoloc.LatLonPointImpl;
import ucar.unidata.geoloc.LatLonRect;
import ucar.unidata.geoloc.Station;
import ucar.unidata.test.util.NeedsCdmUnitTest;
import ucar.unidata.test.util.TestDir;
import java.io.IOException;
import java.text.DateFormat;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
public class TestMiscPointFeature {
@Test
public void testProblem() throws IOException {
String location = TestDir.cdmLocalTestDataDir + "point/trajMultidimJoinTime.ncml";
assert 20 == TestPointDatasets.checkPointDataset(location, FeatureType.TRAJECTORY, true);
}
@Test
public void testProblem2() throws IOException {
String location = TestDir.cdmLocalTestDataDir + "point/stationMultidimTimeJoin.ncml";
assert 15 == TestPointDatasets.checkPointDataset(location, FeatureType.STATION, true);
}
@Test
public void testStationVarLevels() throws Exception {
String file = TestDir.cdmLocalTestDataDir + "point/stationData2Levels.ncml";
Formatter buf = new Formatter();
try (FeatureDatasetPoint pods = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(ucar.nc2.constants.FeatureType.STATION, file, null, buf)) {
List<FeatureCollection> collectionList = pods.getPointFeatureCollectionList();
assert (collectionList.size() == 1) : "Can't handle point data with multiple collections";
FeatureCollection fc = collectionList.get(0);
assert fc instanceof StationCollection;
assert fc instanceof StationTimeSeriesFeatureCollection;
StationTimeSeriesFeatureCollection sc = (StationTimeSeriesFeatureCollection) fc;
List<StationFeature> stations = sc.getStationFeatures();
for (StationFeature s : stations) {
StructureData sdata = s.getFeatureData();
StructureMembers.Member m = sdata.findMember("stnInfo");
assert m != null : "missing stnInfo";
assert m.getDataType() == DataType.STRING : "stnInfo not a string";
System.out.printf("stnInfo=%s%n", sdata.getScalarString(m));
}
PointFeatureCollectionIterator iter = sc.getPointFeatureCollectionIterator(-1);
while (iter.hasNext()) {
PointFeatureCollection pfc = iter.next();
assert pfc instanceof StationTimeSeriesFeatureImpl : pfc.getClass().getName();
StationTimeSeriesFeature s = (StationTimeSeriesFeature) pfc;
StructureData sdata = s.getFeatureData();
StructureMembers.Member m = sdata.findMember("stnInfo");
assert m != null : "missing stnInfo";
assert m.getDataType() == DataType.STRING : "stnInfo not a string";
System.out.printf("stnInfo=%s%n", sdata.getScalarString(m));
}
PointFeatureCollection pfc = sc.flatten(null, (CalendarDateRange) null, null);
PointFeatureIterator iter2 = pfc.getPointFeatureIterator(-1);
while (iter2.hasNext()) {
PointFeature pf = iter2.next();
assert pf instanceof StationPointFeature;
StationPointFeature s = (StationPointFeature) pf;
StructureData sdata = s.getFeatureData();
StructureMembers.Member m = sdata.findMember("stnInfo");
assert m == null : "stnInfo in leaf";
StructureData sdata2 = s.getDataAll();
m = sdata2.findMember("stnInfo");
assert m != null : "missing stnInfo";
assert m.getDataType() == DataType.STRING : "stnInfo not a string";
System.out.printf("stnInfo=%s%n", sdata2.getScalarString(m));
}
}
}
@Test
public void testStationVarSingle() throws Exception {
String file = TestDir.cdmLocalTestDataDir + "point/stationSingle.ncml";
Formatter buf = new Formatter();
try (FeatureDatasetPoint pods = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(ucar.nc2.constants.FeatureType.STATION, file, null, buf)) {
List<FeatureCollection> collectionList = pods.getPointFeatureCollectionList();
assert (collectionList.size() == 1) : "Can't handle point data with multiple collections";
FeatureCollection fc = collectionList.get(0);
assert fc instanceof StationCollection;
StationCollection sc = (StationCollection) fc;
List<Station> stations = sc.getStations();
assert (stations.size() > 0) : "No stations";
Station s = stations.get(0);
assert s.getName().equals("666") : "name should be '666'";
assert !Double.isNaN(s.getAltitude()) : "No altitude on station";
assert s.getDescription() != null : "No description on station";
assert s.getDescription().equalsIgnoreCase("flabulous") : "description should equal 'flabulous'";
assert s.getWmoId() != null : "No wmoId on station";
assert s.getWmoId().equalsIgnoreCase("whoa") : "wmoId should equal 'whoa' but ='"+s.getWmoId()+"'";
}
}
@Test
public void testStationVarRagged() throws Exception {
String file = TestDir.cdmLocalTestDataDir + "point/stationRaggedContig.ncml";
Formatter buf = new Formatter();
try (FeatureDatasetPoint pods = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(ucar.nc2.constants.FeatureType.STATION, file, null, buf)) {
List<FeatureCollection> collectionList = pods.getPointFeatureCollectionList();
assert (collectionList.size() == 1) : "Can't handle point data with multiple collections";
FeatureCollection fc = collectionList.get(0);
assert fc instanceof StationCollection;
StationCollection sc = (StationCollection) fc;
List<Station> stations = sc.getStations();
assert (stations.size() == 3) : "Should be 3 stations";
for (Station s : stations) {
System.out.printf("%s%n", s);
assert !Double.isNaN(s.getAltitude()) : "No altitude on station";
assert s.getDescription() != null && !s.getDescription().isEmpty() : "No description on station";
assert s.getWmoId() != null && !s.getWmoId().isEmpty() : "No wmoId on station";
}
}
}
@Test
public void testProfileSingleId() throws Exception {
String file = TestDir.cdmLocalTestDataDir + "point/profileSingle.ncml";
Formatter buf = new Formatter();
try (FeatureDatasetPoint pods = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(ucar.nc2.constants.FeatureType.PROFILE, file, null, buf)) {
List<FeatureCollection> collectionList = pods.getPointFeatureCollectionList();
assert (collectionList.size() == 1) : "Can't handle point data with multiple collections";
FeatureCollection fc = collectionList.get(0);
assert fc instanceof ProfileFeatureCollection;
ProfileFeatureCollection pc = (ProfileFeatureCollection) fc;
int count = 0;
pc.resetIteration();
while (pc.hasNext()) {
ProfileFeature pf = pc.next();
assert pf.getName().equals("666") : pf.getName() + " should be '666'";
count++;
}
assert count == 1;
}
}
@Test
public void testStationVarMulti() throws Exception {
String file = TestDir.cdmLocalTestDataDir + "point/stationMultidim.ncml";
Formatter buf = new Formatter();
try (FeatureDatasetPoint pods = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(ucar.nc2.constants.FeatureType.STATION, file, null, buf)) {
List<FeatureCollection> collectionList = pods.getPointFeatureCollectionList();
assert (collectionList.size() == 1) : "Can't handle point data with multiple collections";
FeatureCollection fc = collectionList.get(0);
assert fc instanceof StationCollection;
StationCollection sc = (StationCollection) fc;
List<Station> stations = sc.getStations();
assert (stations.size() == 5) : "Should be 5 stations";
for (Station s : stations) {
System.out.printf("%s%n", s);
assert !Double.isNaN(s.getAltitude()) : "No altitude on station";
assert s.getDescription() != null && !s.getDescription().isEmpty() : "No description on station";
assert s.getWmoId() != null && !s.getWmoId().isEmpty() : "No wmoId on station";
}
}
}
@Test
public void testDataVars() throws Exception {
String file = TestDir.cdmLocalTestDataDir + "point/stationSingle.ncml";
Formatter buf = new Formatter();
try (FeatureDatasetPoint pods = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(ucar.nc2.constants.FeatureType.STATION, file, null, buf)) {
List<VariableSimpleIF> dataVars = pods.getDataVariables();
for (VariableSimpleIF dv : dataVars) System.out.printf(" %s%n", dv );
assert (dataVars.size() == 1) : "Should only be one data var";
VariableSimpleIF data = dataVars.get(0);
assert data.getShortName().equalsIgnoreCase("data");
}
}
@Test
public void testAltUnits() throws Exception {
String file = TestDir.cdmLocalTestDataDir + "point/stationRaggedContig.ncml";
Formatter buf = new Formatter();
try (FeatureDatasetPoint pods = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(ucar.nc2.constants.FeatureType.STATION, file, null, buf)) {
List<FeatureCollection> collectionList = pods.getPointFeatureCollectionList();
assert (collectionList.size() == 1) : "Can't handle point data with multiple collections";
NestedPointFeatureCollection fc = (NestedPointFeatureCollection) collectionList.get(0);
assert fc.getAltUnits() != null : "no Alt Units";
assert fc.getAltUnits().equalsIgnoreCase("m") : "Alt Units should be 'm'";
}
}
    // make sure that try-with-resources tolerates a null return from FeatureDatasetFactoryManager
@Test
public void testTryWith() throws IOException {
String location = TestDir.cdmLocalTestDataDir + "testWrite.nc";
Formatter errlog = new Formatter();
try (FeatureDataset fdataset = FeatureDatasetFactoryManager.open(null, location, null, errlog)) {
assert (fdataset == null);
}
}
@Test
public void testTryWithWrap() throws IOException {
String location = TestDir.cdmLocalTestDataDir + "testWrite.nc";
NetcdfDataset ncd = NetcdfDataset.openDataset(location);
Formatter errlog = new Formatter();
try (FeatureDataset fdataset = FeatureDatasetFactoryManager.wrap(null, ncd, null, errlog)) {
assert (fdataset == null);
}
ncd.close();
}
// This is a regression test for TDS-513: https://bugtracking.unidata.ucar.edu/browse/TDS-513
@Test
public void testStationProfileMultidim1dTime() throws IOException {
FeatureType type = FeatureType.STATION_PROFILE;
String location = TestCFPointDatasets.CFpointObs_topdir + "stationProfileMultidim1dTime.ncml";
ucar.nc2.util.CancelTask task = null;
Formatter out = new Formatter();
FeatureDataset featDset = FeatureDatasetFactoryManager.open(type, location, task, out);
assert featDset != null && featDset instanceof FeatureDatasetPoint;
FeatureDatasetPoint featDsetPoint = (FeatureDatasetPoint) featDset;
List<FeatureCollection> featCols = featDsetPoint.getPointFeatureCollectionList();
assert !featCols.isEmpty();
FeatureCollection featCol = featCols.get(0); // We only care about the first one.
assert featCol instanceof StationProfileFeatureCollection;
StationProfileFeatureCollection stationProfileFeatCol = (StationProfileFeatureCollection) featCol;
assert stationProfileFeatCol.hasNext();
StationProfileFeature stationProfileFeat = stationProfileFeatCol.next(); // We only care about the first one.
List<Date> timesList = stationProfileFeat.getTimes();
Set<Date> timesSet = new TreeSet<Date>(stationProfileFeat.getTimes()); // Nukes dupes.
Assert.assertEquals(timesList.size(), timesSet.size()); // Assert that the times are unique.
}
@Test
public void testIterator() { // kunicki
DateFormat df = new SimpleDateFormat("yyyy-MM-dd");
Formatter formatter = new Formatter(System.err);
try (FeatureDataset fd = FeatureDatasetFactoryManager.open(FeatureType.STATION, TestDir.cdmLocalTestDataDir + "pointPre1.6/StandardPointFeatureIteratorIssue.ncml", null, formatter)) {
if (fd != null && fd instanceof FeatureDatasetPoint) {
FeatureDatasetPoint fdp = (FeatureDatasetPoint) fd;
FeatureCollection fc = fdp.getPointFeatureCollectionList().get(0);
if (fc != null && fc instanceof StationTimeSeriesFeatureCollection) {
StationTimeSeriesFeatureCollection stsfc = (StationTimeSeriesFeatureCollection) fc;
// subset criteria not important, just want to get data
// into flattened representation
PointFeatureCollection pfc = stsfc.flatten(
new LatLonRect(new LatLonPointImpl(-90, -180), new LatLonPointImpl(90, 180)),
new DateRange(df.parse("1900-01-01"), df.parse("2100-01-01")));
PointFeatureIterator pfi = pfc.getPointFeatureIterator(-1);
try {
while (pfi.hasNext()) {
PointFeature pf = pfi.next();
// the call to cursor.getParentStructure() in
// in StandardPointFeatureIterator.makeStation()
// is returning the observation structure, not the
// station structure since Cursor.currentIndex = 0
Station s = stsfc.getStation(pf);
System.out.println("stn= " + s);
}
} finally {
pfi.finish();
}
}
}
} catch (IOException | ParseException e) {
e.printStackTrace();
assert false;
}
}
@Test
@Category(NeedsCdmUnitTest.class)
public void testGempak() throws Exception {
String file = TestDir.cdmUnitTestDir + "formats/gempak/surface/09052812.sf"; // Q:/cdmUnitTest/formats/gempak/surface/09052812.sf
Formatter buf = new Formatter();
FeatureDatasetPoint pods = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(ucar.nc2.constants.FeatureType.POINT, file, null, buf);
if (pods == null) { // try as ANY_POINT
pods = (FeatureDatasetPoint) FeatureDatasetFactoryManager.open(ucar.nc2.constants.FeatureType.ANY_POINT, file, null, buf);
}
if (pods == null) {
System.out.printf("can't open file=%s%n error=%s%n", file, buf);
throw new Exception("can't open file " + file);
}
List<FeatureCollection> collectionList = pods.getPointFeatureCollectionList();
if (collectionList.size() > 1) {
throw new IllegalArgumentException("Can't handle point data with multiple collections");
}
boolean sample;
for (int time = 0; time < 2; time++) {
sample = time < 1;
FeatureCollection fc = collectionList.get(0);
PointFeatureCollection collection = null;
LatLonRect llr = new LatLonRect(new LatLonPointImpl(33.4, -92.2), new LatLonPointImpl(47.9, -75.89));
System.out.println("llr = " + llr);
if (fc instanceof PointFeatureCollection) {
collection = (PointFeatureCollection) fc;
collection = collection.subset(llr, (CalendarDateRange) null);
} else if (fc instanceof NestedPointFeatureCollection) {
NestedPointFeatureCollection npfc = (NestedPointFeatureCollection) fc;
npfc = npfc.subset(llr);
collection = npfc.flatten(llr, (CalendarDateRange) null);
} else {
throw new IllegalArgumentException("Can't handle collection of type " + fc.getClass().getName());
}
List<PointFeature> pos = new ArrayList<>(100000);
List<Date> times = new ArrayList<>(100000);
PointFeatureIterator dataIterator = collection.getPointFeatureIterator(16384);
while (dataIterator.hasNext()) {
PointFeature po = dataIterator.next();
pos.add(po);
times.add(po.getNominalTimeAsDate());
// System.out.println("po = " + po);
if (sample) {
break;
}
}
int size = pos.size();
for (PointFeature po : pos) {
ucar.unidata.geoloc.EarthLocation el = po.getLocation();
System.out.println("el = " + el);
}
}
pods.close();
}
}
| FMRC: schema, docs update, fix delete collection.
| cdm-test/src/test/java/ucar/nc2/ft/point/TestMiscPointFeature.java | FMRC: schema, docs update, fix delete collection. |
|
Java | isc | 94e6cb9ef765ffad0521bf8533e64d296dac6a9a | 0 | pauldoo/proffy,pauldoo/scratch,pauldoo/proffy,pauldoo/racepoint,pauldoo/scratch,pauldoo/scratch,pauldoo/scratch,pauldoo/scratch,pauldoo/scratch,pauldoo/scratch,pauldoo/scratch,pauldoo/scratch,pauldoo/scratch,pauldoo/scratch,pauldoo/scratch,pauldoo/scratch,pauldoo/scratch,pauldoo/proffy | /*
Copyright (C) 2008, 2009, 2010 Paul Richards.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package fractals;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadFactory;
public final class Utilities
{
private static final ScheduledExecutorService lightThreadPool =
new ScheduledThreadPoolExecutor(2, new MyThreadFactory(Thread.NORM_PRIORITY));
private static final ScheduledExecutorService heavyThreadPool =
new ScheduledThreadPoolExecutor(Runtime.getRuntime().availableProcessors(), new MyThreadFactory((Thread.NORM_PRIORITY + Thread.MIN_PRIORITY) / 2));
private static final ScheduledExecutorService backgroundThreadPool =
new ScheduledThreadPoolExecutor(Runtime.getRuntime().availableProcessors(), new MyThreadFactory(Thread.MIN_PRIORITY));
private static final class MyThreadFactory implements ThreadFactory
{
final int priority;
MyThreadFactory(int priority)
{
this.priority = priority;
}
@Override
public Thread newThread(Runnable r) {
Thread result = new Thread(r);
result.setPriority(priority);
result.setDaemon(true);
return result;
}
}
private Utilities()
{
}
static void setGraphicsToHighQuality(Graphics2D g)
{
g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
g.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
}
static void setGraphicsToLowQuality(Graphics2D g)
{
g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF);
g.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_SPEED);
}
/**
Simplistic exposure function that returns 1.0 - e^(-exposure * value).
*/
static double expose(double value, double exposure)
{
return 1.0 - Math.exp(-exposure * value);
}
/**
A thread pool intended for light background tasks (e.g. triggering repaints).
*/
static ScheduledExecutorService getLightThreadPool()
{
return lightThreadPool;
}
/**
A thread pool intended for heavy tasks that benefit image quality
in the short term (e.g. rendering, projecting).
*/
static ScheduledExecutorService getHeavyThreadPool()
{
return heavyThreadPool;
}
/**
A thread pool intended for heavy tasks that benefit image quality
in the long term (e.g. further sampling expected to take many seconds).
*/
static ScheduledExecutorService getBackgroundThreadPool()
{
return backgroundThreadPool;
}
/**
Clamps the value 'x' to be in the range [min, max].
*/
static int clamp(final int min, final int x, final int max)
{
if (max < min) {
throw new IllegalArgumentException("Max is less than min");
}
return Math.max(min, Math.min(max, x));
}
/**
Copies an array of doubles, returns the new copy.
*/
static double[] copyDoubleArray(double[] source)
{
final double[] result = new double[source.length];
System.arraycopy(source, 0, result, 0, source.length);
return result;
}
/**
Asserts that the value is not NaN, then returns it.
*/
public static double assertNotNaN(double x) throws NotANumberException
{
if (Double.isNaN(x)) {
throw new NotANumberException();
}
return x;
}
}
| Fractals/src/fractals/Utilities.java | /*
Copyright (C) 2008, 2009, 2010 Paul Richards.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package fractals;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.ThreadFactory;
public final class Utilities
{
private static final ScheduledExecutorService lightThreadPool =
new ScheduledThreadPoolExecutor(2, new MyThreadFactory(Thread.NORM_PRIORITY));
private static final ScheduledExecutorService heavyThreadPool =
new ScheduledThreadPoolExecutor(2, new MyThreadFactory((Thread.NORM_PRIORITY + Thread.MIN_PRIORITY) / 2));
private static final ScheduledExecutorService backgroundThreadPool =
new ScheduledThreadPoolExecutor(2, new MyThreadFactory(Thread.MIN_PRIORITY));
private static final class MyThreadFactory implements ThreadFactory
{
final int priority;
MyThreadFactory(int priority)
{
this.priority = priority;
}
@Override
public Thread newThread(Runnable r) {
Thread result = new Thread(r);
result.setPriority(priority);
result.setDaemon(true);
return result;
}
}
private Utilities()
{
}
static void setGraphicsToHighQuality(Graphics2D g)
{
g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
g.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
}
static void setGraphicsToLowQuality(Graphics2D g)
{
g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_OFF);
g.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_SPEED);
}
/**
Simplistic exposure function that returns 1.0 - e^(-exposure * value).
*/
static double expose(double value, double exposure)
{
return 1.0 - Math.exp(-exposure * value);
}
/**
A thread pool intended for light background tasks (e.g. triggering repaints).
*/
static ScheduledExecutorService getLightThreadPool()
{
return lightThreadPool;
}
/**
A thread pool intended for heavy tasks that benefit image quality
in the short term (e.g. rendering, projecting).
*/
static ScheduledExecutorService getHeavyThreadPool()
{
return heavyThreadPool;
}
/**
A thread pool intended for heavy tasks that benefit image quality
in the long term (e.g. further sampling expected to take many seconds).
*/
static ScheduledExecutorService getBackgroundThreadPool()
{
return backgroundThreadPool;
}
/**
Clamps the value 'x' to be in the range [min, max].
*/
static int clamp(final int min, final int x, final int max)
{
if (max < min) {
throw new IllegalArgumentException("Max is less than min");
}
return Math.max(min, Math.min(max, x));
}
/**
Copies an array of doubles, returns the new copy.
*/
static double[] copyDoubleArray(double[] source)
{
final double[] result = new double[source.length];
System.arraycopy(source, 0, result, 0, source.length);
return result;
}
/**
Asserts that the value is not NaN, then returns it.
*/
public static double assertNotNaN(double x) throws NotANumberException
{
if (Double.isNaN(x)) {
throw new NotANumberException();
}
return x;
}
}
| Thread pools now use full complement of threads.
| Fractals/src/fractals/Utilities.java | Thread pools now use full complement of threads. |
|
Java | mit | 2f7eb2a08fcc465009979183a7202074fc073d56 | 0 | KMU-bank/KMU-Bank | package account;
import java.io.Serializable;
import java.util.LinkedList;
@SuppressWarnings("serial")
public class Account implements Serializable {
private String accountNumber;
private String name;
private int balance;
private boolean haveCard = false;
private LinkedList<String> stateList = new LinkedList<String>();
private int debt = 0;
private boolean stolen = false;
public Account(String accountNumber, String name) {
this.accountNumber = accountNumber;
this.name = name;
this.balance = 0;
}
// getter & setter...
public String getAccountNumber() {
return accountNumber;
}
public String getName() {
return name;
}
public int getBalance() {
return balance;
}
public int getDebt() {
return debt;
}
public LinkedList<String> getStateList() {
return stateList;
}
public void addStateList(String state) {
stateList.add(state);
}
// basic function...
public boolean deposit(int money) {
if (money <= 0)
return false;
balance += money;
return true;
}
public boolean withdraw(int money) {
if (balance < money)
return false;
balance -= money;
return true;
}
public boolean transfer(String accountNumber, int money) {
return withdraw(money);
}
// loan function
public void loan(int money) {
debt += money;
}
public boolean repay(int money) {
if (debt < money)
return false;
debt -= money;
return true;
}
public boolean repayOnAccount(int money) {
boolean isDone = withdraw(money);
if (isDone)
debt -= money;
return isDone;
}
// card function...
public void lostReport() {
stolen = true;
}
public void findReport() {
stolen = false;
}
public boolean makeCard() {
if (haveCard)
return false; // if already have card
haveCard = true;
return true;
}
public boolean useCard(String state, int money) {
if (!haveCard || stolen)
return false;
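// note: no balance check is made here, so card use may drive the balance negative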
balance -= money;
return true;
}
}
| src/account/Account.java | package account;
import java.io.Serializable;
import java.util.LinkedList;
public class Account implements Serializable {
private String accountNumber;
private String name;
private int balance;
private boolean haveCard = false;
private LinkedList<String> stateList = new LinkedList<String>();
private int debt = 0;
private boolean stolen = false;
public Account(String accountNumber, String name) {
this.accountNumber = accountNumber;
this.name = name;
this.balance = 0;
}
// getter & setter...
public String getAccountNumber() {
return accountNumber;
}
public String getName() {
return name;
}
public int getBalance() {
return balance;
}
public int getDebt() {
return debt;
}
public LinkedList<String> getStateList() {
return stateList;
}
public void addStateList(String state) {
stateList.add(state);
}
// basic function...
public boolean deposit(int money) {
if (money <= 0)
return false;
balance += money;
return true;
}
public boolean withdraw(int money) {
if (balance < money)
return false;
balance -= money;
return true;
}
public boolean transfer(String accountNumber, int money) {
return withdraw(money);
}
// loan function
public void loan(int money) {
debt += money;
}
public boolean repay(int money) {
if (debt < money)
return false;
debt -= money;
return true;
}
public boolean repayOnAccount(int money) {
boolean isDone = withdraw(money);
if (isDone)
debt -= money;
return isDone;
}
// card function...
public void lostReport() {
stolen = true;
}
public void findReport() {
stolen = false;
}
public boolean makeCard() {
if (haveCard)
return false; // if already have card
haveCard = true;
return true;
}
public boolean useCard(String state, int money) {
if (!haveCard || stolen)
return false;
balance -= money;
return true;
}
}
| add notation
| src/account/Account.java | add notation |
|
Java | mit | 40b78de8edf13ccd0b2783c5b567837d3fb84c51 | 0 | GitHubRGI/swagd,GitHubRGI/swagd,GitHubRGI/swagd,GitHubRGI/swagd | /* Copyright (C) 2014 Reinventing Geospatial, Inc
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>,
* or write to the Free Software Foundation, Inc., 59 Temple Place -
* Suite 330, Boston, MA 02111-1307, USA.
*/
package com.rgi.common.coordinate.referencesystem.profile;
import com.rgi.common.BoundingBox;
import com.rgi.common.Dimensions;
import com.rgi.common.coordinate.Coordinate;
import com.rgi.common.coordinate.CoordinateReferenceSystem;
import com.rgi.common.coordinate.CrsCoordinate;
import com.rgi.common.tile.TileOrigin;
import com.rgi.common.tile.scheme.TileMatrixDimensions;
/**
* @author Luke Lambert
*
*/
public class SphericalMercatorCrsProfile implements CrsProfile
{
@Override
public Coordinate<Integer> crsToTileCoordinate(final CrsCoordinate coordinate,
final TileMatrixDimensions dimensions,
final TileOrigin tileOrigin)
{
if(coordinate == null)
{
throw new IllegalArgumentException("Meter coordinate may not be null");
}
if(dimensions == null)
{
throw new IllegalArgumentException("Tile matrix dimensions may not be null");
}
if(tileOrigin == null)
{
throw new IllegalArgumentException("Origin may not be null");
}
if(!coordinate.getCoordinateReferenceSystem().equals(this.getCoordinateReferenceSystem()))
{
throw new IllegalArgumentException("Coordinate's coordinate reference system does not match the tile profile's coordinate reference system");
}
if(!Utility.contains(Bounds, coordinate, tileOrigin))
{
throw new IllegalArgumentException("Coordinate is outside the bounds of this coordinate reference system");
}
final Coordinate<Double> boundsCorner = Utility.boundsCorner(Bounds, tileOrigin);
final Dimensions tileDimensions = this.getTileDimensions(dimensions);
final double normalizedSrsTileCoordinateY = Math.abs(coordinate.getY() - boundsCorner.getY());
final double normalizedSrsTileCoordinateX = Math.abs(coordinate.getX() - boundsCorner.getX());
final int tileY = (int)Math.floor(normalizedSrsTileCoordinateY / tileDimensions.getHeight());
final int tileX = (int)Math.floor(normalizedSrsTileCoordinateX / tileDimensions.getWidth());
return new Coordinate<>(tileY, tileX);
}
@Override
public CrsCoordinate tileToCrsCoordinate(final int row,
final int column,
final TileMatrixDimensions dimensions,
final TileOrigin tileOrigin)
{
if(row < 0)
{
throw new IllegalArgumentException("Row must be at least 0");
}
if(column < 0)
{
throw new IllegalArgumentException("Column must be at least 0");
}
if(dimensions == null)
{
throw new IllegalArgumentException("Tile matrix dimensions may not be null");
}
if(tileOrigin == null)
{
throw new IllegalArgumentException("Origin may not be null");
}
final double tileHeight = EarthEquatorialCircumfrence / dimensions.getHeight();
final double tileWidth = EarthEquatorialCircumfrence / dimensions.getWidth();
final double originShift = (EarthEquatorialCircumfrence / 2.0);
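// Normalize the tile index to a lower-left origin, scale it to meters, and
// shift by half the circumference so that (0,0) lands at the projection center.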
final Coordinate<Integer> tileCoordinate = TileOrigin.LowerLeft.transform(tileOrigin,
row,
column,
dimensions);
return new CrsCoordinate((tileCoordinate.getY() * tileHeight) - originShift,
(tileCoordinate.getX() * tileWidth) - originShift,
this.getCoordinateReferenceSystem());
}
@Override
public Dimensions getTileDimensions(final TileMatrixDimensions dimensions)
{
return new Dimensions(EarthEquatorialCircumfrence / dimensions.getHeight(),
EarthEquatorialCircumfrence / dimensions.getWidth());
}
@Override
public CoordinateReferenceSystem getCoordinateReferenceSystem()
{
return SphericalMercatorCrsProfile.CoordinateReferenceSystem;
}
@Override
public Coordinate<Double> toGlobalGeodetic(final Coordinate<Double> coordinate)
{
// Inverse spherical Mercator: latitude = toDegrees(2 * atan(e^(y / R)) - pi / 2)
// and longitude = toDegrees(x / R), where R is the spheroid's semi-major axis.
return new Coordinate<>(Math.toDegrees(2 * Math.atan(Math.exp(coordinate.getY() / EarthEquatorialRadius)) - Math.PI / 2),
Math.toDegrees(coordinate.getX() / EarthEquatorialRadius));
}
@Override
public BoundingBox getBounds()
{
return Bounds;
}
/**
* Datum's spheroid's semi-major axis (radius of earth) in meters
*/
public static final double EarthEquatorialRadius = 6378137.0;
public static final BoundingBox Bounds = new BoundingBox(-Math.PI * EarthEquatorialRadius,
-Math.PI * EarthEquatorialRadius,
Math.PI * EarthEquatorialRadius,
Math.PI * EarthEquatorialRadius);
/**
* Earth's equatorial circumference (based on the datum's spheroid's semi-major axis, radius) in meters
*/
public static final double EarthEquatorialCircumfrence = 2.0 * Math.PI * EarthEquatorialRadius;
private final static CoordinateReferenceSystem CoordinateReferenceSystem = new CoordinateReferenceSystem("EPSG", 3857);
}
| Common/src/com/rgi/common/coordinate/referencesystem/profile/SphericalMercatorCrsProfile.java | /* Copyright (C) 2014 Reinventing Geospatial, Inc
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>,
* or write to the Free Software Foundation, Inc., 59 Temple Place -
* Suite 330, Boston, MA 02111-1307, USA.
*/
package com.rgi.common.coordinate.referencesystem.profile;
import com.rgi.common.BoundingBox;
import com.rgi.common.Dimensions;
import com.rgi.common.coordinate.Coordinate;
import com.rgi.common.coordinate.CoordinateReferenceSystem;
import com.rgi.common.coordinate.CrsCoordinate;
import com.rgi.common.tile.TileOrigin;
import com.rgi.common.tile.scheme.TileMatrixDimensions;
/**
* @author Luke Lambert
*
*/
public class SphericalMercatorCrsProfile implements CrsProfile
{
@Override
public Coordinate<Integer> crsToTileCoordinate(final CrsCoordinate coordinate,
final TileMatrixDimensions dimensions,
final TileOrigin tileOrigin)
{
if(coordinate == null)
{
throw new IllegalArgumentException("Meter coordinate may not be null");
}
if(dimensions == null)
{
throw new IllegalArgumentException("Tile matrix dimensions may not be null");
}
if(tileOrigin == null)
{
throw new IllegalArgumentException("Origin may not be null");
}
if(!Utility.contains(Bounds, coordinate, tileOrigin))
{
throw new IllegalArgumentException("Coordinate is outside the bounds of this coordinate reference system");
}
final Coordinate<Double> boundsCorner = Utility.boundsCorner(Bounds, tileOrigin);
final Dimensions tileDimensions = this.getTileDimensions(dimensions);
final double normalizedSrsTileCoordinateY = Math.abs(coordinate.getY() - boundsCorner.getY());
final double normalizedSrsTileCoordinateX = Math.abs(coordinate.getX() - boundsCorner.getX());
final int tileY = (int)Math.floor(normalizedSrsTileCoordinateY / tileDimensions.getHeight());
final int tileX = (int)Math.floor(normalizedSrsTileCoordinateX / tileDimensions.getWidth());
return new Coordinate<>(tileY, tileX);
}
@Override
public CrsCoordinate tileToCrsCoordinate(final int row,
final int column,
final TileMatrixDimensions dimensions,
final TileOrigin tileOrigin)
{
if(row < 0)
{
throw new IllegalArgumentException("Row must be at least 0");
}
if(column < 0)
{
throw new IllegalArgumentException("Column must be at least 0");
}
if(dimensions == null)
{
throw new IllegalArgumentException("Tile matrix dimensions may not be null");
}
if(tileOrigin == null)
{
throw new IllegalArgumentException("Origin may not be null");
}
final double tileHeight = EarthEquatorialCircumfrence / dimensions.getHeight();
final double tileWidth = EarthEquatorialCircumfrence / dimensions.getWidth();
final double originShift = (EarthEquatorialCircumfrence / 2.0);
final Coordinate<Integer> tileCoordinate = TileOrigin.LowerLeft.transform(tileOrigin,
row,
column,
dimensions);
return new CrsCoordinate((tileCoordinate.getY() * tileHeight) - originShift,
(tileCoordinate.getX() * tileWidth) - originShift,
this.getCoordinateReferenceSystem());
}
@Override
public Dimensions getTileDimensions(final TileMatrixDimensions dimensions)
{
return new Dimensions(EarthEquatorialCircumfrence / dimensions.getHeight(),
EarthEquatorialCircumfrence / dimensions.getWidth());
}
@Override
public CoordinateReferenceSystem getCoordinateReferenceSystem()
{
return SphericalMercatorCrsProfile.CoordinateReferenceSystem;
}
@Override
public Coordinate<Double> toGlobalGeodetic(final Coordinate<Double> coordinate)
{
// Inverse spherical Mercator: latitude = toDegrees(2 * atan(e^(y / R)) - pi / 2)
// and longitude = toDegrees(x / R), where R is the spheroid's semi-major axis.
return new Coordinate<>(Math.toDegrees(2 * Math.atan(Math.exp(coordinate.getY() / EarthEquatorialRadius)) - Math.PI / 2),
Math.toDegrees(coordinate.getX() / EarthEquatorialRadius));
}
@Override
public BoundingBox getBounds()
{
return Bounds;
}
/**
* Datum's spheroid's semi-major axis (radius of earth) in meters
*/
public static final double EarthEquatorialRadius = 6378137.0;
public static final BoundingBox Bounds = new BoundingBox(-Math.PI * EarthEquatorialRadius,
-Math.PI * EarthEquatorialRadius,
Math.PI * EarthEquatorialRadius,
Math.PI * EarthEquatorialRadius);
/**
* Earth's equatorial circumference (based on the datum's spheroid's semi-major axis, radius) in meters
*/
public static final double EarthEquatorialCircumfrence = 2.0 * Math.PI * EarthEquatorialRadius;
private final static CoordinateReferenceSystem CoordinateReferenceSystem = new CoordinateReferenceSystem("EPSG", 3857);
}
| Added Check in SphericalMercator for Same CRS
| Common/src/com/rgi/common/coordinate/referencesystem/profile/SphericalMercatorCrsProfile.java | Added Check in SphericalMercator for Same CRS |
|
Java | mit | bc7988a3ed96f19fc49fd30d33ac7484db295934 | 0 | lanen/GDXJam,Twiebs/GDXJam,Twiebs/GDXJam,Twiebs/GDXJam,libgdx-jam/GDXJam,libgdx-jam/GDXJam,lanen/GDXJam,lanen/GDXJam,libgdx-jam/GDXJam | package com.gdxjam.screens;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Input.Keys;
import com.badlogic.gdx.Screen;
import com.badlogic.gdx.graphics.GL20;
import com.gdxjam.GameManager;
public abstract class AbstractScreen implements Screen {
@Override
public void show() {
}
@Override
public void render(float delta) {
if (Gdx.input.isKeyJustPressed(Keys.ESCAPE)) {
GameManager.setScreen(new SelectorScreen());
}
Gdx.gl20.glClearColor(0, 0, 0, 1); // set the clear color before clearing
Gdx.gl20.glClear(GL20.GL_COLOR_BUFFER_BIT);
}
@Override
public void resize(int width, int height) {
}
@Override
public void pause() {
// TODO Auto-generated method stub
}
@Override
public void resume() {
// TODO Auto-generated method stub
}
@Override
public void hide() {
// TODO Auto-generated method stub
}
@Override
public void dispose() {
}
}
| core/src/com/gdxjam/screens/AbstractScreen.java | package com.gdxjam.screens;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Screen;
import com.badlogic.gdx.graphics.GL20;
public abstract class AbstractScreen implements Screen {
@Override
public void show() {
}
@Override
public void render(float delta) {
Gdx.gl20.glClearColor(0, 0, 0, 1); // set the clear color before clearing
Gdx.gl20.glClear(GL20.GL_COLOR_BUFFER_BIT);
}
@Override
public void resize(int width, int height) {
}
@Override
public void pause() {
// TODO Auto-generated method stub
}
@Override
public void resume() {
// TODO Auto-generated method stub
}
@Override
public void hide() {
// TODO Auto-generated method stub
}
@Override
public void dispose() {
}
}
| Added back button to abstract screen (ESC)
| core/src/com/gdxjam/screens/AbstractScreen.java | Added back button to abstract screen (ESC) |
|
Java | mit | 87dfbe509020f9761b0b431bc6e2dc33a970e889 | 0 | microsoftgraph/msgraph-sdk-java-core | package com.microsoft.graph.authentication;
import com.azure.core.credential.AccessToken;
import com.azure.core.credential.TokenCredential;
import com.azure.core.credential.TokenRequestContext;
import com.microsoft.graph.exceptions.AuthenticationException;
import com.microsoft.graph.exceptions.Error;
import com.microsoft.graph.httpcore.IHttpRequest;
import okhttp3.Request;
import com.microsoft.graph.exceptions.ErrorConstants.*;
import java.util.List;
public class TokenCredentialAuthProvider implements ICoreAuthenticationProvider , IAuthenticationProvider {
//TokenCredential expected form user
private TokenCredential tokenCredential;
//Context options which can be optionally set by the user
private TokenRequestContext context;
//Access token to be retrieved
private AccessToken accessToken;
/**
* Creates an Authentication provider using a passed in TokenCredential
*
* @param tokenCredential Credential object inheriting the TokenCredential interface used to instantiate the Auth Provider
* @throws AuthenticationException exception occurs if the TokenCredential parameter is null
*/
public TokenCredentialAuthProvider(TokenCredential tokenCredential) throws AuthenticationException {
if(tokenCredential == null) {
throw new AuthenticationException(new Error(Codes.InvalidArgument,
String.format(Messages.NullParameter, "TokenCredential"))
,new IllegalArgumentException());
}
this.tokenCredential = tokenCredential;
this.context = new TokenRequestContext();
}
/**
*Created an Authentication provider using a TokenCredential and list of scopes
*
* @param tokenCredential Credential object inheriting the TokenCredential interface used to instantiate the Auth Provider
* @param scopes Specified desired scopes of the Auth Provider
* @throws AuthenticationException exception occurs if the TokenCredential parameter is null
*/
public TokenCredentialAuthProvider(TokenCredential tokenCredential, List<String> scopes) throws AuthenticationException {
this(tokenCredential);
this.context.setScopes(scopes);
}
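/*
 * Usage sketch (illustrative; the credential builder and scope value below are
 * assumptions about the caller's setup, not requirements of this class):
 *
 *   TokenCredential credential = new ClientSecretCredentialBuilder()
 *       .clientId("...").clientSecret("...").tenantId("...").build();
 *   TokenCredentialAuthProvider provider = new TokenCredentialAuthProvider(
 *       credential, Arrays.asList("https://graph.microsoft.com/.default"));
 */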
/**
* Authenticates the request
*
* @param request the request to authenticate
*/
@Override
public void authenticateRequest(IHttpRequest request) {
request.addHeader(AuthConstants.AUTHORIZATION_HEADER, AuthConstants.BEARER + getAccessToken());
}
/**
* Authenticates the request
*
* @param request the request to authenticate
* @return Request with Authorization header added to it
*/
@Override
public Request authenticateRequest(Request request) {
return request.newBuilder()
.addHeader(AuthConstants.AUTHORIZATION_HEADER, AuthConstants.BEARER + getAccessToken())
.build();
}
/**
* Returns an AccessToken as a string
*
* @return String representing the retrieved AccessToken
*/
String getAccessToken() {
// Block until the credential emits a token: subscribe() is asynchronous, so
// returning immediately afterwards could read a stale or null accessToken.
this.accessToken = this.tokenCredential.getToken(this.context)
.doOnError(Throwable::printStackTrace)
.block();
return this.accessToken.getToken();
}
}
| src/main/java/com/microsoft/graph/authentication/TokenCredentialAuthProvider.java | package com.microsoft.graph.authentication;
import com.azure.core.credential.AccessToken;
import com.azure.core.credential.TokenCredential;
import com.azure.core.credential.TokenRequestContext;
import com.microsoft.graph.exceptions.AuthenticationException;
import com.microsoft.graph.exceptions.Error;
import com.microsoft.graph.httpcore.IHttpRequest;
import okhttp3.Request;
import com.microsoft.graph.exceptions.ErrorConstants.*;
import java.util.List;
public class TokenCredentialAuthProvider implements ICoreAuthenticationProvider , IAuthenticationProvider {
//TokenCredential expected form user
private TokenCredential tokenCredential;
//Context options which can be optionally set by the user
private TokenRequestContext context;
//Access token to be retrieved
private AccessToken accessToken;
/**
* Creates an Authentication provider using a passed in TokenCredential
*
* @param tokenCredential Credential object inheriting the TokenCredential interface used to instantiate the Auth Provider
* @throws AuthenticationException exception occurs if the TokenCredential parameter is null
*/
public TokenCredentialAuthProvider(TokenCredential tokenCredential) throws AuthenticationException {
if(tokenCredential == null) {
throw new AuthenticationException(new Error(Codes.InvalidArgument,
String.format(Messages.NullParameter, "TokenCredential"))
,new IllegalArgumentException());
}
this.tokenCredential = tokenCredential;
this.context = new TokenRequestContext();
}
/**
*Created an Authentication provider using a TokenCredential and list of scopes
*
* @param tokenCredential Credential object inheriting the TokenCredential interface used to instantiate the Auth Provider
* @param scopes Specified desired scopes of the Auth Provider
* @throws AuthenticationException
*/
public TokenCredentialAuthProvider(TokenCredential tokenCredential, List<String> scopes) throws AuthenticationException {
this(tokenCredential);
this.context.setScopes(scopes);
}
/**
* Authenticates the request
*
* @param request the request to authenticate
*/
@Override
public void authenticateRequest(IHttpRequest request) {
request.addHeader(AuthConstants.AUTHORIZATION_HEADER, AuthConstants.BEARER + getAccessToken());
}
/**
* Authenticates the request
*
* @param request the request to authenticate
* @return Request with Authorization header added to it
*/
@Override
public Request authenticateRequest(Request request) {
return request.newBuilder()
.addHeader(AuthConstants.AUTHORIZATION_HEADER, AuthConstants.BEARER + getAccessToken())
.build();
}
/**
* Returns an AccessToken as a string
*
* @return String representing the retrieved AccessToken
*/
String getAccessToken() {
// Block until the credential emits a token: subscribe() is asynchronous, so
// returning immediately afterwards could read a stale or null accessToken.
this.accessToken = this.tokenCredential.getToken(this.context)
.doOnError(Throwable::printStackTrace)
.block();
return this.accessToken.getToken();
}
}
| Update TokenCredentialAuthProvider.java | src/main/java/com/microsoft/graph/authentication/TokenCredentialAuthProvider.java | Update TokenCredentialAuthProvider.java |
|
Java | epl-1.0 | 2093da3da6fb20feaa686686c137d3c533f13cdd | 0 | drbgfc/mdht,vadimnehta/mdht,drbgfc/mdht,drbgfc/mdht,vadimnehta/mdht,drbgfc/mdht,vadimnehta/mdht,mdht/mdht,drbgfc/mdht,mdht/mdht,sarpkayanehta/mdht,drbgfc/mdht,sarpkayanehta/mdht,sarpkayanehta/mdht,vadimnehta/mdht,mdht/mdht,sarpkayanehta/mdht,mdht/mdht,vadimnehta/mdht,sarpkayanehta/mdht,mdht/mdht,vadimnehta/mdht,sarpkayanehta/mdht | /*******************************************************************************
* Copyright (c) 2006, 2010 David A Carlson.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* David A Carlson (XMLmodeling.com) - initial API and implementation
*
* $Id$
*******************************************************************************/
package org.openhealthtools.mdht.uml.common.util;
import java.util.Comparator;
import org.eclipse.uml2.uml.NamedElement;
/**
* Compare two NamedElement objects, for sorting collections by name.
*
* @version $Id: $
*/
public class NamedElementComparator implements Comparator<NamedElement> {
/* (non-Javadoc)
* @see java.util.Comparator#compare(java.lang.Object, java.lang.Object)
*/
public int compare(NamedElement o1, NamedElement o2) {
String name1 = o1.getName(); // parameters are already typed; no cast needed
String name2 = o2.getName();
if (name1 != null && name2 != null)
return name1.compareToIgnoreCase(name2);
else
return 0;
}
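// Typical use (sketch): Collections.sort(elements, new NamedElementComparator());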
}
| core/plugins/org.openhealthtools.mdht.uml.common/src/org/openhealthtools/mdht/uml/common/util/NamedElementComparator.java | /*******************************************************************************
* Copyright (c) 2006, 2009 David A Carlson.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* David A Carlson (XMLmodeling.com) - initial API and implementation
*
* $Id$
*******************************************************************************/
package org.openhealthtools.mdht.uml.common.util;
import java.util.Comparator;
import org.eclipse.uml2.uml.NamedElement;
/**
* Compare two NamedElement objects, for sorting collections by name.
*
* @version $Id: $
*/
public class NamedElementComparator implements Comparator {
/* (non-Javadoc)
* @see java.util.Comparator#compare(java.lang.Object, java.lang.Object)
*/
public int compare(Object o1, Object o2) {
if (NamedElement.class.isInstance(o1) && NamedElement.class.isInstance(o2)) {
String name1 = ((NamedElement) o1).getName();
String name2 = ((NamedElement) o2).getName();
if (name1 != null && name2 != null)
return name1.compareToIgnoreCase(name2);
else
return 0;
}
else {
return 0;
}
}
}
| parameterize Comparator superclass type | core/plugins/org.openhealthtools.mdht.uml.common/src/org/openhealthtools/mdht/uml/common/util/NamedElementComparator.java | parameterize Comparator superclass type |
|
Java | epl-1.0 | 35a0251e518eb059d254ce7b10681000023a8f15 | 0 | jacarrichan/org.eclipse.paho.mqtt.java | /*
============================================================================
Licensed Materials - Property of IBM
5747-SM3
(C) Copyright IBM Corp. 1999, 2012 All Rights Reserved.
US Government Users Restricted Rights - Use, duplication or
disclosure restricted by GSA ADP Schedule Contract with
IBM Corp.
============================================================================
*/
package org.eclipse.paho.android.service;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.security.KeyManagementException;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocketFactory;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import org.eclipse.paho.client.mqttv3.IMqttActionListener;
import org.eclipse.paho.client.mqttv3.IMqttAsyncClient;
import org.eclipse.paho.client.mqttv3.IMqttDeliveryToken;
import org.eclipse.paho.client.mqttv3.IMqttToken;
import org.eclipse.paho.client.mqttv3.MqttCallback;
import org.eclipse.paho.client.mqttv3.MqttClientPersistence;
import org.eclipse.paho.client.mqttv3.MqttConnectOptions;
import org.eclipse.paho.client.mqttv3.MqttException;
import org.eclipse.paho.client.mqttv3.MqttMessage;
import org.eclipse.paho.client.mqttv3.MqttPersistenceException;
import org.eclipse.paho.client.mqttv3.MqttProtocolVersion;
import org.eclipse.paho.client.mqttv3.MqttSecurityException;
import org.eclipse.paho.client.mqttv3.MqttToken;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.ServiceConnection;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.Bundle;
import android.os.IBinder;
import android.util.SparseArray;
/**
 * Enables an Android application to communicate with an MQTT server using non-blocking methods.
 * <p>
 * Implementation of the MQTT asynchronous client interface {@link IMqttAsyncClient}, using the MQTT
 * Android service to interface with an MQTT server. It provides Android applications with a simple
 * programming interface to all features of the MQTT version 3.1 specification, including:
* <ul>
* <li>connect
* <li>publish
* <li>subscribe
* <li>unsubscribe
* <li>disconnect
* </ul>
* </p>
*/
public class MqttAndroidClient extends BroadcastReceiver implements
IMqttAsyncClient {
/**
*
* The Acknowledgment mode for messages received from {@link MqttCallback#messageArrived(String, MqttMessage)}
*
*/
public enum Ack {
/**
 * As soon as {@link MqttCallback#messageArrived(String, MqttMessage)} returns,
 * the message is acknowledged as received.
*/
AUTO_ACK,
/**
 * When {@link MqttCallback#messageArrived(String, MqttMessage)} returns, the message
 * will not be acknowledged as received; the application must acknowledge it explicitly
 * through {@link MqttAndroidClient} using {@link MqttAndroidClient#acknowledgeMessage(String)}
*/
MANUAL_ACK
}
private static final String SERVICE_NAME = "org.eclipse.paho.android.service.MqttService";
private static final int BIND_SERVICE_FLAG = 0;
private static ExecutorService pool = Executors.newCachedThreadPool();
/**
* ServiceConnection to process when we bind to our service
*/
private final class MyServiceConnection implements ServiceConnection {
@Override
public void onServiceConnected(ComponentName name, IBinder binder) {
mqttService = ((MqttServiceBinder) binder).getService();
// now that we have the service available, we can actually
// connect...
doConnect();
}
@Override
public void onServiceDisconnected(ComponentName name) {
mqttService = null;
}
}
// Listener for when the service is connected or disconnected
private MyServiceConnection serviceConnection = new MyServiceConnection();
// The Android Service which will process our mqtt calls
private MqttService mqttService;
// An identifier for the underlying client connection, which we can pass to
// the service
private String clientHandle;
Context myContext;
// We hold the various tokens in a collection and pass identifiers for them
// to the service
private SparseArray<IMqttToken> tokenMap = new SparseArray<IMqttToken>();
private int tokenNumber = 0;
// Connection data
private String serverURI;
private String clientId;
private MqttClientPersistence persistence = null;
private MqttConnectOptions connectOptions;
private IMqttToken connectToken;
// The MqttCallback provided by the application
private MqttCallback callback;
private MqttTraceHandler traceCallback;
//The acknowledgment that a message has been processed by the application
private Ack messageAck;
private boolean traceEnabled = false;
/**
* Constructor - create an MqttAndroidClient that can be used to communicate with an MQTT server on android
*
* @param context
* object used to pass context to the callback.
* @param serverURI
* specifies the protocol, host name and port to be used to
* connect to an MQTT server
* @param clientId
* specifies the name by which this connection should be
* identified to the server
*/
public MqttAndroidClient(Context context, String serverURI,
String clientId) {
this(context, serverURI, clientId, null, Ack.AUTO_ACK);
}
/**
* Constructor - create an MqttAndroidClient that can be used to communicate with an MQTT server on android
* @param ctx Application's context
* @param serverURI specifies the protocol, host name and port to be used to connect to an MQTT server
* @param clientId specifies the name by which this connection should be identified to the server
* @param ackType how the application wishes to acknowledge a message has been processed
*/
public MqttAndroidClient(Context ctx, String serverURI, String clientId, Ack ackType) {
this(ctx, serverURI, clientId, null, ackType);
}
/**
* Constructor - create an MqttAndroidClient that can be used to communicate with an MQTT server on android
* @param ctx Application's context
* @param serverURI specifies the protocol, host name and port to be used to connect to an MQTT server
* @param clientId specifies the name by which this connection should be identified to the server
* @param persistence The object to use to store persisted data
*/
public MqttAndroidClient(Context ctx, String serverURI, String clientId, MqttClientPersistence persistence) {
this(ctx, serverURI, clientId, persistence, Ack.AUTO_ACK); // pass the supplied persistence through
}
/**
 * Constructor - create an MqttAndroidClient that can be used to communicate with an MQTT server on android
*
* @param context
* used to pass context to the callback.
* @param serverURI
* specifies the protocol, host name and port to be used to
* connect to an MQTT server
* @param clientId
* specifies the name by which this connection should be
* identified to the server
* @param persistence
* the persistence class to use to store in-flight message. If null then the
* default persistence mechanism is used
* @param ackType
* how the application wishes to acknowledge a message has been processed.
*/
public MqttAndroidClient(Context context, String serverURI,
String clientId, MqttClientPersistence persistence, Ack ackType) {
myContext = context;
this.serverURI = serverURI;
this.clientId = clientId;
this.persistence = persistence;
messageAck = ackType;
}
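/*
 * Minimal usage sketch (the broker URI, client id, and listener names below are
 * placeholders, not values required by this API):
 *
 *   MqttAndroidClient client =
 *       new MqttAndroidClient(context, "tcp://broker.example.com:1883", "exampleClient");
 *   client.setCallback(mqttCallback);   // deliver arriving messages to the app
 *   client.connect(null, connectListener);
 */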
/**
* Determines if this client is currently connected to the server.
*
* @return <code>true</code> if connected, <code>false</code> otherwise.
*/
@Override
public boolean isConnected() {
return clientHandle != null && mqttService != null && mqttService.isConnected(clientHandle);
}
/**
* Returns the client ID used by this client.
* <p>All clients connected to the
* same server or server farm must have a unique ID.
* </p>
*
* @return the client ID used by this client.
*/
@Override
public String getClientId() {
return clientId;
}
/**
* Returns the URI address of the server used by this client.
* <p>The format of the returned String is the same as that used on the constructor.</p>
*
* @return the server's address, as a URI String.
*/
@Override
public String getServerURI() {
return serverURI;
}
/**
* Close the client
* Releases all resource associated with the client. After the client has
* been closed it cannot be reused. For instance attempts to connect will fail.
* @throws MqttException if the client is not disconnected.
*/
@Override
public void close() {
if (mqttService == null) {
return; // never bound to the service, so there is nothing to release
}
if (clientHandle == null) {
clientHandle = mqttService.getClient(serverURI, clientId, persistence);
}
mqttService.close(clientHandle);
}
/**
* Connects to an MQTT server using the default options.
* <p>The default options are specified in {@link MqttConnectOptions} class.
* </p>
*
* @throws MqttException for any connected problems
* @return token used to track and wait for the connect to complete. The token
* will be passed to the callback methods if a callback is set.
* @see #connect(MqttConnectOptions, Object, IMqttActionListener)
*/
@Override
public IMqttToken connect() throws MqttException {
return connect(null, null);
}
/**
* Connects to an MQTT server using the provided connect options.
* <p>The connection will be established using the options specified in the
* {@link MqttConnectOptions} parameter.
* </p>
*
* @param options a set of connection parameters that override the defaults.
* @throws MqttException for any connected problems
* @return token used to track and wait for the connect to complete. The token
* will be passed to any callback that has been set.
* @see #connect(MqttConnectOptions, Object, IMqttActionListener)
*/
@Override
public IMqttToken connect(MqttConnectOptions options) throws MqttException {
return connect(options, null, null);
}
/**
* Connects to an MQTT server using the default options.
* <p>The default options are specified in {@link MqttConnectOptions} class.
* </p>
*
* @param userContext optional object used to pass context to the callback. Use
* null if not required.
* @param callback optional listener that will be notified when the connect completes. Use
* null if not required.
* @throws MqttException for any connected problems
* @return token used to track and wait for the connect to complete. The token
* will be passed to any callback that has been set.
* @see #connect(MqttConnectOptions, Object, IMqttActionListener)
*/
@Override
public IMqttToken connect(Object userContext, IMqttActionListener callback)
throws MqttException {
return connect(new MqttConnectOptions(), userContext, callback);
}
/**
* Connects to an MQTT server using the specified options.
* <p>The server to connect to is specified on the constructor.
* It is recommended to call {@link #setCallback(MqttCallback)} prior to
* connecting in order that messages destined for the client can be accepted
* as soon as the client is connected.
* </p>
* <p>The method returns control before the connect completes. Completion can
* be tracked by:
* <ul>
* <li>Waiting on the returned token {@link IMqttToken#waitForCompletion()} or</li>
* <li>Passing in a callback {@link IMqttActionListener}</li>
* </ul>
* </p>
*
* @param options a set of connection parameters that override the defaults.
* @param userContext optional object for used to pass context to the callback. Use
* null if not required.
* @param callback optional listener that will be notified when the connect completes. Use
* null if not required.
* @return token used to track and wait for the connect to complete. The token
* will be passed to any callback that has been set.
* @throws MqttException for any connected problems, including communication errors
*/
@Override
public IMqttToken connect(MqttConnectOptions options, Object userContext,
IMqttActionListener callback) throws MqttException {
//check to see if there is a network connection where we can send data before attempting the connect
ConnectivityManager conManager = (ConnectivityManager) myContext.getSystemService(Context.CONNECTIVITY_SERVICE);
NetworkInfo netInf = conManager.getActiveNetworkInfo();
if ((netInf == null) || !netInf.isConnected()) {
throw new MqttException(MqttException.REASON_CODE_BROKER_UNAVAILABLE);
}
IMqttToken token = new MqttTokenAndroid(this, userContext,
callback);
connectOptions = options;
connectToken = token;
/*
* The actual connection depends on the service, which we start and bind
* to here, but which we can't actually use until the serviceConnection
* onServiceConnected() method has run (asynchronously), so the
* connection itself takes place in the onServiceConnected() method
*/
if (mqttService == null) { // First time - must bind to the service
Intent serviceStartIntent = new Intent();
serviceStartIntent.setClassName(myContext, SERVICE_NAME);
Object service = myContext.startService(serviceStartIntent);
if (service == null) {
IMqttActionListener listener = token.getActionCallback();
if (listener != null) {
listener.onFailure(token, new RuntimeException(
"cannot start service " + SERVICE_NAME));
}
}
// We bind with BIND_SERVICE_FLAG (0), leaving us to manage the lifecycle
// until the last time it is stopped by a call to stopService()
myContext.bindService(serviceStartIntent, serviceConnection,
Context.BIND_AUTO_CREATE);
IntentFilter filter = new IntentFilter();
filter.addAction(MqttServiceConstants.CALLBACK_TO_ACTIVITY);
myContext.registerReceiver(this, filter);
}
else {
pool.execute(new Runnable() {
@Override
public void run() {
doConnect();
}
});
}
return token;
}
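/*
 * Completion can be tracked by blocking on the returned token or by supplying a
 * listener, e.g. (sketch):
 *
 *   client.connect(options, null, new IMqttActionListener() {
 *       public void onSuccess(IMqttToken asyncActionToken) { ... }
 *       public void onFailure(IMqttToken asyncActionToken, Throwable exception) { ... }
 *   });
 */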
/**
* Actually do the mqtt connect operation
*/
private void doConnect() {
if (clientHandle == null) {
clientHandle = mqttService.getClient(serverURI, clientId,
persistence);
}
mqttService.setTraceEnabled(traceEnabled);
mqttService.setTraceCallbackId(clientHandle);
String activityToken = storeToken(connectToken);
try {
mqttService.connect(clientHandle, connectOptions, null,
activityToken);
}
catch (MqttException e) {
IMqttActionListener listener = connectToken.getActionCallback();
if (listener != null) {
listener.onFailure(connectToken, e);
}
}
}
/**
* Disconnects from the server.
* <p>An attempt is made to quiesce the client allowing outstanding
* work to complete before disconnecting. It will wait
* for a maximum of 30 seconds for work to quiesce before disconnecting.
* This method must not be called from inside {@link MqttCallback} methods.
* </p>
*
* @return token used to track and wait for disconnect to complete. The token
* will be passed to any callback that has been set.
* @throws MqttException for problems encountered while disconnecting
* @see #disconnect(long, Object, IMqttActionListener)
*/
@Override
public IMqttToken disconnect() throws MqttException {
IMqttToken token = new MqttTokenAndroid(this, null,
(IMqttActionListener) null);
String activityToken = storeToken(token);
mqttService.disconnect(clientHandle, null, activityToken);
return token;
}
/**
* Disconnects from the server.
* <p>An attempt is made to quiesce the client allowing outstanding
* work to complete before disconnecting. It will wait
* for a maximum of the specified quiesce time for work to complete before disconnecting.
* This method must not be called from inside {@link MqttCallback} methods.
* </p>
* @param quiesceTimeout the amount of time in milliseconds to allow for
* existing work to finish before disconnecting. A value of zero or less
* means the client will not quiesce.
* @return token used to track and wait for disconnect to complete. The token
* will be passed to the callback methods if a callback is set.
* @throws MqttException for problems encountered while disconnecting
* @see #disconnect(long, Object, IMqttActionListener)
*/
@Override
public IMqttToken disconnect(long quiesceTimeout) throws MqttException {
IMqttToken token = new MqttTokenAndroid(this, null,
(IMqttActionListener) null);
String activityToken = storeToken(token);
mqttService.disconnect(clientHandle, quiesceTimeout, null,
activityToken);
return token;
}
/**
* Disconnects from the server.
* <p>An attempt is made to quiesce the client allowing outstanding
* work to complete before disconnecting. It will wait
* for a maximum of 30 seconds for work to quiesce before disconnecting.
* This method must not be called from inside {@link MqttCallback} methods.
* </p>
*
* @param userContext optional object used to pass context to the callback. Use
* null if not required.
* @param callback optional listener that will be notified when the disconnect completes. Use
* null if not required.
* @return token used to track and wait for the disconnect to complete. The token
* will be passed to any callback that has been set.
* @throws MqttException for problems encountered while disconnecting
* @see #disconnect(long, Object, IMqttActionListener)
*/
@Override
public IMqttToken disconnect(Object userContext,
IMqttActionListener callback) throws MqttException {
IMqttToken token = new MqttTokenAndroid(this, userContext,
callback);
String activityToken = storeToken(token);
mqttService.disconnect(clientHandle, null, activityToken);
return token;
}
/**
* Disconnects from the server.
* <p>
* The client will wait for {@link MqttCallback} methods to
* complete. It will then wait for up to the quiesce timeout to allow for
* work which has already been initiated to complete. For instance when a QoS 2
* message has started flowing to the server but the QoS 2 flow has not completed.It
* prevents new messages being accepted and does not send any messages that have
* been accepted but not yet started delivery across the network to the server. When
* work has completed or after the quiesce timeout, the client will disconnect from
* the server. If the cleanSession flag was set to false and is set to false the
* next time a connection is made QoS 1 and 2 messages that
* were not previously delivered will be delivered.</p>
* <p>This method must not be called from inside {@link MqttCallback} methods.</p>
* <p>The method returns control before the disconnect completes. Completion can
* be tracked by:
* <ul>
* <li>Waiting on the returned token {@link IMqttToken#waitForCompletion()} or</li>
* <li>Passing in a callback {@link IMqttActionListener}</li>
* </ul>
* </p>
*
* @param quiesceTimeout the amount of time in milliseconds to allow for
* existing work to finish before disconnecting. A value of zero or less
* means the client will not quiesce.
* @param userContext optional object used to pass context to the callback. Use
* null if not required.
* @param callback optional listener that will be notified when the disconnect completes. Use
* null if not required.
* @return token used to track and wait for the connect to complete. The token
* will be passed to any callback that has been set.
* @throws MqttException for problems encountered while disconnecting
*/
@Override
public IMqttToken disconnect(long quiesceTimeout, Object userContext,
IMqttActionListener callback) throws MqttException {
IMqttToken token = new MqttTokenAndroid(this, userContext,
callback);
String activityToken = storeToken(token);
mqttService.disconnect(clientHandle, quiesceTimeout, null,
activityToken);
return token;
}
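/*
 * Sketch: give in-flight work up to five seconds to finish before dropping the
 * connection (the timeout value is illustrative):
 *
 *   client.disconnect(5000, null, disconnectListener);
 */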
/**
* Publishes a message to a topic on the server.
* <p>A convenience method, which will
* create a new {@link MqttMessage} object with a byte array payload and the
* specified QoS, and then publish it.
* </p>
*
* @param topic to deliver the message to, for example "finance/stock/ibm".
* @param payload the byte array to use as the payload
* @param qos the Quality of Service to deliver the message at. Valid values are 0, 1 or 2.
* @param retained whether or not this message should be retained by the server.
* @return token used to track and wait for the publish to complete. The token
* will be passed to any callback that has been set.
* @throws MqttPersistenceException when a problem occurs storing the message
* @throws IllegalArgumentException if value of QoS is not 0, 1 or 2.
* @throws MqttException for other errors encountered while publishing the message.
* For instance if too many messages are being processed.
* @see #publish(String, MqttMessage, Object, IMqttActionListener)
*/
@Override
public IMqttDeliveryToken publish(String topic, byte[] payload, int qos,
boolean retained) throws MqttException, MqttPersistenceException {
return publish(topic, payload, qos, retained, null, null);
}
/**
* Publishes a message to a topic on the server.
* Takes an {@link MqttMessage} message and delivers it to the server at the
* requested quality of service.
*
* @param topic to deliver the message to, for example "finance/stock/ibm".
* @param message to deliver to the server
* @return token used to track and wait for the publish to complete. The token
* will be passed to any callback that has been set.
* @throws MqttPersistenceException when a problem occurs storing the message
* @throws IllegalArgumentException if value of QoS is not 0, 1 or 2.
* @throws MqttException for other errors encountered while publishing the message.
* For instance client not connected.
* @see #publish(String, MqttMessage, Object, IMqttActionListener)
*/
@Override
public IMqttDeliveryToken publish(String topic, MqttMessage message)
throws MqttException, MqttPersistenceException {
return publish(topic, message, null, null);
}
/**
* Publishes a message to a topic on the server.
* <p>A convenience method, which will
* create a new {@link MqttMessage} object with a byte array payload and the
* specified QoS, and then publish it.
* </p>
*
* @param topic to deliver the message to, for example "finance/stock/ibm".
* @param payload the byte array to use as the payload
* @param qos the Quality of Service to deliver the message at. Valid values are 0, 1 or 2.
* @param retained whether or not this message should be retained by the server.
* @param userContext optional object used to pass context to the callback. Use
* null if not required.
* @param callback optional listener that will be notified when message delivery
 * has completed to the requested quality of service
* @return token used to track and wait for the publish to complete. The token
* will be passed to any callback that has been set.
* @throws MqttPersistenceException when a problem occurs storing the message
* @throws IllegalArgumentException if value of QoS is not 0, 1 or 2.
* @throws MqttException for other errors encountered while publishing the message.
* For instance client not connected.
* @see #publish(String, MqttMessage, Object, IMqttActionListener)
*/
@Override
public IMqttDeliveryToken publish(String topic, byte[] payload, int qos,
boolean retained, Object userContext, IMqttActionListener callback)
throws MqttException, MqttPersistenceException {
MqttMessage message = new MqttMessage(payload);
message.setQos(qos);
message.setRetained(retained);
MqttDeliveryTokenAndroid token = new MqttDeliveryTokenAndroid(
this, userContext, callback, message);
String activityToken = storeToken(token);
IMqttDeliveryToken internalToken = mqttService.publish(clientHandle,
topic, payload, qos, retained, null, activityToken);
token.setDelegate(internalToken);
return token;
}
/**
* Publishes a message to a topic on the server.
* <p>
* Once this method has returned cleanly, the message has been accepted for publication by the
* client and will be delivered on a background thread.
* In the event the connection fails or the client stops. Messages will be delivered to the
* requested quality of service once the connection is re-established to the server on condition that:
* <ul>
* <li>The connection is re-established with the same clientID
 * <li>The original connection was made with {@link MqttConnectOptions#setCleanSession(boolean)}
* set to false
 * <li>The connection is re-established with {@link MqttConnectOptions#setCleanSession(boolean)}
* set to false
* <li>Depending when the failure occurs QoS 0 messages may not be delivered.
* </ul>
* </p>
*
* <p>When building an application,
* the design of the topic tree should take into account the following principles
* of topic name syntax and semantics:</p>
*
* <ul>
* <li>A topic must be at least one character long.</li>
* <li>Topic names are case sensitive. For example, <em>ACCOUNTS</em> and <em>Accounts</em> are
* two different topics.</li>
* <li>Topic names can include the space character. For example, <em>Accounts
* payable</em> is a valid topic.</li>
* <li>A leading "/" creates a distinct topic. For example, <em>/finance</em> is
* different from <em>finance</em>. <em>/finance</em> matches "+/+" and "/+", but
* not "+".</li>
* <li>Do not include the null character (Unicode <samp class="codeph">\x0000</samp>) in
* any topic.</li>
* </ul>
*
* <p>The following principles apply to the construction and content of a topic
* tree:</p>
*
* <ul>
* <li>The length is limited to 64k but within that there are no limits to the
* number of levels in a topic tree.</li>
* <li>There can be any number of root nodes; that is, there can be any number
* of topic trees.</li>
* </ul>
* <p>The method returns control before the publish completes. Completion can
* be tracked by:
* <ul>
* <li>Setting an {@link IMqttAsyncClient#setCallback(MqttCallback)} where the
* {@link MqttCallback#deliveryComplete(IMqttDeliveryToken)}
* method will be called.</li>
* <li>Waiting on the returned token {@link MqttToken#waitForCompletion()} or</li>
* <li>Passing in a callback {@link IMqttActionListener} to this method</li>
* </ul>
* </p>
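	 * <p>For example, the following sketch publishes a message and tracks its
	 * completion with a listener ({@code client} is assumed to be a connected
	 * {@link MqttAndroidClient}):</p>
	 * <pre>
	 * MqttMessage message = new MqttMessage("74.32".getBytes());
	 * message.setQos(1);
	 * client.publish("finance/stock/ibm", message, null, new IMqttActionListener() {
	 *     public void onSuccess(IMqttToken asyncActionToken) {
	 *         // the publish was accepted; delivery can be tracked by the token
	 *     }
	 *     public void onFailure(IMqttToken asyncActionToken, Throwable exception) {
	 *         // handle the failure, for example client not connected
	 *     }
	 * });
	 * </pre>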
*
* @param topic to deliver the message to, for example "finance/stock/ibm".
* @param message to deliver to the server
* @param userContext optional object used to pass context to the callback. Use
* null if not required.
* @param callback optional listener that will be notified when message delivery
* has completed to the requested quality of service
* @return token used to track and wait for the publish to complete. The token
* will be passed to callback methods if set.
* @throws MqttPersistenceException when a problem occurs storing the message
* @throws IllegalArgumentException if value of QoS is not 0, 1 or 2.
* @throws MqttException for other errors encountered while publishing the message.
* For instance client not connected.
* @see MqttMessage
*/
@Override
public IMqttDeliveryToken publish(String topic, MqttMessage message,
Object userContext, IMqttActionListener callback)
throws MqttException, MqttPersistenceException {
MqttDeliveryTokenAndroid token = new MqttDeliveryTokenAndroid(
this, userContext, callback, message);
String activityToken = storeToken(token);
IMqttDeliveryToken internalToken = mqttService.publish(clientHandle,
topic, message, null, activityToken);
token.setDelegate(internalToken);
return token;
}
/**
* Subscribe to a topic, which may include wildcards.
*
* @param topic the topic to subscribe to, which can include wildcards.
* @param qos the maximum quality of service at which to subscribe. Messages
* published at a lower quality of service will be received at the published
* QoS. Messages published at a higher quality of service will be received using
* the QoS specified on the subscribe.
* @return token used to track and wait for the subscribe to complete. The token
* will be passed to callback methods if set.
* @throws MqttSecurityException for security related problems
* @throws MqttException for non security related problems
*
* @see #subscribe(String[], int[], Object, IMqttActionListener)
*/
@Override
public IMqttToken subscribe(String topic, int qos) throws MqttException,
MqttSecurityException {
return subscribe(topic, qos, null, null);
}
/**
* Subscribe to multiple topics, each of which may include wildcards.
*
* <p>Provides an optimized way to subscribe to multiple topics compared to
* subscribing to each one individually.</p>
*
* @param topic one or more topics to subscribe to, which can include wildcards
* @param qos the maximum quality of service at which to subscribe. Messages
* published at a lower quality of service will be received at the published
* QoS. Messages published at a higher quality of service will be received using
* the QoS specified on the subscribe.
* @return token used to track and wait for the subscribe to complete. The token
* will be passed to callback methods if set.
* @throws MqttSecurityException for security related problems
* @throws MqttException for non security related problems
*
* @see #subscribe(String[], int[], Object, IMqttActionListener)
*/
@Override
public IMqttToken subscribe(String[] topic, int[] qos)
throws MqttException, MqttSecurityException {
return subscribe(topic, qos, null, null);
}
/**
* Subscribe to a topic, which may include wildcards.
*
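	 * <p>For example ({@code client} is assumed to be a connected
	 * {@link MqttAndroidClient}):</p>
	 * <pre>
	 * IMqttToken subToken = client.subscribe("finance/stock/ibm/#", 1, null, null);
	 * </pre>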
* @param topic the topic to subscribe to, which can include wildcards.
* @param qos the maximum quality of service at which to subscribe. Messages
* published at a lower quality of service will be received at the published
* QoS. Messages published at a higher quality of service will be received using
* the QoS specified on the subscribe.
* @param userContext optional object used to pass context to the callback. Use
* null if not required.
* @param callback optional listener that will be notified when subscribe
* has completed
* @return token used to track and wait for the subscribe to complete. The token
* will be passed to callback methods if set.
* @throws MqttException if there was an error registering the subscription.
*
* @see #subscribe(String[], int[], Object, IMqttActionListener)
*/
@Override
public IMqttToken subscribe(String topic, int qos, Object userContext,
IMqttActionListener callback) throws MqttException {
IMqttToken token = new MqttTokenAndroid(this, userContext,
callback, new String[]{topic});
String activityToken = storeToken(token);
mqttService.subscribe(clientHandle, topic, qos, null, activityToken);
return token;
}
/**
* Subscribes to multiple topics, each of which may include wildcards.
* <p>Provides an optimized way to subscribe to multiple topics compared to
* subscribing to each one individually.</p>
* <p>The {@link #setCallback(MqttCallback)} method
* should be called before this method, otherwise any received messages
* will be discarded.
* </p>
* <p>
	 * If {@link MqttConnectOptions#setCleanSession(boolean)} was set to true
	 * when connecting to the server then the subscription remains in place
	 * until either:
	 * <ul>
	 * <li>The client disconnects</li>
	 * <li>An unsubscribe method is called to un-subscribe the topic</li>
	 * </ul>
	 * </p>
	 * <p>
	 * If {@link MqttConnectOptions#setCleanSession(boolean)} was set to false
	 * when connecting to the server then the subscription remains in place
	 * until either:
	 * <ul>
	 * <li>An unsubscribe method is called to unsubscribe the topic</li>
	 * <li>The next time the client connects with cleanSession set to true</li>
	 * </ul>
	 * With cleanSession set to false the MQTT server will store messages on
	 * behalf of the client when the client is not connected. The next time the
	 * client connects with the <b>same client ID</b> the server will
* deliver the stored messages to the client.
* </p>
*
* <p>The "topic filter" string used when subscribing
* may contain special characters, which allow you to subscribe to multiple topics
* at once.</p>
* <p>The topic level separator is used to introduce structure into the topic, and
* can therefore be specified within the topic for that purpose. The multi-level
* wildcard and single-level wildcard can be used for subscriptions, but they
* cannot be used within a topic by the publisher of a message.
* <dl>
* <dt>Topic level separator</dt>
* <dd>The forward slash (/) is used to separate each level within
* a topic tree and provide a hierarchical structure to the topic space. The
* use of the topic level separator is significant when the two wildcard characters
* are encountered in topics specified by subscribers.</dd>
*
* <dt>Multi-level wildcard</dt>
* <dd><p>The number sign (#) is a wildcard character that matches
* any number of levels within a topic. For example, if you subscribe to
* <span><span class="filepath">finance/stock/ibm/#</span></span>, you receive
* messages on these topics:
* <pre> finance/stock/ibm<br /> finance/stock/ibm/closingprice<br /> finance/stock/ibm/currentprice</pre>
* </p>
* <p>The multi-level wildcard
* can represent zero or more levels. Therefore, <em>finance/#</em> can also match
* the singular <em>finance</em>, where <em>#</em> represents zero levels. The topic
* level separator is meaningless in this context, because there are no levels
* to separate.</p>
*
* <p>The <span>multi-level</span> wildcard can
* be specified only on its own or next to the topic level separator character.
* Therefore, <em>#</em> and <em>finance/#</em> are both valid, but <em>finance#</em> is
* not valid. <span>The multi-level wildcard must be the last character
* used within the topic tree. For example, <em>finance/#</em> is valid but
* <em>finance/#/closingprice</em> is not valid.</span></p></dd>
*
* <dt>Single-level wildcard</dt>
* <dd><p>The plus sign (+) is a wildcard character that matches only one topic
* level. For example, <em>finance/stock/+</em> matches
* <em>finance/stock/ibm</em> and <em>finance/stock/xyz</em>,
* but not <em>finance/stock/ibm/closingprice</em>. Also, because the single-level
* wildcard matches only a single level, <em>finance/+</em> does not match <em>finance</em>.</p>
*
* <p>Use
* the single-level wildcard at any level in the topic tree, and in conjunction
* with the multilevel wildcard. Specify the single-level wildcard next to the
* topic level separator, except when it is specified on its own. Therefore,
* <em>+</em> and <em>finance/+</em> are both valid, but <em>finance+</em> is
* not valid. <span>The single-level wildcard can be used at the end of the
* topic tree or within the topic tree.
* For example, <em>finance/+</em> and <em>finance/+/ibm</em> are both valid.</span></p>
* </dd>
* </dl>
* </p>
* <p>The method returns control before the subscribe completes. Completion can
* be tracked by:
* <ul>
* <li>Waiting on the supplied token {@link MqttToken#waitForCompletion()} or</li>
* <li>Passing in a callback {@link IMqttActionListener} to this method</li>
* </ul>
* </p>
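	 * <p>For example, the following sketch subscribes to two topic filters at
	 * different QoS levels ({@code client} is assumed to be a connected
	 * {@link MqttAndroidClient}):</p>
	 * <pre>
	 * String[] topics = {"finance/stock/ibm/#", "finance/stock/xyz/+"};
	 * int[] qos = {1, 2};
	 * client.subscribe(topics, qos, null, null);
	 * </pre>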
*
* @param topic one or more topics to subscribe to, which can include wildcards
	 * @param qos the maximum quality of service to subscribe each topic at. Messages
* published at a lower quality of service will be received at the published
* QoS. Messages published at a higher quality of service will be received using
* the QoS specified on the subscribe.
* @param userContext optional object used to pass context to the callback. Use
* null if not required.
* @param callback optional listener that will be notified when subscribe
* has completed
* @return token used to track and wait for the subscribe to complete. The token
* will be passed to callback methods if set.
* @throws MqttException if there was an error registering the subscription.
* @throws IllegalArgumentException if the two supplied arrays are not the same size.
*/
@Override
public IMqttToken subscribe(String[] topic, int[] qos, Object userContext,
IMqttActionListener callback) throws MqttException {
IMqttToken token = new MqttTokenAndroid(this, userContext,
callback, topic);
String activityToken = storeToken(token);
mqttService.subscribe(clientHandle, topic, qos, null, activityToken);
return token;
}
/**
* Requests the server unsubscribe the client from a topic.
*
* @param topic the topic to unsubscribe from. It must match a topic
* specified on an earlier subscribe.
* @return token used to track and wait for the unsubscribe to complete. The token
* will be passed to callback methods if set.
* @throws MqttException if there was an error unregistering the subscription.
*
* @see #unsubscribe(String[], Object, IMqttActionListener)
*/
@Override
public IMqttToken unsubscribe(String topic) throws MqttException {
return unsubscribe(topic, null, null);
}
/**
* Requests the server unsubscribe the client from one or more topics.
*
* @param topic one or more topics to unsubscribe from. Each topic
* must match one specified on an earlier subscribe.
* @return token used to track and wait for the unsubscribe to complete. The token
* will be passed to callback methods if set.
* @throws MqttException if there was an error unregistering the subscription.
*
* @see #unsubscribe(String[], Object, IMqttActionListener)
*/
@Override
public IMqttToken unsubscribe(String[] topic) throws MqttException {
return unsubscribe(topic, null, null);
}
/**
	 * Requests the server unsubscribe the client from a topic.
*
* @param topic the topic to unsubscribe from. It must match a topic
* specified on an earlier subscribe.
* @param userContext optional object used to pass context to the callback. Use
* null if not required.
* @param callback optional listener that will be notified when unsubscribe
* has completed
* @return token used to track and wait for the unsubscribe to complete. The token
* will be passed to callback methods if set.
* @throws MqttException if there was an error unregistering the subscription.
*
* @see #unsubscribe(String[], Object, IMqttActionListener)
*/
@Override
public IMqttToken unsubscribe(String topic, Object userContext,
IMqttActionListener callback) throws MqttException {
IMqttToken token = new MqttTokenAndroid(this, userContext,
callback);
String activityToken = storeToken(token);
mqttService.unsubscribe(clientHandle, topic, null, activityToken);
return token;
}
/**
* Requests the server unsubscribe the client from one or more topics.
* <p>
	 * Unsubscribing is the opposite of subscribing. When the server receives the
* unsubscribe request it looks to see if it can find a matching subscription for the
* client and then removes it. After this point the server will send no more
* messages to the client for this subscription.
* </p>
* <p>The topic(s) specified on the unsubscribe must match the topic(s)
	 * specified in the original subscribe request for the unsubscribe to succeed.
* </p>
* <p>The method returns control before the unsubscribe completes. Completion can
* be tracked by:
* <ul>
* <li>Waiting on the returned token {@link MqttToken#waitForCompletion()} or</li>
* <li>Passing in a callback {@link IMqttActionListener} to this method</li>
* </ul>
* </p>
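	 * <p>For example ({@code client} is assumed to be a connected
	 * {@link MqttAndroidClient}):</p>
	 * <pre>
	 * client.unsubscribe(new String[]{"finance/stock/ibm/#", "finance/stock/xyz/+"}, null, null);
	 * </pre>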
*
* @param topic one or more topics to unsubscribe from. Each topic
* must match one specified on an earlier subscribe.
* @param userContext optional object used to pass context to the callback. Use
* null if not required.
* @param callback optional listener that will be notified when unsubscribe
* has completed
* @return token used to track and wait for the unsubscribe to complete. The token
* will be passed to callback methods if set.
* @throws MqttException if there was an error unregistering the subscription.
*/
@Override
public IMqttToken unsubscribe(String[] topic, Object userContext,
IMqttActionListener callback) throws MqttException {
IMqttToken token = new MqttTokenAndroid(this, userContext,
callback);
String activityToken = storeToken(token);
mqttService.unsubscribe(clientHandle, topic, null, activityToken);
return token;
}
/**
* Returns the delivery tokens for any outstanding publish operations.
	 * <p>If a client has been restarted and there are messages that were in the
	 * process of being delivered when the client stopped, this method
	 * returns a token for each in-flight message, enabling the delivery to be tracked.
	 * Alternatively, the {@link MqttCallback#deliveryComplete(IMqttDeliveryToken)}
	 * callback can be used to track the delivery of outstanding messages.
	 * </p>
	 * <p>If a client connects with cleanSession true then there will be no
	 * delivery tokens as the cleanSession option deletes all earlier state.
	 * For state to be remembered the client must connect with cleanSession
	 * set to false.</p>
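	 * <p>For example (a sketch; {@code client} is an {@link MqttAndroidClient}
	 * that reconnected with cleanSession set to false):</p>
	 * <pre>
	 * for (IMqttDeliveryToken token : client.getPendingDeliveryTokens()) {
	 *     // inspect or wait on each in-flight delivery
	 * }
	 * </pre>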
* @return zero or more delivery tokens
*/
@Override
public IMqttDeliveryToken[] getPendingDeliveryTokens() {
return mqttService.getPendingDeliveryTokens(clientHandle);
}
/**
* Sets a callback listener to use for events that happen asynchronously.
* <p>There are a number of events that the listener will be notified about.
* These include:
* <ul>
* <li>A new message has arrived and is ready to be processed</li>
* <li>The connection to the server has been lost</li>
* <li>Delivery of a message to the server has completed</li>
* </ul>
* </p>
* <p>Other events that track the progress of an individual operation such
* as connect and subscribe can be tracked using the {@link MqttToken} returned from
	 * each non-blocking method or by setting an {@link IMqttActionListener} on the
	 * non-blocking method.</p>
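	 * <p>A minimal callback might look like the following sketch:</p>
	 * <pre>
	 * client.setCallback(new MqttCallback() {
	 *     public void connectionLost(Throwable cause) {
	 *         // the connection to the server has been lost
	 *     }
	 *     public void messageArrived(String topic, MqttMessage message) throws Exception {
	 *         // a new message has arrived and is ready to be processed
	 *     }
	 *     public void deliveryComplete(IMqttDeliveryToken token) {
	 *         // delivery of a message to the server has completed
	 *     }
	 * });
	 * </pre>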
* @param callback which will be invoked for certain asynchronous events
*
* @see MqttCallback
*/
@Override
public void setCallback(MqttCallback callback) {
this.callback = callback;
}
/**
	 * Identify the callback to be invoked when making tracing calls back into
	 * the Activity.
*
* @param traceCallback handler
*/
public void setTraceCallback(MqttTraceHandler traceCallback) {
this.traceCallback = traceCallback;
// mqttService.setTraceCallbackId(traceCallbackId);
}
/**
	 * Turn tracing on and off.
	 *
	 * @param traceEnabled set <code>true</code> to enable trace, or <code>false</code> to disable it
*
*/
public void setTraceEnabled(boolean traceEnabled) {
this.traceEnabled = traceEnabled;
		if (mqttService != null) {
			mqttService.setTraceEnabled(traceEnabled);
		}
}
/**
* <p>
* Process incoming Intent objects representing the results of operations
* and asynchronous activities such as message received
* </p>
* <p>
	 * <strong>Note:</strong> This is only a public method because the Android
	 * APIs require it.<br>
	 * This method should not be explicitly invoked.
* </p>
*/
@Override
public void onReceive(Context context, Intent intent) {
Bundle data = intent.getExtras();
String handleFromIntent = data
.getString(MqttServiceConstants.CALLBACK_CLIENT_HANDLE);
if ((handleFromIntent == null)
|| (!handleFromIntent.equals(clientHandle))) {
return;
}
String action = data.getString(MqttServiceConstants.CALLBACK_ACTION);
if (MqttServiceConstants.CONNECT_ACTION.equals(action)) {
connectAction(data);
}
else if (MqttServiceConstants.MESSAGE_ARRIVED_ACTION.equals(action)) {
messageArrivedAction(data);
}
else if (MqttServiceConstants.SUBSCRIBE_ACTION.equals(action)) {
subscribeAction(data);
}
else if (MqttServiceConstants.UNSUBSCRIBE_ACTION.equals(action)) {
unSubscribeAction(data);
}
else if (MqttServiceConstants.SEND_ACTION.equals(action)) {
sendAction(data);
}
else if (MqttServiceConstants.MESSAGE_DELIVERED_ACTION.equals(action)) {
messageDeliveredAction(data);
}
else if (MqttServiceConstants.ON_CONNECTION_LOST_ACTION
.equals(action)) {
connectionLostAction(data);
}
else if (MqttServiceConstants.DISCONNECT_ACTION.equals(action)) {
disconnected(data);
}
else if (MqttServiceConstants.TRACE_ACTION.equals(action)) {
traceAction(data);
		} else {
mqttService.traceError(MqttService.TAG, "Callback action doesn't exist.");
}
}
/**
* Acknowledges a message received on the {@link MqttCallback#messageArrived(String, MqttMessage)}
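	 * <p>Only relevant when the client was created with {@link Ack#MANUAL_ACK}.
	 * A sketch only, assuming {@link ParcelableMqttMessage} exposes the received
	 * message id through a {@code getMessageId()} accessor:</p>
	 * <pre>
	 * public void messageArrived(String topic, MqttMessage message) throws Exception {
	 *     // process the message, then acknowledge it manually
	 *     client.acknowledgeMessage(((ParcelableMqttMessage) message).getMessageId());
	 * }
	 * </pre>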
* @param messageId the messageId received from the MqttMessage (To access this field you need to cast {@link MqttMessage} to {@link ParcelableMqttMessage})
* @return whether or not the message was successfully acknowledged
*/
public boolean acknowledgeMessage(String messageId) {
if (messageAck == Ack.MANUAL_ACK) {
Status status = mqttService.acknowledgeMessageArrival(clientHandle, messageId);
return status == Status.OK;
}
return false;
}
/**
* Process the results of a connection
*
	 * @param data the result data from the service
*/
private void connectAction(Bundle data) {
IMqttToken token = removeMqttToken(data);
simpleAction(token, data);
}
/**
* Process a notification that we have disconnected
*
	 * @param data the notification data from the service
*/
private void disconnected(Bundle data) {
clientHandle = null; // avoid reuse!
IMqttToken token = removeMqttToken(data);
if (token != null) {
((MqttTokenAndroid) token).notifyComplete();
}
if (callback != null) {
callback.connectionLost(null);
}
}
/**
* Process a Connection Lost notification
*
	 * @param data the notification data from the service
*/
private void connectionLostAction(Bundle data) {
if (callback != null) {
Exception reason = (Exception) data
.getSerializable(MqttServiceConstants.CALLBACK_EXCEPTION);
callback.connectionLost(reason);
}
}
/**
* Common processing for many notifications
*
* @param token
	 *            the token associated with the action being undertaken
* @param data
* the result data
*/
private void simpleAction(IMqttToken token, Bundle data) {
if (token != null) {
Status status = (Status) data
.getSerializable(MqttServiceConstants.CALLBACK_STATUS);
if (status == Status.OK) {
((MqttTokenAndroid) token).notifyComplete();
}
else {
Exception exceptionThrown = (Exception) data.getSerializable(MqttServiceConstants.CALLBACK_EXCEPTION);
((MqttTokenAndroid) token)
.notifyFailure(exceptionThrown);
}
}
}
/**
* Process notification of a publish(send) operation
*
	 * @param data the result data from the service
*/
private void sendAction(Bundle data) {
IMqttToken token = getMqttToken(data); // get, don't remove - will
// remove on delivery
simpleAction(token, data);
}
/**
* Process notification of a subscribe operation
*
	 * @param data the result data from the service
*/
private void subscribeAction(Bundle data) {
IMqttToken token = removeMqttToken(data);
simpleAction(token, data);
}
/**
* Process notification of an unsubscribe operation
*
	 * @param data the result data from the service
*/
private void unSubscribeAction(Bundle data) {
IMqttToken token = removeMqttToken(data);
simpleAction(token, data);
}
/**
* Process notification of a published message having been delivered
*
	 * @param data the notification data from the service
*/
private void messageDeliveredAction(Bundle data) {
IMqttToken token = removeMqttToken(data);
if (token != null) {
if (callback != null) {
Status status = (Status) data
.getSerializable(MqttServiceConstants.CALLBACK_STATUS);
if (status == Status.OK) {
callback.deliveryComplete((IMqttDeliveryToken) token);
}
}
}
}
/**
* Process notification of a message's arrival
*
	 * @param data the message data from the service
*/
private void messageArrivedAction(Bundle data) {
if (callback != null) {
String messageId = data
.getString(MqttServiceConstants.CALLBACK_MESSAGE_ID);
String destinationName = data
.getString(MqttServiceConstants.CALLBACK_DESTINATION_NAME);
ParcelableMqttMessage message = (ParcelableMqttMessage) data
.getParcelable(MqttServiceConstants.CALLBACK_MESSAGE_PARCEL);
try {
if (messageAck == Ack.AUTO_ACK) {
callback.messageArrived(destinationName, message);
mqttService.acknowledgeMessageArrival(clientHandle, messageId);
}
else {
message.messageId = messageId;
callback.messageArrived(destinationName, message);
}
// let the service discard the saved message details
}
catch (Exception e) {
// Swallow the exception
}
}
}
/**
* Process trace action - pass trace data back to the callback
*
	 * @param data the trace data from the service
*/
private void traceAction(Bundle data) {
if (traceCallback != null) {
String severity = data.getString(MqttServiceConstants.CALLBACK_TRACE_SEVERITY);
String message = data.getString(MqttServiceConstants.CALLBACK_ERROR_MESSAGE);
String tag = data.getString(MqttServiceConstants.CALLBACK_TRACE_TAG);
			// compare by value, not reference, since the strings come from a Bundle
			if (MqttServiceConstants.TRACE_DEBUG.equals(severity)) {
				traceCallback.traceDebug(tag, message);
			}
			else if (MqttServiceConstants.TRACE_ERROR.equals(severity)) {
				traceCallback.traceError(tag, message);
			}
else
{
Exception e = (Exception) data.getSerializable(MqttServiceConstants.CALLBACK_EXCEPTION);
traceCallback.traceException(tag, message, e);
}
}
}
/**
* @param token
* identifying an operation
* @return an identifier for the token which can be passed to the Android
* Service
*/
private synchronized String storeToken(IMqttToken token) {
tokenMap.put(tokenNumber, token);
return Integer.toString(tokenNumber++);
}
/**
* Get a token identified by a string, and remove it from our map
*
	 * @param data the result data containing the activity token
* @return the token
*/
private synchronized IMqttToken removeMqttToken(Bundle data) {
String activityToken = data.getString(MqttServiceConstants.CALLBACK_ACTIVITY_TOKEN);
		if (activityToken != null) {
			int tokenNum = Integer.parseInt(activityToken); // local name avoids shadowing the tokenNumber field
			IMqttToken token = tokenMap.get(tokenNum);
			tokenMap.delete(tokenNum);
return token;
}
return null;
}
/**
	 * Get a token identified by a string, without removing it from our map
	 *
	 * @param data the result data containing the activity token
* @return the token
*/
private synchronized IMqttToken getMqttToken(Bundle data) {
String activityToken = data
.getString(MqttServiceConstants.CALLBACK_ACTIVITY_TOKEN);
IMqttToken token = tokenMap.get(Integer.parseInt(activityToken));
return token;
}
/**
* Get the SSLSocketFactory using SSL key store and password
	 * <p>A convenience method, which will help the user to create an SSLSocketFactory object</p>
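	 * <p>For example (a sketch only; {@code R.raw.my_keystore} and the password
	 * are placeholders for an application's own BKS key store):</p>
	 * <pre>
	 * InputStream keyStore = context.getResources().openRawResource(R.raw.my_keystore);
	 * MqttConnectOptions options = new MqttConnectOptions();
	 * options.setSocketFactory(client.getSSLSocketFactory(keyStore, "password"));
	 * </pre>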
*
* @param keyStore the SSL key store which is generated by some SSL key tool, such as keytool in Java JDK
* @param password the password of the key store which is set when the key store is generated
* @return SSLSocketFactory used to connect to the server with SSL authentication
* @throws MqttSecurityException if there was any error when getting the SSLSocketFactory
*/
public SSLSocketFactory getSSLSocketFactory (InputStream keyStore, String password) throws MqttSecurityException {
		try {
			KeyStore ts = KeyStore.getInstance("BKS");
			ts.load(keyStore, password.toCharArray());
			TrustManagerFactory tmf = TrustManagerFactory.getInstance("X509");
			tmf.init(ts);
			TrustManager[] tm = tmf.getTrustManagers();
			SSLContext ctx = SSLContext.getInstance("SSL");
			ctx.init(null, tm, null);
			return ctx.getSocketFactory();
} catch (KeyStoreException e) {
throw new MqttSecurityException(e);
} catch (CertificateException e) {
throw new MqttSecurityException(e);
} catch (FileNotFoundException e) {
throw new MqttSecurityException(e);
} catch (IOException e) {
throw new MqttSecurityException(e);
} catch (NoSuchAlgorithmException e) {
throw new MqttSecurityException(e);
} catch (KeyManagementException e) {
throw new MqttSecurityException(e);
}
}
@Override
public void disconnectForcibly() throws MqttException {
throw new UnsupportedOperationException();
}
@Override
public void disconnectForcibly(long disconnectTimeout) throws MqttException {
throw new UnsupportedOperationException();
}
@Override
public void disconnectForcibly(long quiesceTimeout, long disconnectTimeout)
throws MqttException {
throw new UnsupportedOperationException();
}
@Override
public MqttProtocolVersion getProtocolVersion() {
throw new UnsupportedOperationException();
}
@Override
public void setProtocolVersion(MqttProtocolVersion version) {
throw new UnsupportedOperationException();
}
}
| org.eclipse.paho.android.service/org.eclipse.paho.android.service/src/org/eclipse/paho/android/service/MqttAndroidClient.java | /*
============================================================================
Licensed Materials - Property of IBM
5747-SM3
(C) Copyright IBM Corp. 1999, 2012 All Rights Reserved.
US Government Users Restricted Rights - Use, duplication or
disclosure restricted by GSA ADP Schedule Contract with
IBM Corp.
============================================================================
*/
package org.eclipse.paho.android.service;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.security.KeyManagementException;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSocketFactory;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import org.eclipse.paho.client.mqttv3.IMqttActionListener;
import org.eclipse.paho.client.mqttv3.IMqttAsyncClient;
import org.eclipse.paho.client.mqttv3.IMqttDeliveryToken;
import org.eclipse.paho.client.mqttv3.IMqttToken;
import org.eclipse.paho.client.mqttv3.MqttCallback;
import org.eclipse.paho.client.mqttv3.MqttClientPersistence;
import org.eclipse.paho.client.mqttv3.MqttConnectOptions;
import org.eclipse.paho.client.mqttv3.MqttException;
import org.eclipse.paho.client.mqttv3.MqttMessage;
import org.eclipse.paho.client.mqttv3.MqttPersistenceException;
import org.eclipse.paho.client.mqttv3.MqttProtocolVersion;
import org.eclipse.paho.client.mqttv3.MqttSecurityException;
import org.eclipse.paho.client.mqttv3.MqttToken;
import android.content.BroadcastReceiver;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.ServiceConnection;
import android.net.ConnectivityManager;
import android.net.NetworkInfo;
import android.os.Bundle;
import android.os.IBinder;
import android.util.SparseArray;
/**
* Enables an android application to communicate with an MQTT server using non-blocking methods.
* <p>
* Implementation of the MQTT asynchronous client interface {@link IMqttAsyncClient} , using the MQTT
* android service to actually interface with MQTT server. It provides android applications a simple programming interface to all features of the MQTT version 3.1
* specification including:
* <ul>
* <li>connect
* <li>publish
* <li>subscribe
* <li>unsubscribe
* <li>disconnect
* </ul>
* </p>
*/
public class MqttAndroidClient extends BroadcastReceiver implements
IMqttAsyncClient {
/**
*
* The Acknowledgment mode for messages received from {@link MqttCallback#messageArrived(String, MqttMessage)}
*
*/
public enum Ack {
/**
* As soon as the {@link MqttCallback#messageArrived(String, MqttMessage)} returns
* the message has been acknowledged as received .
*/
AUTO_ACK,
/**
* When {@link MqttCallback#messageArrived(String, MqttMessage)} returns the message
* will not be acknowledged as received, the application will have to make an acknowledgment call
* to {@link MqttAndroidClient} using {@link MqttAndroidClient#acknowledgeMessage(String)}
*/
MANUAL_ACK
}
private static final String SERVICE_NAME = "org.eclipse.paho.android.service.MqttService";
private static final int BIND_SERVICE_FLAG = 0;
private static ExecutorService pool = Executors.newCachedThreadPool();
/**
* ServiceConnection to process when we bind to our service
*/
private final class MyServiceConnection implements ServiceConnection {
@Override
public void onServiceConnected(ComponentName name, IBinder binder) {
mqttService = ((MqttServiceBinder) binder).getService();
// now that we have the service available, we can actually
// connect...
doConnect();
}
@Override
public void onServiceDisconnected(ComponentName name) {
mqttService = null;
}
}
// Listener for when the service is connected or disconnected
private MyServiceConnection serviceConnection = new MyServiceConnection();
// The Android Service which will process our mqtt calls
private MqttService mqttService;
// An identifier for the underlying client connection, which we can pass to
// the service
private String clientHandle;
Context myContext;
// We hold the various tokens in a collection and pass identifiers for them
// to the service
private SparseArray<IMqttToken> tokenMap = new SparseArray<IMqttToken>();
private int tokenNumber = 0;
// Connection data
private String serverURI;
private String clientId;
private MqttClientPersistence persistence = null;
private MqttConnectOptions connectOptions;
private IMqttToken connectToken;
// The MqttCallback provided by the application
private MqttCallback callback;
private MqttTraceHandler traceCallback;
//The acknowledgment that a message has been processed by the application
private Ack messageAck;
private boolean traceEnabled = false;
/**
* Constructor - create an MqttAndroidClient that can be used to communicate with an MQTT server on android
*
* @param context
* object used to pass context to the callback.
* @param serverURI
* specifies the protocol, host name and port to be used to
* connect to an MQTT server
* @param clientId
* specifies the name by which this connection should be
* identified to the server
*/
public MqttAndroidClient(Context context, String serverURI,
String clientId) {
this(context, serverURI, clientId, null, Ack.AUTO_ACK);
}
/**
* Constructor - create an MqttAndroidClient that can be used to communicate with an MQTT server on android
* @param ctx Application's context
* @param serverURI specifies the protocol, host name and port to be used to connect to an MQTT server
* @param clientId specifies the name by which this connection should be identified to the server
* @param ackType how the application wishes to acknowledge a message has been processed
*/
public MqttAndroidClient(Context ctx, String serverURI, String clientId, Ack ackType) {
this(ctx, serverURI, clientId, null, ackType);
}
/**
* Constructor - create an MqttAndroidClient that can be used to communicate with an MQTT server on android
* @param ctx Application's context
* @param serverURI specifies the protocol, host name and port to be used to connect to an MQTT server
* @param clientId specifies the name by which this connection should be identified to the server
* @param persistence The object to use to store persisted data
*/
public MqttAndroidClient(Context ctx, String serverURI, String clientId, MqttClientPersistence persistence) {
this(ctx, serverURI, clientId, null, Ack.AUTO_ACK);
}
/**
* constructor
*
* @param context
* used to pass context to the callback.
* @param serverURI
* specifies the protocol, host name and port to be used to
* connect to an MQTT server
* @param clientId
* specifies the name by which this connection should be
* identified to the server
* @param persistence
* the persistence class to use to store in-flight message. If null then the
* default persistence mechanism is used
* @param ackType
* how the application wishes to acknowledge a message has been processed.
*/
public MqttAndroidClient(Context context, String serverURI,
String clientId, MqttClientPersistence persistence, Ack ackType) {
myContext = context;
this.serverURI = serverURI;
this.clientId = clientId;
this.persistence = persistence;
messageAck = ackType;
}
/**
* Determines if this client is currently connected to the server.
*
* @return <code>true</code> if connected, <code>false</code> otherwise.
*/
@Override
public boolean isConnected() {
return mqttService.isConnected(clientHandle);
}
/**
* Returns the client ID used by this client.
* <p>All clients connected to the
* same server or server farm must have a unique ID.
* </p>
*
* @return the client ID used by this client.
*/
@Override
public String getClientId() {
return clientId;
}
/**
* Returns the URI address of the server used by this client.
* <p>The format of the returned String is the same as that used on the constructor.</p>
*
* @return the server's address, as a URI String.
*/
@Override
public String getServerURI() {
return serverURI;
}
/**
* Close the client
* Releases all resource associated with the client. After the client has
* been closed it cannot be reused. For instance attempts to connect will fail.
* @throws MqttException if the client is not disconnected.
*/
@Override
public void close() {
if (clientHandle == null) {
clientHandle = mqttService.getClient(serverURI, clientId, persistence);
}
mqttService.close(clientHandle);
}
/**
* Connects to an MQTT server using the default options.
* <p>The default options are specified in {@link MqttConnectOptions} class.
* </p>
*
* @throws MqttException for any connected problems
* @return token used to track and wait for the connect to complete. The token
* will be passed to the callback methods if a callback is set.
* @see #connect(MqttConnectOptions, Object, IMqttActionListener)
*/
@Override
public IMqttToken connect() throws MqttException {
return connect(null, null);
}
/**
* Connects to an MQTT server using the provided connect options.
* <p>The connection will be established using the options specified in the
* {@link MqttConnectOptions} parameter.
* </p>
*
* @param options a set of connection parameters that override the defaults.
* @throws MqttException for any connected problems
* @return token used to track and wait for the connect to complete. The token
* will be passed to any callback that has been set.
* @see #connect(MqttConnectOptions, Object, IMqttActionListener)
*/
@Override
public IMqttToken connect(MqttConnectOptions options) throws MqttException {
return connect(options, null, null);
}
/**
* Connects to an MQTT server using the default options.
* <p>The default options are specified in {@link MqttConnectOptions} class.
* </p>
*
* @param userContext optional object used to pass context to the callback. Use
* null if not required.
* @param callback optional listener that will be notified when the connect completes. Use
* null if not required.
* @throws MqttException for any connected problems
* @return token used to track and wait for the connect to complete. The token
* will be passed to any callback that has been set.
* @see #connect(MqttConnectOptions, Object, IMqttActionListener)
*/
@Override
public IMqttToken connect(Object userContext, IMqttActionListener callback)
throws MqttException {
return connect(new MqttConnectOptions(), userContext, callback);
}
/**
* Connects to an MQTT server using the specified options.
* <p>The server to connect to is specified on the constructor.
* It is recommended to call {@link #setCallback(MqttCallback)} prior to
* connecting in order that messages destined for the client can be accepted
* as soon as the client is connected.
* </p>
* <p>The method returns control before the connect completes. Completion can
* be tracked by:
* <ul>
* <li>Waiting on the returned token {@link IMqttToken#waitForCompletion()} or</li>
* <li>Passing in a callback {@link IMqttActionListener}</li>
* </ul>
* </p>
*
* @param options a set of connection parameters that override the defaults.
* @param userContext optional object for used to pass context to the callback. Use
* null if not required.
* @param callback optional listener that will be notified when the connect completes. Use
* null if not required.
* @return token used to track and wait for the connect to complete. The token
* will be passed to any callback that has been set.
* @throws MqttException for any connected problems, including communication errors
*/
@Override
public IMqttToken connect(MqttConnectOptions options, Object userContext,
IMqttActionListener callback) throws MqttException {
//check to see if there is a network connection where we can send data before attempting the connect
ConnectivityManager conManager = (ConnectivityManager) myContext.getSystemService(Context.CONNECTIVITY_SERVICE);
NetworkInfo netInf = conManager.getActiveNetworkInfo();
if ((netInf == null) || !netInf.isConnected()) {
throw new MqttException(MqttException.REASON_CODE_BROKER_UNAVAILABLE);
}
IMqttToken token = new MqttTokenAndroid(this, userContext,
callback);
connectOptions = options;
connectToken = token;
/*
* The actual connection depends on the service, which we start and bind
* to here, but which we can't actually use until the serviceConnection
* onServiceConnected() method has run (asynchronously), so the
* connection itself takes place in the onServiceConnected() method
*/
if (mqttService == null) { // First time - must bind to the service
Intent serviceStartIntent = new Intent();
serviceStartIntent.setClassName(myContext, SERVICE_NAME);
Object service = myContext.startService(serviceStartIntent);
if (service == null) {
IMqttActionListener listener = token.getActionCallback();
if (listener != null) {
listener.onFailure(token, new RuntimeException(
"cannot start service " + SERVICE_NAME));
}
}
// We bind with BIND_SERVICE_FLAG (0), leaving us the manage the lifecycle
// until the last time it is stopped by a call to stopService()
myContext.startService(serviceStartIntent);
myContext.bindService(serviceStartIntent, serviceConnection,
Context.BIND_AUTO_CREATE);
IntentFilter filter = new IntentFilter();
filter.addAction(MqttServiceConstants.CALLBACK_TO_ACTIVITY);
myContext.registerReceiver(this, filter);
}
else {
pool.execute(new Runnable() {
@Override
public void run() {
doConnect();
}
});
}
return token;
}
/**
* Actually do the mqtt connect operation
*/
private void doConnect() {
if (clientHandle == null) {
clientHandle = mqttService.getClient(serverURI, clientId,
persistence);
}
mqttService.setTraceEnabled(traceEnabled);
mqttService.setTraceCallbackId(clientHandle);
String activityToken = storeToken(connectToken);
try {
mqttService.connect(clientHandle, connectOptions, null,
activityToken);
}
catch (MqttException e) {
IMqttActionListener listener = connectToken.getActionCallback();
if (listener != null) {
listener.onFailure(connectToken, e);
}
}
}
/**
* Disconnects from the server.
* <p>An attempt is made to quiesce the client allowing outstanding
* work to complete before disconnecting. It will wait
* for a maximum of 30 seconds for work to quiesce before disconnecting.
* This method must not be called from inside {@link MqttCallback} methods.
* </p>
*
* @return token used to track and wait for disconnect to complete. The token
* will be passed to any callback that has been set.
* @throws MqttException for problems encountered while disconnecting
* @see #disconnect(long, Object, IMqttActionListener)
*/
@Override
public IMqttToken disconnect() throws MqttException {
IMqttToken token = new MqttTokenAndroid(this, null,
(IMqttActionListener) null);
String activityToken = storeToken(token);
mqttService.disconnect(clientHandle, null, activityToken);
return token;
}
/**
* Disconnects from the server.
* <p>An attempt is made to quiesce the client allowing outstanding
* work to complete before disconnecting. It will wait
* for a maximum of the specified quiesce time for work to complete before disconnecting.
* This method must not be called from inside {@link MqttCallback} methods.
* </p>
* @param quiesceTimeout the amount of time in milliseconds to allow for
* existing work to finish before disconnecting. A value of zero or less
* means the client will not quiesce.
* @return token used to track and wait for disconnect to complete. The token
* will be passed to the callback methods if a callback is set.
* @throws MqttException for problems encountered while disconnecting
* @see #disconnect(long, Object, IMqttActionListener)
*/
@Override
public IMqttToken disconnect(long quiesceTimeout) throws MqttException {
IMqttToken token = new MqttTokenAndroid(this, null,
(IMqttActionListener) null);
String activityToken = storeToken(token);
mqttService.disconnect(clientHandle, quiesceTimeout, null,
activityToken);
return token;
}
/**
* Disconnects from the server.
* <p>An attempt is made to quiesce the client allowing outstanding
* work to complete before disconnecting. It will wait
* for a maximum of 30 seconds for work to quiesce before disconnecting.
* This method must not be called from inside {@link MqttCallback} methods.
* </p>
*
* @param userContext optional object used to pass context to the callback. Use
* null if not required.
* @param callback optional listener that will be notified when the disconnect completes. Use
* null if not required.
* @return token used to track and wait for the disconnect to complete. The token
* will be passed to any callback that has been set.
* @throws MqttException for problems encountered while disconnecting
* @see #disconnect(long, Object, IMqttActionListener)
*/
@Override
public IMqttToken disconnect(Object userContext,
IMqttActionListener callback) throws MqttException {
IMqttToken token = new MqttTokenAndroid(this, userContext,
callback);
String activityToken = storeToken(token);
mqttService.disconnect(clientHandle, null, activityToken);
return token;
}
/**
* Disconnects from the server.
* <p>
* The client will wait for {@link MqttCallback} methods to
* complete. It will then wait for up to the quiesce timeout to allow for
* work which has already been initiated to complete. For instance when a QoS 2
* message has started flowing to the server but the QoS 2 flow has not completed.It
* prevents new messages being accepted and does not send any messages that have
* been accepted but not yet started delivery across the network to the server. When
* work has completed or after the quiesce timeout, the client will disconnect from
* the server. If the cleanSession flag was set to false and is set to false the
* next time a connection is made QoS 1 and 2 messages that
* were not previously delivered will be delivered.</p>
* <p>This method must not be called from inside {@link MqttCallback} methods.</p>
* <p>The method returns control before the disconnect completes. Completion can
* be tracked by:
* <ul>
* <li>Waiting on the returned token {@link IMqttToken#waitForCompletion()} or</li>
* <li>Passing in a callback {@link IMqttActionListener}</li>
* </ul>
* </p>
*
* @param quiesceTimeout the amount of time in milliseconds to allow for
* existing work to finish before disconnecting. A value of zero or less
* means the client will not quiesce.
* @param userContext optional object used to pass context to the callback. Use
* null if not required.
* @param callback optional listener that will be notified when the disconnect completes. Use
* null if not required.
* @return token used to track and wait for the connect to complete. The token
* will be passed to any callback that has been set.
* @throws MqttException for problems encountered while disconnecting
*/
@Override
public IMqttToken disconnect(long quiesceTimeout, Object userContext,
IMqttActionListener callback) throws MqttException {
IMqttToken token = new MqttTokenAndroid(this, userContext,
callback);
String activityToken = storeToken(token);
mqttService.disconnect(clientHandle, quiesceTimeout, null,
activityToken);
return token;
}
/**
* Publishes a message to a topic on the server.
* <p>A convenience method, which will
* create a new {@link MqttMessage} object with a byte array payload and the
* specified QoS, and then publish it.
* </p>
*
* @param topic to deliver the message to, for example "finance/stock/ibm".
* @param payload the byte array to use as the payload
* @param qos the Quality of Service to deliver the message at. Valid values are 0, 1 or 2.
* @param retained whether or not this message should be retained by the server.
* @return token used to track and wait for the publish to complete. The token
* will be passed to any callback that has been set.
* @throws MqttPersistenceException when a problem occurs storing the message
* @throws IllegalArgumentException if value of QoS is not 0, 1 or 2.
* @throws MqttException for other errors encountered while publishing the message.
* For instance if too many messages are being processed.
* @see #publish(String, MqttMessage, Object, IMqttActionListener)
*/
@Override
public IMqttDeliveryToken publish(String topic, byte[] payload, int qos,
boolean retained) throws MqttException, MqttPersistenceException {
return publish(topic, payload, qos, retained, null, null);
}
/**
* Publishes a message to a topic on the server.
* Takes an {@link MqttMessage} message and delivers it to the server at the
* requested quality of service.
*
* @param topic to deliver the message to, for example "finance/stock/ibm".
* @param message to deliver to the server
* @return token used to track and wait for the publish to complete. The token
* will be passed to any callback that has been set.
* @throws MqttPersistenceException when a problem occurs storing the message
* @throws IllegalArgumentException if value of QoS is not 0, 1 or 2.
* @throws MqttException for other errors encountered while publishing the message.
* For instance client not connected.
* @see #publish(String, MqttMessage, Object, IMqttActionListener)
*/
@Override
public IMqttDeliveryToken publish(String topic, MqttMessage message)
throws MqttException, MqttPersistenceException {
return publish(topic, message, null, null);
}
/**
* Publishes a message to a topic on the server.
* <p>A convenience method, which will
* create a new {@link MqttMessage} object with a byte array payload and the
* specified QoS, and then publish it.
* </p>
*
* @param topic to deliver the message to, for example "finance/stock/ibm".
* @param payload the byte array to use as the payload
* @param qos the Quality of Service to deliver the message at. Valid values are 0, 1 or 2.
* @param retained whether or not this message should be retained by the server.
* @param userContext optional object used to pass context to the callback. Use
* null if not required.
* @param callback optional listener that will be notified when message delivery
* hsa completed to the requested quality of service
* @return token used to track and wait for the publish to complete. The token
* will be passed to any callback that has been set.
* @throws MqttPersistenceException when a problem occurs storing the message
* @throws IllegalArgumentException if value of QoS is not 0, 1 or 2.
* @throws MqttException for other errors encountered while publishing the message.
* For instance client not connected.
* @see #publish(String, MqttMessage, Object, IMqttActionListener)
*/
@Override
public IMqttDeliveryToken publish(String topic, byte[] payload, int qos,
boolean retained, Object userContext, IMqttActionListener callback)
throws MqttException, MqttPersistenceException {
MqttMessage message = new MqttMessage(payload);
message.setQos(qos);
message.setRetained(retained);
MqttDeliveryTokenAndroid token = new MqttDeliveryTokenAndroid(
this, userContext, callback, message);
String activityToken = storeToken(token);
IMqttDeliveryToken internalToken = mqttService.publish(clientHandle,
topic, payload, qos, retained, null, activityToken);
token.setDelegate(internalToken);
return token;
}
/**
* Publishes a message to a topic on the server.
* <p>
* Once this method has returned cleanly, the message has been accepted for publication by the
* client and will be delivered on a background thread.
* In the event the connection fails or the client stops. Messages will be delivered to the
* requested quality of service once the connection is re-established to the server on condition that:
* <ul>
* <li>The connection is re-established with the same clientID
* <li>The original connection was made with (@link MqttConnectOptions#setCleanSession(boolean)}
* set to false
* <li>The connection is re-established with (@link MqttConnectOptions#setCleanSession(boolean)}
* set to false
* <li>Depending when the failure occurs QoS 0 messages may not be delivered.
* </ul>
* </p>
*
* <p>When building an application,
* the design of the topic tree should take into account the following principles
* of topic name syntax and semantics:</p>
*
* <ul>
* <li>A topic must be at least one character long.</li>
* <li>Topic names are case sensitive. For example, <em>ACCOUNTS</em> and <em>Accounts</em> are
* two different topics.</li>
* <li>Topic names can include the space character. For example, <em>Accounts
* payable</em> is a valid topic.</li>
* <li>A leading "/" creates a distinct topic. For example, <em>/finance</em> is
* different from <em>finance</em>. <em>/finance</em> matches "+/+" and "/+", but
* not "+".</li>
* <li>Do not include the null character (Unicode <samp class="codeph">\x0000</samp>) in
* any topic.</li>
* </ul>
*
* <p>The following principles apply to the construction and content of a topic
* tree:</p>
*
* <ul>
* <li>The length is limited to 64k but within that there are no limits to the
* number of levels in a topic tree.</li>
* <li>There can be any number of root nodes; that is, there can be any number
* of topic trees.</li>
* </ul>
* </p>
* <p>The method returns control before the publish completes. Completion can
* be tracked by:
* <ul>
* <li>Setting an {@link IMqttAsyncClient#setCallback(MqttCallback)} where the
* {@link MqttCallback#deliveryComplete(IMqttDeliveryToken)}
* method will be called.</li>
* <li>Waiting on the returned token {@link MqttToken#waitForCompletion()} or</li>
* <li>Passing in a callback {@link IMqttActionListener} to this method</li>
* </ul>
* </p>
*
* @param topic to deliver the message to, for example "finance/stock/ibm".
* @param message to deliver to the server
* @param userContext optional object used to pass context to the callback. Use
* null if not required.
* @param callback optional listener that will be notified when message delivery
* has completed to the requested quality of service
* @return token used to track and wait for the publish to complete. The token
* will be passed to callback methods if set.
* @throws MqttPersistenceException when a problem occurs storing the message
* @throws IllegalArgumentException if value of QoS is not 0, 1 or 2.
* @throws MqttException for other errors encountered while publishing the message.
* For instance client not connected.
* @see MqttMessage
*/
@Override
public IMqttDeliveryToken publish(String topic, MqttMessage message,
Object userContext, IMqttActionListener callback)
throws MqttException, MqttPersistenceException {
MqttDeliveryTokenAndroid token = new MqttDeliveryTokenAndroid(
this, userContext, callback, message);
String activityToken = storeToken(token);
IMqttDeliveryToken internalToken = mqttService.publish(clientHandle,
topic, message, null, activityToken);
token.setDelegate(internalToken);
return token;
}
/**
* Subscribe to a topic, which may include wildcards.
*
* @param topic the topic to subscribe to, which can include wildcards.
* @param qos the maximum quality of service at which to subscribe. Messages
* published at a lower quality of service will be received at the published
* QoS. Messages published at a higher quality of service will be received using
* the QoS specified on the subscribe.
* @return token used to track and wait for the subscribe to complete. The token
* will be passed to callback methods if set.
* @throws MqttSecurityException for security related problems
* @throws MqttException for non security related problems
*
* @see #subscribe(String[], int[], Object, IMqttActionListener)
*/
@Override
public IMqttToken subscribe(String topic, int qos) throws MqttException,
MqttSecurityException {
return subscribe(topic, qos, null, null);
}
/**
* Subscribe to multiple topics, each of which may include wildcards.
*
* <p>Provides an optimized way to subscribe to multiple topics compared to
* subscribing to each one individually.</p>
*
* @param topic one or more topics to subscribe to, which can include wildcards
* @param qos the maximum quality of service at which to subscribe. Messages
* published at a lower quality of service will be received at the published
* QoS. Messages published at a higher quality of service will be received using
* the QoS specified on the subscribe.
* @return token used to track and wait for the subscribe to complete. The token
* will be passed to callback methods if set.
* @throws MqttSecurityException for security related problems
* @throws MqttException for non security related problems
*
* @see #subscribe(String[], int[], Object, IMqttActionListener)
*/
@Override
public IMqttToken subscribe(String[] topic, int[] qos)
throws MqttException, MqttSecurityException {
return subscribe(topic, qos, null, null);
}
/**
* Subscribe to a topic, which may include wildcards.
*
* @param topic the topic to subscribe to, which can include wildcards.
* @param qos the maximum quality of service at which to subscribe. Messages
* published at a lower quality of service will be received at the published
* QoS. Messages published at a higher quality of service will be received using
* the QoS specified on the subscribe.
* @param userContext optional object used to pass context to the callback. Use
* null if not required.
* @param callback optional listener that will be notified when subscribe
* has completed
* @return token used to track and wait for the subscribe to complete. The token
* will be passed to callback methods if set.
* @throws MqttException if there was an error registering the subscription.
*
* @see #subscribe(String[], int[], Object, IMqttActionListener)
*/
@Override
public IMqttToken subscribe(String topic, int qos, Object userContext,
IMqttActionListener callback) throws MqttException {
IMqttToken token = new MqttTokenAndroid(this, userContext,
callback, new String[]{topic});
String activityToken = storeToken(token);
mqttService.subscribe(clientHandle, topic, qos, null, activityToken);
return token;
}
/**
* Subscribes to multiple topics, each of which may include wildcards.
* <p>Provides an optimized way to subscribe to multiple topics compared to
* subscribing to each one individually.</p>
* <p>The {@link #setCallback(MqttCallback)} method
* should be called before this method, otherwise any received messages
* will be discarded.
* </p>
* <p>
* If (@link MqttConnectOptions#setCleanSession(boolean)} was set to true
* when when connecting to the server then the subscription remains in place
* until either:
* <ul>
* <li>The client disconnects</li>
* <li>An unsubscribe method is called to un-subscribe the topic</li>
* </li>
* </p>
* <p>
* If (@link MqttConnectOptions#setCleanSession(boolean)} was set to false
* when connecting to the server then the subscription remains in place
* until either:
* <ul>
* <li>An unsubscribe method is called to unsubscribe the topic</li>
* <li>The next time the client connects with cleanSession set to true</ul>
* </li>
* With cleanSession set to false the MQTT server will store messages on
* behalf of the client when the client is not connected. The next time the
* client connects with the <bold>same client ID</bold> the server will
* deliver the stored messages to the client.
* </p>
*
* <p>The "topic filter" string used when subscribing
* may contain special characters, which allow you to subscribe to multiple topics
* at once.</p>
* <p>The topic level separator is used to introduce structure into the topic, and
* can therefore be specified within the topic for that purpose. The multi-level
* wildcard and single-level wildcard can be used for subscriptions, but they
* cannot be used within a topic by the publisher of a message.
* <dl>
* <dt>Topic level separator</dt>
* <dd>The forward slash (/) is used to separate each level within
* a topic tree and provide a hierarchical structure to the topic space. The
* use of the topic level separator is significant when the two wildcard characters
* are encountered in topics specified by subscribers.</dd>
*
* <dt>Multi-level wildcard</dt>
* <dd><p>The number sign (#) is a wildcard character that matches
* any number of levels within a topic. For example, if you subscribe to
* <span><span class="filepath">finance/stock/ibm/#</span></span>, you receive
* messages on these topics:
* <pre> finance/stock/ibm<br /> finance/stock/ibm/closingprice<br /> finance/stock/ibm/currentprice</pre>
* </p>
* <p>The multi-level wildcard
* can represent zero or more levels. Therefore, <em>finance/#</em> can also match
* the singular <em>finance</em>, where <em>#</em> represents zero levels. The topic
* level separator is meaningless in this context, because there are no levels
* to separate.</p>
*
* <p>The <span>multi-level</span> wildcard can
* be specified only on its own or next to the topic level separator character.
* Therefore, <em>#</em> and <em>finance/#</em> are both valid, but <em>finance#</em> is
* not valid. <span>The multi-level wildcard must be the last character
* used within the topic tree. For example, <em>finance/#</em> is valid but
* <em>finance/#/closingprice</em> is not valid.</span></p></dd>
*
* <dt>Single-level wildcard</dt>
* <dd><p>The plus sign (+) is a wildcard character that matches only one topic
* level. For example, <em>finance/stock/+</em> matches
* <em>finance/stock/ibm</em> and <em>finance/stock/xyz</em>,
* but not <em>finance/stock/ibm/closingprice</em>. Also, because the single-level
* wildcard matches only a single level, <em>finance/+</em> does not match <em>finance</em>.</p>
*
* <p>Use
* the single-level wildcard at any level in the topic tree, and in conjunction
* with the multilevel wildcard. Specify the single-level wildcard next to the
* topic level separator, except when it is specified on its own. Therefore,
* <em>+</em> and <em>finance/+</em> are both valid, but <em>finance+</em> is
* not valid. <span>The single-level wildcard can be used at the end of the
* topic tree or within the topic tree.
* For example, <em>finance/+</em> and <em>finance/+/ibm</em> are both valid.</span></p>
* </dd>
* </dl>
* </p>
* <p>The method returns control before the subscribe completes. Completion can
* be tracked by:
* <ul>
* <li>Waiting on the supplied token {@link MqttToken#waitForCompletion()} or</li>
* <li>Passing in a callback {@link IMqttActionListener} to this method</li>
* </ul>
* </p>
*
* @param topic one or more topics to subscribe to, which can include wildcards
* @param qos the maximum quality of service to subscribe each topic at. Messages
* published at a lower quality of service will be received at the published
* QoS. Messages published at a higher quality of service will be received using
* the QoS specified on the subscribe.
* @param userContext optional object used to pass context to the callback. Use
* null if not required.
* @param callback optional listener that will be notified when subscribe
* has completed
* @return token used to track and wait for the subscribe to complete. The token
* will be passed to callback methods if set.
* @throws MqttException if there was an error registering the subscription.
* @throws IllegalArgumentException if the two supplied arrays are not the same size.
*/
@Override
public IMqttToken subscribe(String[] topic, int[] qos, Object userContext,
IMqttActionListener callback) throws MqttException {
IMqttToken token = new MqttTokenAndroid(this, userContext,
callback, topic);
String activityToken = storeToken(token);
mqttService.subscribe(clientHandle, topic, qos, null, activityToken);
return token;
}
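/*
 * Editor's note: an illustrative sketch, not part of the original source, exercising the
 * wildcard rules documented above with the javadoc's own finance examples. "client" is a
 * hypothetical connected instance.
 *
 *   // "finance/stock/ibm/#" matches finance/stock/ibm and every level below it;
 *   // "finance/stock/+" matches finance/stock/ibm but not finance/stock/ibm/closingprice
 *   client.subscribe(new String[]{"finance/stock/ibm/#", "finance/stock/+"},
 *                    new int[]{1, 0});
 */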
/**
* Requests the server unsubscribe the client from a topic.
*
* @param topic the topic to unsubscribe from. It must match a topic
* specified on an earlier subscribe.
* @return token used to track and wait for the unsubscribe to complete. The token
* will be passed to callback methods if set.
* @throws MqttException if there was an error unregistering the subscription.
*
* @see #unsubscribe(String[], Object, IMqttActionListener)
*/
@Override
public IMqttToken unsubscribe(String topic) throws MqttException {
return unsubscribe(topic, null, null);
}
/**
* Requests the server unsubscribe the client from one or more topics.
*
* @param topic one or more topics to unsubscribe from. Each topic
* must match one specified on an earlier subscribe.
* @return token used to track and wait for the unsubscribe to complete. The token
* will be passed to callback methods if set.
* @throws MqttException if there was an error unregistering the subscription.
*
* @see #unsubscribe(String[], Object, IMqttActionListener)
*/
@Override
public IMqttToken unsubscribe(String[] topic) throws MqttException {
return unsubscribe(topic, null, null);
}
/**
* Requests the server unsubscribe the client from a topic.
*
* @param topic the topic to unsubscribe from. It must match a topic
* specified on an earlier subscribe.
* @param userContext optional object used to pass context to the callback. Use
* null if not required.
* @param callback optional listener that will be notified when unsubscribe
* has completed
* @return token used to track and wait for the unsubscribe to complete. The token
* will be passed to callback methods if set.
* @throws MqttException if there was an error unregistering the subscription.
*
* @see #unsubscribe(String[], Object, IMqttActionListener)
*/
@Override
public IMqttToken unsubscribe(String topic, Object userContext,
IMqttActionListener callback) throws MqttException {
IMqttToken token = new MqttTokenAndroid(this, userContext,
callback);
String activityToken = storeToken(token);
mqttService.unsubscribe(clientHandle, topic, null, activityToken);
return token;
}
/**
* Requests the server unsubscribe the client from one or more topics.
* <p>
* Unsubscribing is the opposite of subscribing. When the server receives the
* unsubscribe request it looks to see if it can find a matching subscription for the
* client and then removes it. After this point the server will send no more
* messages to the client for this subscription.
* </p>
* <p>The topic(s) specified on the unsubscribe must match the topic(s)
* specified in the original subscribe request for the unsubscribe to succeed
* </p>
* <p>The method returns control before the unsubscribe completes. Completion can
* be tracked by:
* <ul>
* <li>Waiting on the returned token {@link MqttToken#waitForCompletion()} or</li>
* <li>Passing in a callback {@link IMqttActionListener} to this method</li>
* </ul>
* </p>
*
* @param topic one or more topics to unsubscribe from. Each topic
* must match one specified on an earlier subscribe.
* @param userContext optional object used to pass context to the callback. Use
* null if not required.
* @param callback optional listener that will be notified when unsubscribe
* has completed
* @return token used to track and wait for the unsubscribe to complete. The token
* will be passed to callback methods if set.
* @throws MqttException if there was an error unregistering the subscription.
*/
@Override
public IMqttToken unsubscribe(String[] topic, Object userContext,
IMqttActionListener callback) throws MqttException {
IMqttToken token = new MqttTokenAndroid(this, userContext,
callback);
String activityToken = storeToken(token);
mqttService.unsubscribe(clientHandle, topic, null, activityToken);
return token;
}
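/*
 * Editor's note: illustrative only, not part of the original source. The topics passed to
 * unsubscribe must match those used on the earlier subscribe; completion can be observed
 * through the returned token:
 *
 *   IMqttToken unsubToken = client.unsubscribe(
 *       new String[]{"finance/stock/ibm/#", "finance/stock/+"}, null, null);
 *   unsubToken.waitForCompletion();
 */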
/**
* Returns the delivery tokens for any outstanding publish operations.
* <p>If a client has been restarted and there are messages that were in the
* process of being delivered when the client stopped, this method
* returns a token for each in-flight message, enabling the delivery to be tracked.
* Alternatively, the {@link MqttCallback#deliveryComplete(IMqttDeliveryToken)}
* callback can be used to track the delivery of outstanding messages.
* </p>
* <p>If a client connects with cleanSession true then there will be no
* delivery tokens, as the cleanSession option deletes all earlier state.
* For state to be remembered the client must connect with cleanSession
* set to false.</p>
* @return zero or more delivery tokens
*/
@Override
public IMqttDeliveryToken[] getPendingDeliveryTokens() {
return mqttService.getPendingDeliveryTokens(clientHandle);
}
/**
* Sets a callback listener to use for events that happen asynchronously.
* <p>There are a number of events that the listener will be notified about.
* These include:
* <ul>
* <li>A new message has arrived and is ready to be processed</li>
* <li>The connection to the server has been lost</li>
* <li>Delivery of a message to the server has completed</li>
* </ul>
* </p>
* <p>Other events that track the progress of an individual operation such
* as connect and subscribe can be tracked using the {@link MqttToken} returned from
* each non-blocking method or by setting an {@link IMqttActionListener} on the
* non-blocking method.</p>
* @param callback which will be invoked for certain asynchronous events
*
* @see MqttCallback
*/
@Override
public void setCallback(MqttCallback callback) {
this.callback = callback;
}
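/*
 * Editor's note: a minimal MqttCallback sketch, not part of the original source, covering
 * the three events listed in the javadoc above:
 *
 *   client.setCallback(new MqttCallback() {
 *     public void messageArrived(String topic, MqttMessage message) throws Exception {
 *       System.out.println(topic + ": " + new String(message.getPayload()));
 *     }
 *     public void connectionLost(Throwable cause) {
 *       // the connection to the server has been lost
 *     }
 *     public void deliveryComplete(IMqttDeliveryToken token) {
 *       // delivery of a message to the server has completed
 *     }
 *   });
 */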
/**
* identify the callback to be invoked when making tracing calls back into
* the Activity
*
* @param traceCallback handler
*/
public void setTraceCallback(MqttTraceHandler traceCallback) {
this.traceCallback = traceCallback;
// mqttService.setTraceCallbackId(traceCallbackId);
}
/**
* turn tracing on and off
*
* @param traceEnabled set <code>true</code> to enable trace, otherwise, set <code>false</code> to disable trace
*
*/
public void setTraceEnabled(boolean traceEnabled) {
this.traceEnabled = traceEnabled;
if (mqttService !=null)
mqttService.setTraceEnabled(traceEnabled);
}
/**
* <p>
* Process incoming Intent objects representing the results of operations
* and asynchronous activities such as message received
* </p>
* <p>
* <strong>Note:</strong> This is only a public method because the Android
* APIs require such.<br>
* This method should not be explicitly invoked.
* </p>
*/
@Override
public void onReceive(Context context, Intent intent) {
Bundle data = intent.getExtras();
String handleFromIntent = data
.getString(MqttServiceConstants.CALLBACK_CLIENT_HANDLE);
if ((handleFromIntent == null)
|| (!handleFromIntent.equals(clientHandle))) {
return;
}
String action = data.getString(MqttServiceConstants.CALLBACK_ACTION);
if (action.equals(MqttServiceConstants.CONNECT_ACTION)) {
connectAction(data);
}
else if (action.equals(MqttServiceConstants.MESSAGE_ARRIVED_ACTION)) {
messageArrivedAction(data);
}
else if (action.equals(MqttServiceConstants.SUBSCRIBE_ACTION)) {
subscribeAction(data);
}
else if (action.equals(MqttServiceConstants.UNSUBSCRIBE_ACTION)) {
unSubscribeAction(data);
}
else if (action.equals(MqttServiceConstants.SEND_ACTION)) {
sendAction(data);
}
else if (action.equals(MqttServiceConstants.MESSAGE_DELIVERED_ACTION)) {
messageDeliveredAction(data);
}
else if (action
.equals(MqttServiceConstants.ON_CONNECTION_LOST_ACTION)) {
connectionLostAction(data);
}
else if (action.equals(MqttServiceConstants.DISCONNECT_ACTION)) {
disconnected(data);
}
else if (action.equals(MqttServiceConstants.TRACE_ACTION)) {
traceAction(data);
}
}
/**
* Acknowledges a message received on the {@link MqttCallback#messageArrived(String, MqttMessage)}
* @param messageId the messageId received from the MqttMessage (To access this field you need to cast {@link MqttMessage} to {@link ParcelableMqttMessage})
* @return whether or not the message was successfully acknowledged
*/
public boolean acknowledgeMessage(String messageId) {
if (messageAck == Ack.MANUAL_ACK) {
Status status = mqttService.acknowledgeMessageArrival(clientHandle, messageId);
return status == Status.OK;
}
return false;
}
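/*
 * Editor's note: an illustrative sketch of the MANUAL_ACK flow, not part of the original
 * source. Inside MqttCallback#messageArrived the message id is read off the
 * ParcelableMqttMessage (see the cast described in the javadoc above) and acknowledged
 * once the application has finished processing:
 *
 *   String id = ((ParcelableMqttMessage) message).messageId;
 *   // ... process the message, then:
 *   boolean acked = client.acknowledgeMessage(id);
 */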
/**
* Process the results of a connection
*
* @param data
*/
private void connectAction(Bundle data) {
IMqttToken token = removeMqttToken(data);
simpleAction(token, data);
}
/**
* Process a notification that we have disconnected
*
* @param data
*/
private void disconnected(Bundle data) {
clientHandle = null; // avoid reuse!
IMqttToken token = removeMqttToken(data);
if (token != null) {
((MqttTokenAndroid) token).notifyComplete();
}
if (callback != null) {
callback.connectionLost(null);
}
}
/**
* Process a Connection Lost notification
*
* @param data
*/
private void connectionLostAction(Bundle data) {
if (callback != null) {
Exception reason = (Exception) data
.getSerializable(MqttServiceConstants.CALLBACK_EXCEPTION);
callback.connectionLost(reason);
}
}
/**
* Common processing for many notifications
*
* @param token
* the token associated with the action being undertake
* @param data
* the result data
*/
private void simpleAction(IMqttToken token, Bundle data) {
if (token != null) {
Status status = (Status) data
.getSerializable(MqttServiceConstants.CALLBACK_STATUS);
if (status == Status.OK) {
((MqttTokenAndroid) token).notifyComplete();
}
else {
Exception exceptionThrown = (Exception) data.getSerializable(MqttServiceConstants.CALLBACK_EXCEPTION);
((MqttTokenAndroid) token)
.notifyFailure(exceptionThrown);
}
}
}
/**
* Process notification of a publish(send) operation
*
* @param data
*/
private void sendAction(Bundle data) {
IMqttToken token = getMqttToken(data); // get, don't remove - will
// remove on delivery
simpleAction(token, data);
}
/**
* Process notification of a subscribe operation
*
* @param data
*/
private void subscribeAction(Bundle data) {
IMqttToken token = removeMqttToken(data);
simpleAction(token, data);
}
/**
* Process notification of an unsubscribe operation
*
* @param data
*/
private void unSubscribeAction(Bundle data) {
IMqttToken token = removeMqttToken(data);
simpleAction(token, data);
}
/**
* Process notification of a published message having been delivered
*
* @param data
*/
private void messageDeliveredAction(Bundle data) {
IMqttToken token = removeMqttToken(data);
if (token != null) {
if (callback != null) {
Status status = (Status) data
.getSerializable(MqttServiceConstants.CALLBACK_STATUS);
if (status == Status.OK) {
callback.deliveryComplete((IMqttDeliveryToken) token);
}
}
}
}
/**
* Process notification of a message's arrival
*
* @param data
*/
private void messageArrivedAction(Bundle data) {
if (callback != null) {
String messageId = data
.getString(MqttServiceConstants.CALLBACK_MESSAGE_ID);
String destinationName = data
.getString(MqttServiceConstants.CALLBACK_DESTINATION_NAME);
ParcelableMqttMessage message = (ParcelableMqttMessage) data
.getParcelable(MqttServiceConstants.CALLBACK_MESSAGE_PARCEL);
try {
if (messageAck == Ack.AUTO_ACK) {
callback.messageArrived(destinationName, message);
mqttService.acknowledgeMessageArrival(clientHandle, messageId);
}
else {
message.messageId = messageId;
callback.messageArrived(destinationName, message);
}
// let the service discard the saved message details
}
catch (Exception e) {
// Swallow the exception
}
}
}
/**
* Process trace action - pass trace data back to the callback
*
* @param data
*/
private void traceAction(Bundle data) {
if (traceCallback != null) {
String severity = data.getString(MqttServiceConstants.CALLBACK_TRACE_SEVERITY);
String message = data.getString(MqttServiceConstants.CALLBACK_ERROR_MESSAGE);
String tag = data.getString(MqttServiceConstants.CALLBACK_TRACE_TAG);
if (MqttServiceConstants.TRACE_DEBUG.equals(severity))
traceCallback.traceDebug(tag, message);
else if (MqttServiceConstants.TRACE_ERROR.equals(severity))
traceCallback.traceError(tag, message);
else
{
Exception e = (Exception) data.getSerializable(MqttServiceConstants.CALLBACK_EXCEPTION);
traceCallback.traceException(tag, message, e);
}
}
}
/**
* @param token
* identifying an operation
* @return an identifier for the token which can be passed to the Android
* Service
*/
private synchronized String storeToken(IMqttToken token) {
tokenMap.put(tokenNumber, token);
return Integer.toString(tokenNumber++);
}
/**
* Get a token identified by a string, and remove it from our map
*
* @param data
* @return the token
*/
private synchronized IMqttToken removeMqttToken(Bundle data) {
String activityToken = data.getString(MqttServiceConstants.CALLBACK_ACTIVITY_TOKEN);
if (activityToken!=null){
int tokenNumber = Integer.parseInt(activityToken);
IMqttToken token = tokenMap.get(tokenNumber);
tokenMap.delete(tokenNumber);
return token;
}
return null;
}
/**
* Get a token identified by a string, and remove it from our map
*
* @param data
* @return the token
*/
private synchronized IMqttToken getMqttToken(Bundle data) {
String activityToken = data
.getString(MqttServiceConstants.CALLBACK_ACTIVITY_TOKEN);
IMqttToken token = tokenMap.get(Integer.parseInt(activityToken));
return token;
}
/**
* Get the SSLSocketFactory using SSL key store and password
* <p>A convenience method, which will help user to create a SSLSocketFactory object</p>
*
* @param keyStore the SSL key store which is generated by some SSL key tool, such as keytool in Java JDK
* @param password the password of the key store which is set when the key store is generated
* @return SSLSocketFactory used to connect to the server with SSL authentication
* @throws MqttSecurityException if there was any error when getting the SSLSocketFactory
*/
public SSLSocketFactory getSSLSocketFactory (InputStream keyStore, String password) throws MqttSecurityException {
try{
SSLContext ctx = null;
SSLSocketFactory sslSockFactory=null;
KeyStore ts;
ts = KeyStore.getInstance("BKS");
ts.load(keyStore, password.toCharArray());
TrustManagerFactory tmf = TrustManagerFactory.getInstance("X509");
tmf.init(ts);
TrustManager[] tm = tmf.getTrustManagers();
ctx = SSLContext.getInstance("SSL");
ctx.init(null, tm, null);
sslSockFactory=ctx.getSocketFactory();
return sslSockFactory;
} catch (KeyStoreException e) {
throw new MqttSecurityException(e);
} catch (CertificateException e) {
throw new MqttSecurityException(e);
} catch (FileNotFoundException e) {
throw new MqttSecurityException(e);
} catch (IOException e) {
throw new MqttSecurityException(e);
} catch (NoSuchAlgorithmException e) {
throw new MqttSecurityException(e);
} catch (KeyManagementException e) {
throw new MqttSecurityException(e);
}
}
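/*
 * Editor's note: a usage sketch, not part of the original source. It assumes a BKS key
 * store bundled as an Android raw resource (R.raw.keystore is hypothetical) and the
 * standard MqttConnectOptions#setSocketFactory hook:
 *
 *   InputStream in = context.getResources().openRawResource(R.raw.keystore);
 *   MqttConnectOptions options = new MqttConnectOptions();
 *   options.setSocketFactory(client.getSSLSocketFactory(in, "keystorePassword"));
 */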
@Override
public void disconnectForcibly() throws MqttException {
throw new UnsupportedOperationException();
}
@Override
public void disconnectForcibly(long disconnectTimeout) throws MqttException {
throw new UnsupportedOperationException();
}
@Override
public void disconnectForcibly(long quiesceTimeout, long disconnectTimeout)
throws MqttException {
throw new UnsupportedOperationException();
}
@Override
public MqttProtocolVersion getProtocolVersion() {
throw new UnsupportedOperationException();
}
@Override
public void setProtocolVersion(MqttProtocolVersion version) {
throw new UnsupportedOperationException();
}
}
| Fixed null pointer exception in broadcast receiver if somebody send a null callback action to MqttAndroidClient.
Change-Id: Iba17f41a2cc3f5094bb310661b4c9a37a137f6e4
Signed-off-by: pleong <[email protected]>
| org.eclipse.paho.android.service/org.eclipse.paho.android.service/src/org/eclipse/paho/android/service/MqttAndroidClient.java | Fixed null pointer exception in broadcast receiver if somebody send a null callback action to MqttAndroidClient. |
|
Java | agpl-3.0 | abf60f5c18a91e9a6c00ae15252c801bf0a35984 | 0 | quikkian-ua-devops/kfs,smith750/kfs,quikkian-ua-devops/will-financials,ua-eas/kfs-devops-automation-fork,quikkian-ua-devops/will-financials,UniversityOfHawaii/kfs,quikkian-ua-devops/kfs,ua-eas/kfs-devops-automation-fork,UniversityOfHawaii/kfs,ua-eas/kfs,kkronenb/kfs,quikkian-ua-devops/kfs,UniversityOfHawaii/kfs,bhutchinson/kfs,bhutchinson/kfs,kkronenb/kfs,kuali/kfs,ua-eas/kfs-devops-automation-fork,quikkian-ua-devops/will-financials,ua-eas/kfs,bhutchinson/kfs,ua-eas/kfs,UniversityOfHawaii/kfs,ua-eas/kfs,ua-eas/kfs-devops-automation-fork,ua-eas/kfs-devops-automation-fork,kuali/kfs,quikkian-ua-devops/will-financials,bhutchinson/kfs,kuali/kfs,smith750/kfs,kkronenb/kfs,quikkian-ua-devops/kfs,UniversityOfHawaii/kfs,smith750/kfs,smith750/kfs,ua-eas/kfs,quikkian-ua-devops/will-financials,quikkian-ua-devops/kfs,kuali/kfs,kkronenb/kfs,kuali/kfs,quikkian-ua-devops/kfs,quikkian-ua-devops/will-financials | /*
* Copyright 2010 The Kuali Foundation.
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kfs.module.endow.document.service.impl;
import org.kuali.kfs.module.endow.businessobject.EndowmentTransactionLine;
import org.kuali.kfs.module.endow.businessobject.EndowmentTransactionSecurity;
import org.kuali.kfs.module.endow.businessobject.EndowmentTransactionTaxLotLine;
import org.kuali.kfs.module.endow.businessobject.HoldingTaxLot;
import org.kuali.kfs.module.endow.businessobject.Security;
import org.kuali.kfs.module.endow.document.AssetDecreaseDocument;
import org.kuali.kfs.module.endow.document.service.HoldingTaxLotService;
import org.kuali.kfs.module.endow.document.service.KEMService;
import org.kuali.kfs.module.endow.document.service.SecurityService;
import org.kuali.kfs.module.endow.document.service.UpdateAssetDecreaseDocumentTaxLotsService;
import org.kuali.rice.kns.util.KualiDecimal;
import org.kuali.rice.kns.util.ObjectUtils;
/**
* Provides an implementation for the transaction line related tax lots update for the AssetDecreaseDocument.
*/
public class UpdateAssetDecreaseDocumentTaxLotsServiceImpl implements UpdateAssetDecreaseDocumentTaxLotsService {
private HoldingTaxLotService taxLotService;
private SecurityService securityService;
private KEMService kemService;
/**
*
* @see org.kuali.kfs.module.endow.document.service.UpdateAssetDecreaseDocumentTaxLotsService#updateTransactionLineTaxLots(org.kuali.kfs.module.endow.document.AssetDecreaseDocument, org.kuali.kfs.module.endow.businessobject.EndowmentTransactionLine)
*/
public void updateTransactionLineTaxLots(AssetDecreaseDocument adDocument, EndowmentTransactionLine transLine) {
EndowmentTransactionTaxLotLine taxLotLine = null;
boolean newLine = false;
EndowmentTransactionSecurity endowmentTransactionSecurity = adDocument.getSourceTransactionSecurity();
// updating an existing tax lot
if (transLine.getTaxLotLines() != null && transLine.getTaxLotLines().size() > 0) {
// there is only one tax lot line per each transaction line
taxLotLine = transLine.getTaxLotLines().get(0);
}
// or adding a new one
else {
// create and set a new tax lot line
newLine = true;
taxLotLine = new EndowmentTransactionTaxLotLine();
taxLotLine.setDocumentNumber(adDocument.getDocumentNumber());
taxLotLine.setDocumentLineNumber(transLine.getTransactionLineNumber());
taxLotLine.setTransactionHoldingLotNumber(1);
taxLotLine.setKemid(transLine.getKemid());
taxLotLine.setSecurityID(endowmentTransactionSecurity.getSecurityID());
taxLotLine.setRegistrationCode(endowmentTransactionSecurity.getRegistrationCode());
taxLotLine.setIpIndicator(transLine.getTransactionIPIndicatorCode());
}
taxLotLine.setLotUnits(transLine.getTransactionUnits().bigDecimalValue().negate());
taxLotLine.setLotHoldingCost(transLine.getTransactionAmount().bigDecimalValue().negate());
// set the tax lot acquired date
setTaxLotAcquiredDate(taxLotLine, adDocument, transLine);
// set the new lot indicator
setNewLotIndicator(taxLotLine, adDocument);
if (newLine) {
transLine.getTaxLotLines().add(taxLotLine);
}
}
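/*
 * Editor's note (illustration, not part of the original source): per the commit message,
 * the tax lot mirrors the transaction line with the opposite sign. For a decrease line of
 * 100 units at a cost of 1000.00:
 *
 *   transLine.getTransactionUnits()  ->  100        taxLotLine lot units        -> -100
 *   transLine.getTransactionAmount() ->  1000.00    taxLotLine lot holding cost -> -1000.00
 */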
/**
* Sets the Acquired date for the given tax lot line.
* <p>If the tax lot indicator for the security (END_TRAN_SEC_T: SEC_TAX_LOT_IND) is No, then for the lot acquired date
* (LOT_AQ_DATE) search the END_HLDG_TAX_LOT_T records by KEMID by SEC_ID by REGIS_CD by HLDG_IP_IND [where HLDG_IP_IND is
* equal to END_TRAN_LN_T: TRAN_IP_IND_CD] by HLDG_LOT_NBR where HLDG_LOT_NBR is equal to 1 and return the HLDG_ACQD_DT:
* <ul>
* <li>If a lot exists for the security in END_HLDG_TAX_LOT_T, but the HLDG_UNITS and HLDG_COST are zero, insert the
* current date (System or Process) in LOT_ACQD_DT.</li>
* <li>If no lot exists for the security, then insert the current date (System or Process) in LOT_ACQD_DT.</li>
* </ul>
* If the tax lot indicator for the security (END_TRAN_SEC_T: SEC_TAX_LOT_IND) is Yes, insert the current date (System or
* Process) in LOT_AQ_DATE.
*
* @param taxLotLine the tax lot line for which to set the acquired date
* @param adDocument the Asset Decrease Document the tax lot line belongs to
* @param transLine the transaction line the tax lot is related to
*/
private void setTaxLotAcquiredDate(EndowmentTransactionTaxLotLine taxLotLine, AssetDecreaseDocument adDocument, EndowmentTransactionLine transLine) {
EndowmentTransactionSecurity endowmentTransactionSecurity = adDocument.getSourceTransactionSecurity();
Security security = securityService.getByPrimaryKey(endowmentTransactionSecurity.getSecurityID());
// if security tax lot indicator is 'No' and a tax lot exists for the kemid, security, registration code and income
// principal indicator - set the lot acquired date to be the tax lot holding acquired date if units and cost is not zero;
// otherwise set the date to be the current date
if (ObjectUtils.isNotNull(security) && !security.getClassCode().isTaxLotIndicator()) {
HoldingTaxLot holdingTaxLot = taxLotService.getByPrimaryKey(transLine.getKemid(), endowmentTransactionSecurity.getSecurityID(), endowmentTransactionSecurity.getRegistrationCode(), 1, transLine.getTransactionIPIndicatorCode());
if (ObjectUtils.isNotNull(holdingTaxLot)) {
if (holdingTaxLot.getUnits().equals(KualiDecimal.ZERO) && holdingTaxLot.getCost().equals(KualiDecimal.ZERO)) {
taxLotLine.setLotAcquiredDate(kemService.getCurrentDate());
}
else {
taxLotLine.setLotAcquiredDate(holdingTaxLot.getAcquiredDate());
}
}
else {
taxLotLine.setLotAcquiredDate(kemService.getCurrentDate());
}
}
// if security tax lot indicator is 'Yes' set the lot acquired date to be the current date
else {
taxLotLine.setLotAcquiredDate(kemService.getCurrentDate());
}
}
/**
* Sets the new lot indicator for the tax lot: -- if the security tax lot indicator is No, the field is set to 'N'. When the
* batch process runs, a new entry may need to be created on the holding tax lot table in case no entry is found for the
* given KEMID, security ID, registration code, holding ip indicator, and holding lot number = 1. In case there is an entry,
* that one is simply updated; -- if the security tax lot indicator is Yes, the field is set to 'Y'. We always create a new
* lot with the lot number being the next sequential lot number.
*
* @param taxLotLine
* @param adDocument
*/
private void setNewLotIndicator(EndowmentTransactionTaxLotLine taxLotLine, AssetDecreaseDocument adDocument) {
EndowmentTransactionSecurity endowmentTransactionSecurity = adDocument.getSourceTransactionSecurity();
Security security = securityService.getByPrimaryKey(endowmentTransactionSecurity.getSecurityID());
if (ObjectUtils.isNotNull(security)) {
// if the security tax lot indicator is No then the field is set to 'N'. When the batch process runs we
// might need to create a new entry on the holding tax lot table in case no entry is found for the given KEMID, security
// ID, registration code, holding ip indicator, and holding lot number = 1. In case there is an entry we will just
// update that one
if (!security.getClassCode().isTaxLotIndicator()) {
taxLotLine.setNewLotIndicator(false);
}
// if the security tax lot is Yes then the field is set to 'Y'. We are always creating a new lot with the lot
// number being the next sequential lot number.
else {
taxLotLine.setNewLotIndicator(true);
}
}
}
/**
* Gets the taxLotService.
*
* @return taxLotService
*/
protected HoldingTaxLotService getTaxLotService() {
return taxLotService;
}
/**
* Sets the taxLotService.
*
* @param taxLotService
*/
public void setTaxLotService(HoldingTaxLotService taxLotService) {
this.taxLotService = taxLotService;
}
/**
* Gets the securityService.
*
* @return securityService
*/
protected SecurityService getSecurityService() {
return securityService;
}
/**
* Sets the securityService.
*
* @param securityService
*/
public void setSecurityService(SecurityService securityService) {
this.securityService = securityService;
}
/**
* Gets the kemService.
*
* @return kemService
*/
protected KEMService getKemService() {
return kemService;
}
/**
* Sets the kemService.
*
* @param kemService
*/
public void setKemService(KEMService kemService) {
this.kemService = kemService;
}
}
| work/src/org/kuali/kfs/module/endow/document/service/impl/UpdateAssetDecreaseDocumentTaxLotsServiceImpl.java | /*
* Copyright 2010 The Kuali Foundation.
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.kfs.module.endow.document.service.impl;
import org.kuali.kfs.module.endow.businessobject.EndowmentTransactionLine;
import org.kuali.kfs.module.endow.businessobject.EndowmentTransactionSecurity;
import org.kuali.kfs.module.endow.businessobject.EndowmentTransactionTaxLotLine;
import org.kuali.kfs.module.endow.businessobject.HoldingTaxLot;
import org.kuali.kfs.module.endow.businessobject.Security;
import org.kuali.kfs.module.endow.document.AssetDecreaseDocument;
import org.kuali.kfs.module.endow.document.service.HoldingTaxLotService;
import org.kuali.kfs.module.endow.document.service.KEMService;
import org.kuali.kfs.module.endow.document.service.SecurityService;
import org.kuali.kfs.module.endow.document.service.UpdateAssetDecreaseDocumentTaxLotsService;
import org.kuali.rice.kns.util.KualiDecimal;
import org.kuali.rice.kns.util.ObjectUtils;
/**
* Provides an implementation for the transaction line related tax lots update for the AssetDecreaseDocument.
*/
public class UpdateAssetDecreaseDocumentTaxLotsServiceImpl implements UpdateAssetDecreaseDocumentTaxLotsService {
private HoldingTaxLotService taxLotService;
private SecurityService securityService;
private KEMService kemService;
/**
*
* @see org.kuali.kfs.module.endow.document.service.UpdateAssetDecreaseDocumentTaxLotsService#updateTransactionLineTaxLots(org.kuali.kfs.module.endow.document.AssetDecreaseDocument, org.kuali.kfs.module.endow.businessobject.EndowmentTransactionLine)
*/
public void updateTransactionLineTaxLots(AssetDecreaseDocument adDocument, EndowmentTransactionLine transLine) {
EndowmentTransactionTaxLotLine taxLotLine = null;
boolean newLine = false;
EndowmentTransactionSecurity endowmentTransactionSecurity = adDocument.getSourceTransactionSecurity();
// updating an existing tax lot
if (transLine.getTaxLotLines() != null && transLine.getTaxLotLines().size() > 0) {
// there is only one tax lot line per each transaction line
taxLotLine = transLine.getTaxLotLines().get(0);
}
// or adding a new one
else {
// create and set a new tax lot line
newLine = true;
taxLotLine = new EndowmentTransactionTaxLotLine();
taxLotLine.setDocumentNumber(adDocument.getDocumentNumber());
taxLotLine.setDocumentLineNumber(transLine.getTransactionLineNumber());
taxLotLine.setTransactionHoldingLotNumber(1);
taxLotLine.setKemid(transLine.getKemid());
taxLotLine.setSecurityID(endowmentTransactionSecurity.getSecurityID());
taxLotLine.setRegistrationCode(endowmentTransactionSecurity.getRegistrationCode());
taxLotLine.setIpIndicator(transLine.getTransactionIPIndicatorCode());
}
taxLotLine.setLotUnits(transLine.getTransactionUnits().bigDecimalValue());
taxLotLine.setLotHoldingCost(transLine.getTransactionAmount().bigDecimalValue());
// set the tax lot acquired date
setTaxLotAcquiredDate(taxLotLine, adDocument, transLine);
// set the new lot indicator
setNewLotIndicator(taxLotLine, adDocument);
if (newLine) {
transLine.getTaxLotLines().add(taxLotLine);
}
}
/**
* Sets the Acquired date for the given tax lot line.
* <p>If the tax lot indicator for the security (END_TRAN_SEC_T: SEC_TAX_LOT_IND) is No, then for the lot acquired date
* (LOT_AQ_DATE) search the END_HLDG_TAX_LOT_T records by KEMID by SEC_ID by REGIS_CD by HLDG_IP_IND [where HLDG_IP_IND is
* equal to END_TRAN_LN_T: TRAN_IP_IND_CD] by HLDG_LOT_NBR where HLDG_LOT_NBR is equal to 1 and return the HLDG_ACQD_DT:
* <ul>
* <li>If a lot exists for the security in END_HLDG_TAX_LOT_T, but the HLDG_UNITS and HLDG_COST are zero, insert the
* current date (System or Process) in LOT_ACQD_DT.</li>
* <li>If no lot exists for the security, then insert the current date (System or Process) in LOT_ACQD_DT.</li>
* </ul>
* If the tax lot indicator for the security (END_TRAN_SEC_T: SEC_TAX_LOT_IND) is Yes, insert the current date (System or
* Process) in LOT_AQ_DATE.
*
* @param taxLotLine the tax lot line for which to set the acquired date
* @param adDocument the Asset Decrease Document the tax lot line belongs to
* @param transLine the transaction line the tax lot is related to
*/
private void setTaxLotAcquiredDate(EndowmentTransactionTaxLotLine taxLotLine, AssetDecreaseDocument adDocument, EndowmentTransactionLine transLine) {
EndowmentTransactionSecurity endowmentTransactionSecurity = adDocument.getSourceTransactionSecurity();
Security security = securityService.getByPrimaryKey(endowmentTransactionSecurity.getSecurityID());
// if security tax lot indicator is 'No' and a tax lot exists for the kemid, security, registration code and income
// principal indicator - set the lot acquired date to be the tax lot holding acquired date if units and cost is not zero;
// otherwise set the date to be the current date
if (ObjectUtils.isNotNull(security) && !security.getClassCode().isTaxLotIndicator()) {
HoldingTaxLot holdingTaxLot = taxLotService.getByPrimaryKey(transLine.getKemid(), endowmentTransactionSecurity.getSecurityID(), endowmentTransactionSecurity.getRegistrationCode(), 1, transLine.getTransactionIPIndicatorCode());
if (ObjectUtils.isNotNull(holdingTaxLot)) {
if (holdingTaxLot.getUnits().equals(KualiDecimal.ZERO) && holdingTaxLot.getCost().equals(KualiDecimal.ZERO)) {
taxLotLine.setLotAcquiredDate(kemService.getCurrentDate());
}
else {
taxLotLine.setLotAcquiredDate(holdingTaxLot.getAcquiredDate());
}
}
else {
taxLotLine.setLotAcquiredDate(kemService.getCurrentDate());
}
}
// if security tax lot indicator is 'Yes' set the lot acquired date to be the current date
else {
taxLotLine.setLotAcquiredDate(kemService.getCurrentDate());
}
}
/**
* Sets the new lot indicator for the tax lot: -- if the security tax lot indicator is No, the field is set to 'N'. When the
* batch process runs, a new entry may need to be created on the holding tax lot table in case no entry is found for the
* given KEMID, security ID, registration code, holding ip indicator, and holding lot number = 1. In case there is an entry,
* that one is simply updated; -- if the security tax lot indicator is Yes, the field is set to 'Y'. We always create a new
* lot with the lot number being the next sequential lot number.
*
* @param taxLotLine
* @param adDocument
*/
private void setNewLotIndicator(EndowmentTransactionTaxLotLine taxLotLine, AssetDecreaseDocument adDocument) {
EndowmentTransactionSecurity endowmentTransactionSecurity = adDocument.getSourceTransactionSecurity();
Security security = securityService.getByPrimaryKey(endowmentTransactionSecurity.getSecurityID());
if (ObjectUtils.isNotNull(security)) {
// if the security tax lot indicator is No then the field is set to 'N'. When the batch process runs we
// might need to create a new entry on the holding tax lot table in case no entry is found for the given KEMID, security
// ID, registration code, holding ip indicator, and holding lot number = 1. In case there is an entry we will just
// update that one
if (!security.getClassCode().isTaxLotIndicator()) {
taxLotLine.setNewLotIndicator(false);
}
// if the security tax lot is Yes then the field is set to 'Y'. We are always creating a new lot with the lot
// number being the next sequential lot number.
else {
taxLotLine.setNewLotIndicator(true);
}
}
}
/**
* Gets the taxLotService.
*
* @return taxLotService
*/
protected HoldingTaxLotService getTaxLotService() {
return taxLotService;
}
/**
* Sets the taxLotService.
*
* @param taxLotService
*/
public void setTaxLotService(HoldingTaxLotService taxLotService) {
this.taxLotService = taxLotService;
}
/**
* Gets the securityService.
*
* @return securityService
*/
protected SecurityService getSecurityService() {
return securityService;
}
/**
* Sets the securityService.
*
* @param securityService
*/
public void setSecurityService(SecurityService securityService) {
this.securityService = securityService;
}
/**
* Gets the kemService.
*
* @return kemService
*/
protected KEMService getKemService() {
return kemService;
}
/**
* Sets the kemService.
*
* @param kemService
*/
public void setKemService(KEMService kemService) {
this.kemService = kemService;
}
}
| KULENDOW-559: Unit and amount values need to be negative i.e. opposite of the transaction line's unit and amount.
| work/src/org/kuali/kfs/module/endow/document/service/impl/UpdateAssetDecreaseDocumentTaxLotsServiceImpl.java | KULENDOW-559: Unit and amount values need to be negative i.e. opposite of the transaction line's unit and amount. |
|
Java | lgpl-2.1 | 9d1519d9649abf805091984fd8083b06637047ff | 0 | sbliven/biojava,sbliven/biojava,sbliven/biojava | package org.biojava.bio.structure.align.util;
import java.io.IOException;
import java.net.URL;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.Iterator;
import org.biojava.bio.structure.Atom;
import org.biojava.bio.structure.Chain;
import org.biojava.bio.structure.Group;
import org.biojava.bio.structure.Structure;
import org.biojava.bio.structure.StructureException;
import org.biojava.bio.structure.StructureTools;
import org.biojava.bio.structure.align.ce.AbstractUserArgumentProcessor;
import org.biojava.bio.structure.io.FileParsingParameters;
import org.biojava.bio.structure.io.PDBFileReader;
import org.biojava.bio.structure.scop.ScopDatabase;
import org.biojava.bio.structure.scop.ScopDomain;
import org.biojava.bio.structure.scop.ScopFactory;
import org.biojava.bio.structure.scop.ScopInstallation;
import org.biojava3.core.util.InputStreamProvider;
/** A utility class that provides easy access to Structure objects. If you are running a
* script that is frequently re-using the same PDB structures, the AtomCache keeps an
* in-memory cache of the files for quicker access. The cache is a soft-cache; this
* means it won't cause out-of-memory exceptions, but garbage-collects the data if the
* Java virtual machine needs to free up space. The AtomCache is thread-safe.
*
* @author Andreas Prlic
* @since 3.0
*/
public class AtomCache {
public static final String CHAIN_NR_SYMBOL = ":";
public static final String UNDERSCORE = "_";
public static final String CHAIN_SPLIT_SYMBOL = ".";
private static final String FILE_SEPARATOR = System.getProperty("file.separator");
String path;
// make sure IDs are loaded uniquely
Collection<String> currentlyLoading = Collections.synchronizedCollection(new TreeSet<String>());
private static ScopDatabase scopInstallation ;
boolean autoFetch;
boolean isSplit;
boolean strictSCOP;
FileParsingParameters params;
private boolean fetchFileEvenIfObsolete;
private boolean fetchCurrent;
/** Creates an instance of an AtomCache that is pointed to the a particular
* path in the file system.
*
* @param pdbFilePath a directory in the file system to use as a location to cache files.
* @param isSplit a flag to indicate if the directory organisation is "split" as on the PDB ftp servers, or if all files are contained in one directory.
*/
public AtomCache(String pdbFilePath, boolean isSplit){
if ( ! pdbFilePath.endsWith(FILE_SEPARATOR))
pdbFilePath += FILE_SEPARATOR;
// we are caching the binary files that contain the PDBs gzipped
// that is the most memory efficient way of caching...
// set the input stream provider to caching mode
System.setProperty(InputStreamProvider.CACHE_PROPERTY, "true");
path = pdbFilePath;
System.setProperty(AbstractUserArgumentProcessor.PDB_DIR,path);
//this.cache = cache;
this.isSplit = isSplit;
autoFetch = true;
fetchFileEvenIfObsolete = false;
fetchCurrent = false;
currentlyLoading.clear();
params = new FileParsingParameters();
// we don't need this here
params.setAlignSeqRes(false);
// no secstruc either
params.setParseSecStruc(false);
//
this.strictSCOP = true;
scopInstallation = null;
}
/** Creates a new AtomCache object based on the provided UserConfiguration.
*
* @param config the UserConfiguration to use for this cache.
*/
public AtomCache(UserConfiguration config){
this(config.getPdbFilePath(),config.isSplit());
autoFetch = config.getAutoFetch();
}
/** Get the path that is used to cache PDB files.
*
* @return path to a directory
*/
public String getPath() {
return path;
}
/** Set the path that is used to cache PDB files.
*
* @param path to a directory
*/
public void setPath(String path) {
System.setProperty(AbstractUserArgumentProcessor.PDB_DIR,path);
this.path = path;
}
/** Is the organization of files within the directory split, as on the PDB FTP servers,
* or are all files contained in one directory.
* @return flag
*/
public boolean isSplit() {
return isSplit;
}
/** Is the organization of files within the directory split, as on the PDB FTP servers,
* or are all files contained in one directory.
* @param isSplit flag
*/
public void setSplit(boolean isSplit) {
this.isSplit = isSplit;
}
/** Does the cache automatically download files that are missing from the local installation from the PDB FTP site?
*
* @return flag
*/
public boolean isAutoFetch() {
return autoFetch;
}
/** Does the cache automatically download files that are missing from the local installation from the PDB FTP site?
*
* @param autoFetch flag
*/
public void setAutoFetch(boolean autoFetch) {
this.autoFetch = autoFetch;
}
/**
* @param fetchFileEvenIfObsolete the fetchFileEvenIfObsolete to set
*/
public void setFetchFileEvenIfObsolete(boolean fetchFileEvenIfObsolete) {
this.fetchFileEvenIfObsolete = fetchFileEvenIfObsolete;
}
/**forces the cache to fetch the file if its status is OBSOLETE.
* This feature has a higher priority than {@link #setFetchCurrent(boolean)}
* @return the fetchFileEvenIfObsolete
* @author Amr AL-Hossary
* @see #fetchCurrent
* @since 3.0.2
*/
public boolean isFetchFileEvenIfObsolete() {
return fetchFileEvenIfObsolete;
}
/**if enabled, the reader searches for the newest possible PDB ID, if not present in the local installation.
* The {@link #setFetchFileEvenIfObsolete(boolean)} function has a higher priority than this function.
* @param fetchNewestCurrent the fetchCurrent flag to set
* @author Amr AL-Hossary
* @see #setFetchFileEvenIfObsolete(boolean)
* @since 3.0.2
*/
public void setFetchCurrent(boolean fetchNewestCurrent) {
this.fetchCurrent = fetchNewestCurrent;
}
/**
* @return the fetchCurrent
*/
public boolean isFetchCurrent() {
return fetchCurrent;
}
/**
* Reports whether strict scop naming will be enforced, or whether this AtomCache
* should try to guess some simple variants on scop domains.
* @return true if scop names should be used strictly with no guessing
*/
public boolean isStrictSCOP() {
return strictSCOP;
}
/**
* When strictSCOP is enabled, SCOP domain identifiers (eg 'd1gbga_') are
* matched literally to the SCOP database.
*
* When disabled, some simple mistakes are corrected automatically.
* For instance, the invalid identifier 'd1gbg__' would be corrected to 'd1gbga_' automatically.
* @param strictSCOP Indicates whether strict scop names should be used.
*/
public void setStrictSCOP(boolean strictSCOP) {
this.strictSCOP = strictSCOP;
}
/** Returns the representation of a ScopDomain as a BioJava Structure object
*
* @param domain a scop domain
* @return a Structure object.
* @throws IOException
* @throws StructureException
*/
public Structure getStructureForDomain(ScopDomain domain) throws IOException, StructureException{
Structure s = null;
String pdbId = domain.getPdbId();
try {
s = getStructure(pdbId);
} catch (StructureException ex){
System.err.println("error getting Structure for " + pdbId);
throw new StructureException(ex);
}
String range = "(";
int rangePos = 0;
for ( String r : domain.getRanges()) {
rangePos++;
range+= r;
if ( ( domain.getRanges().size()> 1) && (rangePos < domain.getRanges().size())){
range+=",";
}
}
range+=")";
//System.out.println("getting range for "+ pdbId + " " + range);
Structure n = StructureTools.getSubRanges(s, range);
// get free ligands of first chain...
if ( n.getChains().size()> 0) {
Chain c1 = n.getChains().get(0);
for ( Chain c : s.getChains()) {
if ( c1.getChainID().equals(c.getChainID())) {
List<Group> ligands = c.getAtomLigands();
for(Group g: ligands){
if ( ! c1.getAtomGroups().contains(g)) {
c1.addGroup(g);
}
}
}
}
}
n.setName(domain.getScopId());
n.setPDBCode(domain.getScopId());
return n;
}
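/*
 * Editor's note: a brief usage sketch, not part of the original source, resolving a SCOP
 * identifier through the installation used elsewhere in this class. The cache directory
 * is hypothetical.
 *
 *   AtomCache cache = new AtomCache("/tmp/", true);
 *   ScopDomain domain = cache.getScopInstallation().getDomainByScopID("d2bq6a1");
 *   Structure domainStructure = cache.getStructureForDomain(domain);
 */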
/** Returns the CA atoms for the provided name. See {@link #getStructure(String)} for supported naming conventions.
*
* @param name
* @return an array of Atoms.
* @throws IOException
* @throws StructureException
*/
public Atom[] getAtoms(String name) throws IOException,StructureException{
Atom[] atoms = null;
//System.out.println("loading " + name);
Structure s = null;
try {
s = getStructure(name);
} catch (StructureException ex){
System.err.println("error getting Structure for " + name);
throw new StructureException(ex);
}
atoms = StructureTools.getAtomCAArray(s);
/*synchronized (cache){
cache.put(name, atoms);
}*/
return atoms;
}
/** Returns the CA atoms for the provided name. See {@link #getStructure(String)} for supported naming conventions.
*
* @param name
* @param clone flag to make sure that the atoms are getting cloned
* @return an array of Atoms.
* @throws IOException
* @throws StructureException
* @deprecated does the same as {@link #getAtoms(String)}
*/
public Atom[] getAtoms(String name, boolean clone)throws IOException,StructureException{
Atom[] atoms = getAtoms(name);
if ( clone)
return StructureTools.cloneCAArray(atoms);
return atoms;
}
/** Request a Structure based on a <i>name</i>.
*
* <pre>
Formal specification for how to specify the <i>name</i>:
name := pdbID
| pdbID '.' chainID
| pdbID '.' range
| scopID
range := '('? range (',' range)? ')'?
| chainID
| chainID '_' resNum '-' resNum
pdbID := [0-9][a-zA-Z0-9]{3}
chainID := [a-zA-Z0-9]
scopID := 'd' pdbID [a-z_][0-9_]
resNum := [-+]?[0-9]+[A-Za-z]?
Example structures:
1TIM #whole structure
4HHB.C #single chain
4GCR.A_1-83 #one domain, by residue number
3AA0.A,B #two chains treated as one structure
d2bq6a1 #scop domain
</pre>
*
* With the additional set of rules:
*
* <ul>
* <li>If only a PDB code is provided, the whole structure will be returned, including ligands, but only the first model (for NMR).</li>
* <li>Chain IDs are case sensitive, PDB ids are not. To specify a particular chain write as: 4hhb.A or 4HHB.A </li>
* <li>To specify a SCOP domain write a scopId e.g. d2bq6a1. Some flexibility can be allowed in SCOP domain names, see {@link #setStrictSCOP(boolean)}</li>
* <li>URLs are accepted as well</li>
* </ul>
*
* @param name
* @return a Structure object, or null if name appears improperly formatted (e.g. too short)
* @throws IOException The PDB file cannot be cached due to IO errors
* @throws StructureException The name appeared valid but did not correspond to a structure.
* Also thrown by some submethods upon errors, eg for poorly formatted subranges.
*/
public Structure getStructure(String name) throws IOException, StructureException{
if ( name.length() < 4)
throw new IllegalArgumentException("Can't interpret IDs that are shorter than 4 characters!");
Structure n = null;
boolean useChainNr = false;
boolean useDomainInfo = false;
String range = null;
int chainNr = -1;
try {
String pdbId = null;
String chainId = null;
if ( name.length() == 4){
pdbId = name;
} else if ( name.startsWith("d")){
// return based on SCOP domain ID
return getStructureFromSCOPDomain(name);
} else if (name.length() == 6){
// name is PDB.CHAINID style (e.g. 4hhb.A)
pdbId = name.substring(0,4);
if ( name.substring(4,5).equals(CHAIN_SPLIT_SYMBOL)) {
chainId = name.substring(5,6);
} else if ( name.substring(4,5).equals(CHAIN_NR_SYMBOL)) {
useChainNr = true;
chainNr = Integer.parseInt(name.substring(5,6));
}
} else if ( (name.length() > 6) &&
(name.contains(CHAIN_NR_SYMBOL) || name.contains(UNDERSCORE)) && (! (name.startsWith("file:/") || name.startsWith("http:/")))) {
// this is a name + range
pdbId = name.substring(0,4);
// this ID has domain split information...
useDomainInfo = true;
range = name.substring(5);
} else if ( name.startsWith("file:/") || name.startsWith("http:/") ) {
// this is a URL
try {
URL url = new URL(name);
return getStructureFromURL(url);
} catch (Exception e){
e.printStackTrace();
return null;
}
}
//System.out.println("got: " + name + " " + pdbId + " " + chainId + " useChainNr:" + useChainNr + " " +chainNr + " useDomainInfo:" + useDomainInfo + " " + range);
if (pdbId == null) {
return null;
}
while ( checkLoading(pdbId) ){
// waiting for loading to be finished...
try {
Thread.sleep(100);
} catch (InterruptedException e){
System.err.println(e.getMessage());
}
}
//long start = System.currentTimeMillis();
Structure s;
flagLoading(pdbId);
try {
PDBFileReader reader = new PDBFileReader();
reader.setPath(path);
reader.setPdbDirectorySplit(isSplit);
reader.setAutoFetch(autoFetch);
reader.setFetchFileEvenIfObsolete(fetchFileEvenIfObsolete);
reader.setFetchCurrent(fetchCurrent);
reader.setFileParsingParameters(params);
s = reader.getStructureById(pdbId.toLowerCase());
} catch (Exception e){
flagLoadingFinished(pdbId);
throw new StructureException(e.getMessage() + " while parsing " + pdbId,e);
}
flagLoadingFinished(pdbId);
//long end = System.currentTimeMillis();
//System.out.println("time to load " + pdbId + " " + (end-start) + "\t size :" + StructureTools.getNrAtoms(s) + "\t cached: " + cache.size());
if ( chainId == null && chainNr < 0 && range == null) {
// we only want the 1st model in this case
return StructureTools.getReducedStructure(s,-1);
}
if ( useChainNr) {
//System.out.println("using ChainNr");
n = StructureTools.getReducedStructure(s, chainNr);
} else if ( useDomainInfo) {
//System.out.println("calling getSubRanges");
n = StructureTools.getSubRanges(s, range);
} else {
//System.out.println("reducing Chain Id " + chainId);
n = StructureTools.getReducedStructure(s, chainId);
}
} catch (Exception e){
e.printStackTrace();
throw new StructureException(e.getMessage() + " while parsing " + name,e);
}
return n;
}
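/*
 * Editor's note: a minimal usage sketch, not part of the original source, exercising the
 * naming conventions documented above. The cache directory is hypothetical.
 *
 *   AtomCache cache = new AtomCache("/tmp/", true);
 *   Structure whole = cache.getStructure("1TIM");        // whole structure
 *   Structure chain = cache.getStructure("4HHB.C");      // single chain
 *   Structure dom = cache.getStructure("4GCR.A_1-83");   // one domain, by residue number
 *   Atom[] ca = cache.getAtoms("d2bq6a1");               // CA atoms of a SCOP domain
 */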
private Structure getStructureFromSCOPDomain(String name)
throws IOException, StructureException {
// looks like a SCOP domain!
ScopDomain domain;
if( this.strictSCOP) {
domain = getScopDomain(name);
} else {
domain = guessScopDomain(name);
}
if ( domain != null){
Structure s = getStructureForDomain(domain);
return s;
}
if( !this.strictSCOP) {
Matcher scopMatch = scopIDregex.matcher(name);
if( scopMatch.matches() ) {
String pdbID = scopMatch.group(1);
String chainID = scopMatch.group(2);
// None of the actual SCOP domains match. Guess that '_' means 'whole chain'
if( !chainID.equals("_") ) {
//Add chain identifier
pdbID += "."+scopMatch.group(2);
}
// Fetch the structure by pdb id
Structure struct = getStructure(pdbID);
if(struct != null) {
System.err.println("Trying chain "+pdbID);
}
return struct;
}
}
throw new StructureException("Unable to get structure for SCOP domain: "+name);
}
private Structure getStructureFromURL(URL url) throws IOException, StructureException {
// looks like a URL for a file was provided:
System.out.println("fetching structure from URL:" + url);
String queryS = url.getQuery();
String chainId = null;
if ( queryS != null && (queryS.startsWith("chainId="))) {
chainId = queryS.substring(8);
String fullu = url.toString();
if (fullu.startsWith("file:") && fullu.endsWith("?"+queryS)) {
// for windowze, drop the query part from the URL again
// otherwise there will be a "file not found error" ...
String newu = fullu.substring(0,(fullu.length()-(("?"+queryS).length())));
//System.out.println(newu);
url = new URL(newu);
}
}
PDBFileReader reader = new PDBFileReader();
reader.setPath(path);
reader.setPdbDirectorySplit(isSplit);
reader.setAutoFetch(autoFetch);
reader.setFetchFileEvenIfObsolete(fetchFileEvenIfObsolete);
reader.setFetchCurrent(fetchCurrent);
reader.setFileParsingParameters(params);
Structure s = reader.getStructure(url);
if ( chainId == null)
return StructureTools.getReducedStructure(s,-1);
else
return StructureTools.getReducedStructure(s,chainId);
}
private static final Pattern scopIDregex = Pattern.compile("d(....)(.)(.)" );
/**
* <p>Guess a scop domain. If an exact match is found, return that.
*
* <p>Otherwise, return the first scop domain found for the specified protein
* such that<ul>
* <li>The chains match, or one of the chains is '_' or '.'.
* <li>The domains match, or one of the domains is '_'.
* </ul>
*
*
* @param name
* @return
* @throws IOException
* @throws StructureException
*/
private ScopDomain guessScopDomain(String name) throws IOException, StructureException {
List<ScopDomain> matches = new LinkedList<ScopDomain>();
// Try exact match first
ScopDomain domain = getScopDomain(name);
if ( domain != null){
return domain;
}
// Didn't work. Guess it!
System.err.println("Warning, could not find SCOP domain: " + name);
Matcher scopMatch = scopIDregex.matcher(name);
if( scopMatch.matches() ) {
String pdbID = scopMatch.group(1);
String chainID = scopMatch.group(2);
String domainID = scopMatch.group(3);
if ( scopInstallation == null) {
scopInstallation = new ScopInstallation(path);
}
for( ScopDomain potentialSCOP : scopInstallation.getDomainsForPDB(pdbID) ) {
Matcher potMatch = scopIDregex.matcher(potentialSCOP.getScopId());
if(potMatch.matches()) {
if( chainID.equals(potMatch.group(2)) ||
chainID.equals("_") || chainID.equals(".") ||
potMatch.group(2).equals("_") || potMatch.group(2).equals(".") ) {
if( domainID.equals(potMatch.group(3)) || domainID.equals("_") || potMatch.group(3).equals("_") ) {
// Match, or near match
matches.add(potentialSCOP);
}
}
}
}
}
Iterator<ScopDomain> match = matches.iterator();
if( match.hasNext() ) {
ScopDomain bestMatch = match.next();
System.err.print("Trying domain "+bestMatch.getScopId()+".");
if( match.hasNext() ) {
System.err.print(" Other possibilities: ");
while(match.hasNext()) {
System.err.print(match.next().getScopId() + " ");
}
}
System.err.println();
return bestMatch;
} else {
return null;
}
}
private boolean checkLoading(String name) {
return currentlyLoading.contains(name);
}
private void flagLoading(String name){
if ( ! currentlyLoading.contains(name))
currentlyLoading.add(name);
}
private void flagLoadingFinished(String name){
currentlyLoading.remove(name);
}
private ScopDomain getScopDomain(String scopId)
{
if ( scopInstallation == null) {
scopInstallation = ScopFactory.getSCOP();
}
return scopInstallation.getDomainByScopID(scopId);
}
public ScopDatabase getScopInstallation() {
if ( scopInstallation == null) {
scopInstallation = ScopFactory.getSCOP();
}
return scopInstallation;
}
public FileParsingParameters getFileParsingParams()
{
return params;
}
public void setFileParsingParams(FileParsingParameters params)
{
this.params = params;
}
/**
	 * Loads the default biological unit (*.pdb1.gz) file. If it is not available, the original
	 * PDB file will be loaded, e.g. for NMR structures, where the original file is also the
	 * biological assembly.
*
* @param pdbId the PDB ID
* @return a structure object
* @throws IOException
* @throws StructureException
* @since 3.2
*/
public Structure getBiologicalUnit(String pdbId) throws StructureException, IOException{
int bioAssemblyId = 1;
boolean bioAssemblyFallback = true;
return getBiologicalAssembly(pdbId, bioAssemblyId, bioAssemblyFallback);
}
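	// Minimal usage sketch for the method above (the PDB directory is a hypothetical
	// placeholder; "4hhb" is one of the example IDs used elsewhere in this class's docs):
	//   AtomCache cache = new AtomCache("/tmp/pdb", true);
	//   Structure bioUnit = cache.getBiologicalUnit("4hhb");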
/**
* Loads the biological assembly for a given PDB ID and bioAssemblyId.
	 * If a bioAssemblyId > 0 is specified, the corresponding biological assembly file will be loaded. Note: the
	 * number of available biological unit files varies. Many entries don't have a biological assembly specified (e.g. NMR structures),
	 * many entries have only one biological assembly (bioAssemblyId=1), and a few structures have multiple biological assemblies.
	 * Set bioAssemblyFallback to true to download the original PDB file in cases where a biological assembly file is not available.
*
* @param pdbId the PDB ID
* @param bioAssemblyId the ID of the biological assembly
* @param bioAssemblyFallback if true, try reading original PDB file in case the biological assembly file is not available
* @return a structure object
* @throws IOException
* @throws StructureException
* @author Peter Rose
* @since 3.2
*/
public Structure getBiologicalAssembly(String pdbId, int bioAssemblyId, boolean bioAssemblyFallback) throws StructureException, IOException {
Structure s;
if (bioAssemblyId < 1) {
throw new StructureException("bioAssemblyID must be greater than zero: " + pdbId +
" bioAssemblyId " + bioAssemblyId);
}
PDBFileReader reader = new PDBFileReader();
reader.setPath(path);
reader.setPdbDirectorySplit(isSplit);
reader.setAutoFetch(autoFetch);
reader.setFetchFileEvenIfObsolete(fetchFileEvenIfObsolete);
reader.setFetchCurrent(fetchCurrent);
reader.setFileParsingParameters(params);
reader.setBioAssemblyId(bioAssemblyId);
reader.setBioAssemblyFallback(bioAssemblyFallback);
s = reader.getStructureById(pdbId.toLowerCase());
s.setPDBCode(pdbId);
return s;
}
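	// Continuing the sketch above (same hypothetical cache instance; ID and flags chosen
	// only for illustration): load assembly 1, falling back to the original PDB file.
	//   Structure assembly = cache.getBiologicalAssembly("4hhb", 1, true);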
}
| biojava3-structure/src/main/java/org/biojava/bio/structure/align/util/AtomCache.java | package org.biojava.bio.structure.align.util;
import java.io.IOException;
import java.net.URL;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.Iterator;
import org.biojava.bio.structure.Atom;
import org.biojava.bio.structure.Chain;
import org.biojava.bio.structure.Group;
import org.biojava.bio.structure.Structure;
import org.biojava.bio.structure.StructureException;
import org.biojava.bio.structure.StructureTools;
import org.biojava.bio.structure.align.ce.AbstractUserArgumentProcessor;
import org.biojava.bio.structure.io.FileParsingParameters;
import org.biojava.bio.structure.io.PDBFileReader;
import org.biojava.bio.structure.scop.ScopDatabase;
import org.biojava.bio.structure.scop.ScopDomain;
import org.biojava.bio.structure.scop.ScopFactory;
import org.biojava.bio.structure.scop.ScopInstallation;
import org.biojava3.core.util.InputStreamProvider;
/** A utility class that provides easy access to Structure objects. If you are running a
* script that is frequently re-using the same PDB structures, the AtomCache keeps an
 * in-memory cache of the files for quicker access. The cache is a soft cache; this
 * means it won't cause out-of-memory exceptions, but garbage-collects the data if the
* Java virtual machine needs to free up space. The AtomCache is thread-safe.
*
* @author Andreas Prlic
* @since 3.0
*/
public class AtomCache {
public static final String CHAIN_NR_SYMBOL = ":";
public static final String CHAIN_SPLIT_SYMBOL = ".";
private static final String FILE_SEPARATOR = System.getProperty("file.separator");
String path;
// make sure IDs are loaded uniquely
Collection<String> currentlyLoading = Collections.synchronizedCollection(new TreeSet<String>());
private static ScopDatabase scopInstallation ;
boolean autoFetch;
boolean isSplit;
boolean strictSCOP;
FileParsingParameters params;
private boolean fetchFileEvenIfObsolete;
private boolean fetchCurrent;
/** Creates an instance of an AtomCache that is pointed to the a particular
* path in the file system.
*
* @param pdbFilePath a directory in the file system to use as a location to cache files.
* @param isSplit a flag to indicate if the directory organisation is "split" as on the PDB ftp servers, or if all files are contained in one directory.
*/
public AtomCache(String pdbFilePath, boolean isSplit){
if ( ! pdbFilePath.endsWith(FILE_SEPARATOR))
pdbFilePath += FILE_SEPARATOR;
// we are caching the binary files that contain the PDBs gzipped
// that is the most memory efficient way of caching...
// set the input stream provider to caching mode
System.setProperty(InputStreamProvider.CACHE_PROPERTY, "true");
path = pdbFilePath;
System.setProperty(AbstractUserArgumentProcessor.PDB_DIR,path);
//this.cache = cache;
this.isSplit = isSplit;
autoFetch = true;
fetchFileEvenIfObsolete = false;
fetchCurrent = false;
currentlyLoading.clear();
params = new FileParsingParameters();
// we don't need this here
params.setAlignSeqRes(false);
// no secstruc either
params.setParseSecStruc(false);
//
this.strictSCOP = true;
scopInstallation = null;
}
/** Creates a new AtomCache object based on the provided UserConfiguration.
*
* @param config the UserConfiguration to use for this cache.
*/
public AtomCache(UserConfiguration config){
this(config.getPdbFilePath(),config.isSplit());
autoFetch = config.getAutoFetch();
}
/** Get the path that is used to cache PDB files.
*
* @return path to a directory
*/
public String getPath() {
return path;
}
/** Set the path that is used to cache PDB files.
*
* @param path to a directory
*/
public void setPath(String path) {
System.setProperty(AbstractUserArgumentProcessor.PDB_DIR,path);
this.path = path;
}
/** Is the organization of files within the directory split, as on the PDB FTP servers,
 * or are all files contained in one directory?
* @return flag
*/
public boolean isSplit() {
return isSplit;
}
/** Is the organization of files within the directory split, as on the PDB FTP servers,
 * or are all files contained in one directory?
* @param isSplit flag
*/
public void setSplit(boolean isSplit) {
this.isSplit = isSplit;
}
/** Does the cache automatically download files that are missing from the local installation from the PDB FTP site?
*
* @return flag
*/
public boolean isAutoFetch() {
return autoFetch;
}
/** Does the cache automatically download files that are missing from the local installation from the PDB FTP site?
*
* @param autoFetch flag
*/
public void setAutoFetch(boolean autoFetch) {
this.autoFetch = autoFetch;
}
/**
* @param fetchFileEvenIfObsolete the fetchFileEvenIfObsolete to set
*/
public void setFetchFileEvenIfObsolete(boolean fetchFileEvenIfObsolete) {
this.fetchFileEvenIfObsolete = fetchFileEvenIfObsolete;
}
    /** Forces the cache to fetch the file if its status is OBSOLETE.
* This feature has a higher priority than {@link #setFetchCurrent(boolean)}
* @return the fetchFileEvenIfObsolete
* @author Amr AL-Hossary
* @see #fetchCurrent
* @since 3.0.2
*/
public boolean isFetchFileEvenIfObsolete() {
return fetchFileEvenIfObsolete;
}
    /** If enabled, the reader searches for the newest possible PDB ID if it is not present in the local installation.
* The {@link #setFetchFileEvenIfObsolete(boolean)} function has a higher priority than this function.
* @param fetchCurrent the fetchCurrent to set
* @author Amr AL-Hossary
* @see #setFetchFileEvenIfObsolete(boolean)
* @since 3.0.2
*/
public void setFetchCurrent(boolean fetchNewestCurrent) {
this.fetchCurrent = fetchNewestCurrent;
}
/**
* @return the fetchCurrent
*/
public boolean isFetchCurrent() {
return fetchCurrent;
}
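    // Configuration sketch for the two fetch flags documented above (values chosen only for
    // illustration; the local path is a hypothetical placeholder):
    //   AtomCache cache = new AtomCache("/tmp/pdb", true);
    //   cache.setAutoFetch(true);
    //   cache.setFetchFileEvenIfObsolete(true); // takes priority over fetchCurrent if both are set
    //   cache.setFetchCurrent(false);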
/**
* Reports whether strict scop naming will be enforced, or whether this AtomCache
* should try to guess some simple variants on scop domains.
* @return true if scop names should be used strictly with no guessing
*/
public boolean isStrictSCOP() {
return strictSCOP;
}
/**
* When strictSCOP is enabled, SCOP domain identifiers (eg 'd1gbga_') are
* matched literally to the SCOP database.
*
* When disabled, some simple mistakes are corrected automatically.
* For instance, the invalid identifier 'd1gbg__' would be corrected to 'd1gbga_' automatically.
* @param strictSCOP Indicates whether strict scop names should be used.
*/
public void setStrictSCOP(boolean strictSCOP) {
this.strictSCOP = strictSCOP;
}
/** Returns the representation of a ScopDomain as a BioJava Structure object
*
* @param domain a scop domain
* @return a Structure object.
* @throws IOException
* @throws StructureException
*/
public Structure getStructureForDomain(ScopDomain domain) throws IOException, StructureException{
Structure s = null;
String pdbId = domain.getPdbId();
try {
s = getStructure(pdbId);
} catch (StructureException ex){
System.err.println("error getting Structure for " + pdbId);
throw new StructureException(ex);
}
String range = "(";
int rangePos = 0;
for ( String r : domain.getRanges()) {
rangePos++;
range+= r;
if ( ( domain.getRanges().size()> 1) && (rangePos < domain.getRanges().size())){
range+=",";
}
}
range+=")";
//System.out.println("getting range for "+ pdbId + " " + range);
Structure n = StructureTools.getSubRanges(s, range);
// get free ligands of first chain...
if ( n.getChains().size()> 0) {
Chain c1 = n.getChains().get(0);
for ( Chain c : s.getChains()) {
if ( c1.getChainID().equals(c.getChainID())) {
List<Group> ligands = c.getAtomLigands();
for(Group g: ligands){
if ( ! c1.getAtomGroups().contains(g)) {
c1.addGroup(g);
}
}
}
}
}
n.setName(domain.getScopId());
n.setPDBCode(domain.getScopId());
return n;
}
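    // Usage sketch for the method above ("d2bq6a1" is the example SCOP ID used in this
    // class's getStructure documentation; the ScopDatabase lookup is one possible way to
    // obtain a ScopDomain):
    //   ScopDomain domain = cache.getScopInstallation().getDomainByScopID("d2bq6a1");
    //   Structure domainStructure = cache.getStructureForDomain(domain);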
/** Returns the CA atoms for the provided name. See {@link #getStructure(String)} for supported naming conventions.
*
* @param name
* @return an array of Atoms.
* @throws IOException
* @throws StructureException
*/
public Atom[] getAtoms(String name) throws IOException,StructureException{
Atom[] atoms = null;
//System.out.println("loading " + name);
Structure s = null;
try {
s = getStructure(name);
} catch (StructureException ex){
System.err.println("error getting Structure for " + name);
throw new StructureException(ex);
}
atoms = StructureTools.getAtomCAArray(s);
/*synchronized (cache){
cache.put(name, atoms);
}*/
return atoms;
}
/** Returns the CA atoms for the provided name. See {@link #getStructure(String)} for supported naming conventions.
*
* @param name
     * @param clone flag to make sure that the atoms are getting cloned
* @return an array of Atoms.
* @throws IOException
* @throws StructureException
* @deprecated does the same as {@link #getAtoms(String)} ;
*/
public Atom[] getAtoms(String name, boolean clone)throws IOException,StructureException{
Atom[] atoms = getAtoms(name);
if ( clone)
return StructureTools.cloneCAArray(atoms);
return atoms;
}
/** Request a Structure based on a <i>name</i>.
* The following rules are applied to this name:
* If only a PDB code is provided, the whole structure will be used for the alignment.
* <ul>
* <li>To specify a particular chain write as: 4hhb.A (chain IDs are case sensitive, PDB ids are not)</li>
* <li>To specify that the 1st chain in a structure should be used write: 4hhb:0 .</li>
* <li>To specify a range of residues write 4hhb.A:1-10 .</li>
* <li>To specify several ranges of residues write 4hhb(A:1-10,B:1-10)
* <li>To specify a SCOP domain write a scopId e.g. d2bq6a1. Some flexibility can be allowed in SCOP domain names, see {@link #setStrictSCOP(boolean)}</li>
* <li>URLs are accepted as well</li>
* </ul>
*
* @param name
     * @return a Structure object, or null if name appears improperly formatted (e.g. too short)
* @throws IOException The PDB file cannot be cached due to IO errors
* @throws StructureException The name appeared valid but did not correspond to a structure.
     * Also thrown by some submethods upon errors, e.g. for poorly formatted subranges.
*/
public Structure getStructure(String name) throws IOException, StructureException{
if ( name.length() < 4)
throw new IllegalArgumentException("Can't interpred IDs that are shorter than 4 residues!");
Structure n = null;
boolean useChainNr = false;
boolean useDomainInfo = false;
String range = null;
int chainNr = -1;
try {
String pdbId = null;
String chainId = null;
if ( name.length() == 4){
pdbId = name;
} else if ( name.startsWith("d")){
// return based on SCOP domain ID
return getStructureFromSCOPDomain(name);
} else if (name.length() == 6){
// name is PDB.CHAINID style (e.g. 4hhb.A)
pdbId = name.substring(0,4);
if ( name.substring(4,5).equals(CHAIN_SPLIT_SYMBOL)) {
chainId = name.substring(5,6);
} else if ( name.substring(4,5).equals(CHAIN_NR_SYMBOL)) {
useChainNr = true;
chainNr = Integer.parseInt(name.substring(5,6));
}
} else if ( (name.length() > 6) &&
(name.contains(CHAIN_NR_SYMBOL)) && (! (name.startsWith("file:/") || name.startsWith("http:/")))) {
// this is a name + range
pdbId = name.substring(0,4);
// this ID has domain split information...
useDomainInfo = true;
range = name.substring(5);
} else if ( name.startsWith("file:/") || name.startsWith("http:/") ) {
// this is a URL
try {
URL url = new URL(name);
return getStructureFromURL(url);
} catch (Exception e){
e.printStackTrace();
return null;
}
}
//System.out.println("got: " + name + " " + pdbId + " " + chainId + " useChainNr:" + useChainNr + " " +chainNr + " useDomainInfo:" + useDomainInfo + " " + range);
if (pdbId == null) {
return null;
}
while ( checkLoading(pdbId) ){
// waiting for loading to be finished...
try {
Thread.sleep(100);
} catch (InterruptedException e){
System.err.println(e.getMessage());
}
}
//long start = System.currentTimeMillis();
Structure s;
flagLoading(pdbId);
try {
PDBFileReader reader = new PDBFileReader();
reader.setPath(path);
reader.setPdbDirectorySplit(isSplit);
reader.setAutoFetch(autoFetch);
reader.setFetchFileEvenIfObsolete(fetchFileEvenIfObsolete);
reader.setFetchCurrent(fetchCurrent);
reader.setFileParsingParameters(params);
s = reader.getStructureById(pdbId.toLowerCase());
} catch (Exception e){
flagLoadingFinished(pdbId);
throw new StructureException(e.getMessage() + " while parsing " + pdbId,e);
}
flagLoadingFinished(pdbId);
//long end = System.currentTimeMillis();
//System.out.println("time to load " + pdbId + " " + (end-start) + "\t size :" + StructureTools.getNrAtoms(s) + "\t cached: " + cache.size());
if ( chainId == null && chainNr < 0 && range == null) {
// we only want the 1st model in this case
return StructureTools.getReducedStructure(s,-1);
}
if ( useChainNr) {
//System.out.println("using ChainNr");
n = StructureTools.getReducedStructure(s, chainNr);
} else if ( useDomainInfo) {
//System.out.println("calling getSubRanges");
n = StructureTools.getSubRanges(s, range);
} else {
//System.out.println("reducing Chain Id " + chainId);
n = StructureTools.getReducedStructure(s, chainId);
}
} catch (Exception e){
e.printStackTrace();
throw new StructureException(e.getMessage() + " while parsing " + name,e);
}
return n;
}
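    // Illustrative calls matching the naming conventions documented above (all of these
    // identifiers are the examples from the Javadoc; the cache instance is the hypothetical
    // one from the configuration sketch earlier in this class):
    //   cache.getStructure("4hhb");                 // whole structure
    //   cache.getStructure("4hhb.A");               // one chain, by chain ID
    //   cache.getStructure("4hhb:0");               // first chain, by index
    //   cache.getStructure("4hhb.A:1-10");          // a residue range
    //   cache.getStructure("4hhb(A:1-10,B:1-10)");  // several ranges
    //   cache.getStructure("d2bq6a1");              // a SCOP domain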
private Structure getStructureFromSCOPDomain(String name)
throws IOException, StructureException {
// looks like a SCOP domain!
ScopDomain domain;
if( this.strictSCOP) {
domain = getScopDomain(name);
} else {
domain = guessScopDomain(name);
}
if ( domain != null){
Structure s = getStructureForDomain(domain);
return s;
}
if( !this.strictSCOP) {
Matcher scopMatch = scopIDregex.matcher(name);
if( scopMatch.matches() ) {
String pdbID = scopMatch.group(1);
String chainID = scopMatch.group(2);
// None of the actual SCOP domains match. Guess that '_' means 'whole chain'
if( !chainID.equals("_") ) {
//Add chain identifier
pdbID += "."+scopMatch.group(2);
}
// Fetch the structure by pdb id
Structure struct = getStructure(pdbID);
if(struct != null) {
System.err.println("Trying chain "+pdbID);
}
return struct;
}
}
throw new StructureException("Unable to get structure for SCOP domain: "+name);
}
private Structure getStructureFromURL(URL url) throws IOException, StructureException {
// looks like a URL for a file was provided:
System.out.println("fetching structure from URL:" + url);
String queryS = url.getQuery();
String chainId = null;
if ( queryS != null && (queryS.startsWith("chainId="))) {
chainId = queryS.substring(8);
String fullu = url.toString();
if (fullu.startsWith("file:") && fullu.endsWith("?"+queryS)) {
// for windowze, drop the query part from the URL again
// otherwise there will be a "file not found error" ...
String newu = fullu.substring(0,(fullu.length()-(("?"+queryS).length())));
//System.out.println(newu);
url = new URL(newu);
}
}
PDBFileReader reader = new PDBFileReader();
reader.setPath(path);
reader.setPdbDirectorySplit(isSplit);
reader.setAutoFetch(autoFetch);
reader.setFetchFileEvenIfObsolete(fetchFileEvenIfObsolete);
reader.setFetchCurrent(fetchCurrent);
reader.setFileParsingParameters(params);
Structure s = reader.getStructure(url);
if ( chainId == null)
return StructureTools.getReducedStructure(s,-1);
else
return StructureTools.getReducedStructure(s,chainId);
}
private static final Pattern scopIDregex = Pattern.compile("d(....)(.)(.)" );
/**
* <p>Guess a scop domain. If an exact match is found, return that.
*
* <p>Otherwise, return the first scop domain found for the specified protein
* such that<ul>
* <li>The chains match, or one of the chains is '_' or '.'.
* <li>The domains match, or one of the domains is '_'.
* </ul>
*
*
* @param name
* @return
* @throws IOException
* @throws StructureException
*/
private ScopDomain guessScopDomain(String name) throws IOException, StructureException {
List<ScopDomain> matches = new LinkedList<ScopDomain>();
// Try exact match first
ScopDomain domain = getScopDomain(name);
if ( domain != null){
return domain;
}
// Didn't work. Guess it!
System.err.println("Warning, could not find SCOP domain: " + name);
Matcher scopMatch = scopIDregex.matcher(name);
if( scopMatch.matches() ) {
String pdbID = scopMatch.group(1);
String chainID = scopMatch.group(2);
String domainID = scopMatch.group(3);
if ( scopInstallation == null) {
scopInstallation = new ScopInstallation(path);
}
for( ScopDomain potentialSCOP : scopInstallation.getDomainsForPDB(pdbID) ) {
Matcher potMatch = scopIDregex.matcher(potentialSCOP.getScopId());
if(potMatch.matches()) {
if( chainID.equals(potMatch.group(2)) ||
chainID.equals("_") || chainID.equals(".") ||
potMatch.group(2).equals("_") || potMatch.group(2).equals(".") ) {
if( domainID.equals(potMatch.group(3)) || domainID.equals("_") || potMatch.group(3).equals("_") ) {
// Match, or near match
matches.add(potentialSCOP);
}
}
}
}
}
Iterator<ScopDomain> match = matches.iterator();
if( match.hasNext() ) {
ScopDomain bestMatch = match.next();
System.err.print("Trying domain "+bestMatch.getScopId()+".");
if( match.hasNext() ) {
System.err.print(" Other possibilities: ");
while(match.hasNext()) {
System.err.print(match.next().getScopId() + " ");
}
}
System.err.println();
return bestMatch;
} else {
return null;
}
}
private boolean checkLoading(String name) {
return currentlyLoading.contains(name);
}
private void flagLoading(String name){
if ( ! currentlyLoading.contains(name))
currentlyLoading.add(name);
}
private void flagLoadingFinished(String name){
currentlyLoading.remove(name);
}
private ScopDomain getScopDomain(String scopId)
{
if ( scopInstallation == null) {
scopInstallation = ScopFactory.getSCOP();
}
return scopInstallation.getDomainByScopID(scopId);
}
public ScopDatabase getScopInstallation() {
if ( scopInstallation == null) {
scopInstallation = ScopFactory.getSCOP();
}
return scopInstallation;
}
public FileParsingParameters getFileParsingParams()
{
return params;
}
public void setFileParsingParams(FileParsingParameters params)
{
this.params = params;
}
/**
	 * Loads the default biological unit (*.pdb1.gz) file. If it is not available, the original
	 * PDB file will be loaded, e.g. for NMR structures, where the original file is also the
	 * biological assembly.
*
* @param pdbId the PDB ID
* @return a structure object
* @throws IOException
* @throws StructureException
* @since 3.2
*/
public Structure getBiologicalUnit(String pdbId) throws StructureException, IOException{
int bioAssemblyId = 1;
boolean bioAssemblyFallback = true;
return getBiologicalAssembly(pdbId, bioAssemblyId, bioAssemblyFallback);
}
/**
* Loads the biological assembly for a given PDB ID and bioAssemblyId.
	 * If a bioAssemblyId > 0 is specified, the corresponding biological assembly file will be loaded. Note: the
	 * number of available biological unit files varies. Many entries don't have a biological assembly specified (e.g. NMR structures),
	 * many entries have only one biological assembly (bioAssemblyId=1), and a few structures have multiple biological assemblies.
	 * Set bioAssemblyFallback to true to download the original PDB file in cases where a biological assembly file is not available.
*
* @param pdbId the PDB ID
* @param bioAssemblyId the ID of the biological assembly
* @param bioAssemblyFallback if true, try reading original PDB file in case the biological assembly file is not available
* @return a structure object
* @throws IOException
* @throws StructureException
* @author Peter Rose
* @since 3.2
*/
public Structure getBiologicalAssembly(String pdbId, int bioAssemblyId, boolean bioAssemblyFallback) throws StructureException, IOException {
Structure s;
if (bioAssemblyId < 1) {
throw new StructureException("bioAssemblyID must be greater than zero: " + pdbId +
" bioAssemblyId " + bioAssemblyId);
}
PDBFileReader reader = new PDBFileReader();
reader.setPath(path);
reader.setPdbDirectorySplit(isSplit);
reader.setAutoFetch(autoFetch);
reader.setFetchFileEvenIfObsolete(fetchFileEvenIfObsolete);
reader.setFetchCurrent(fetchCurrent);
reader.setFileParsingParameters(params);
reader.setBioAssemblyId(bioAssemblyId);
reader.setBioAssemblyFallback(bioAssemblyFallback);
s = reader.getStructureById(pdbId.toLowerCase());
s.setPDBCode(pdbId);
return s;
}
}
| extending AtomCache to a new convention that is friendlier for use in file names
git-svn-id: ed25c26de1c5325e8eb0deed0b990ab8af8a4def@9102 7c6358e6-4a41-0410-a743-a5b2a554c398
| biojava3-structure/src/main/java/org/biojava/bio/structure/align/util/AtomCache.java | extending AtomCache to a new convention that is friendlier for use in file names
|
Java | apache-2.0 | 9fd657b84ee86af50ee207e2b5d60054964c30e0 | 0 | mdunker/usergrid,mdunker/usergrid,mdunker/usergrid,mdunker/usergrid,mdunker/usergrid,mdunker/usergrid,mdunker/usergrid,mdunker/usergrid |
/*
*
* * Licensed to the Apache Software Foundation (ASF) under one or more
* * contributor license agreements. The ASF licenses this file to You
* * under the Apache License, Version 2.0 (the "License"); you may not
* * use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License. For additional information regarding
* * copyright in this work, please see the NOTICE file in the top level
* * directory of this distribution.
*
*/
package org.apache.usergrid.persistence.queue;
import rx.Observable;
import java.io.IOException;
import java.util.AbstractQueue;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.PriorityBlockingQueue;
import java.util.concurrent.TimeUnit;
/**
 * Default queue manager implementation; uses an in-memory blocking queue.
*/
public class DefaultQueueManager implements QueueManager {
public ArrayBlockingQueue<QueueMessage> queue = new ArrayBlockingQueue<>(10000);
@Override
public Observable<QueueMessage> getMessages(int limit, int transactionTimeout, int waitTime, Class klass) {
List<QueueMessage> returnQueue = new ArrayList<>();
try {
QueueMessage message=null;
int count = 5;
do {
message = queue.poll(100, TimeUnit.MILLISECONDS);
if (message != null) {
returnQueue.add(message);
}
}while(message!=null && count-->0);
}catch (InterruptedException ie){
throw new RuntimeException(ie);
}
return rx.Observable.from(returnQueue);
}
@Override
public long getQueueDepth() {
return queue.size();
}
@Override
public void commitMessage(QueueMessage queueMessage) {
}
@Override
public void commitMessages(List<QueueMessage> queueMessages) {
}
@Override
public void sendMessages(List bodies) throws IOException {
for(Object body : bodies){
String uuid = UUID.randomUUID().toString();
try {
queue.put(new QueueMessage(uuid, "handle_" + uuid, body, "put type here"));
}catch (InterruptedException ie){
throw new RuntimeException(ie);
}
}
}
@Override
public <T extends Serializable> void sendMessage( final T body ) throws IOException {
String uuid = UUID.randomUUID().toString();
try {
queue.offer(new QueueMessage(uuid, "handle_" + uuid, body, "put type here"),5000,TimeUnit.MILLISECONDS);
}catch (InterruptedException ie){
throw new RuntimeException(ie);
}
}
@Override
public <T extends Serializable> void sendMessageToTopic( final T body ) throws IOException {
sendMessage( body );
}
@Override
public void deleteQueue() {
}
}
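// Usage sketch for the class above (assumptions: an rx.Observable 1.x style subscribe and a
// String payload; the limit/timeout arguments are placeholders, since this in-memory
// implementation ignores most of them):
//   DefaultQueueManager qm = new DefaultQueueManager();
//   qm.sendMessage("hello");
//   qm.getMessages(10, 0, 0, null).subscribe(msg -> qm.commitMessage(msg));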
| stack/corepersistence/queue/src/main/java/org/apache/usergrid/persistence/queue/DefaultQueueManager.java | /*
*
* * Licensed to the Apache Software Foundation (ASF) under one or more
* * contributor license agreements. The ASF licenses this file to You
* * under the Apache License, Version 2.0 (the "License"); you may not
* * use this file except in compliance with the License.
* * You may obtain a copy of the License at
* *
* * http://www.apache.org/licenses/LICENSE-2.0
* *
* * Unless required by applicable law or agreed to in writing, software
* * distributed under the License is distributed on an "AS IS" BASIS,
* * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* * See the License for the specific language governing permissions and
* * limitations under the License. For additional information regarding
* * copyright in this work, please see the NOTICE file in the top level
* * directory of this distribution.
*
*/
package org.apache.usergrid.persistence.queue;
import rx.Observable;
import java.io.IOException;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ArrayBlockingQueue;
/**
 * Default queue manager implementation; uses an in-memory blocking queue.
*/
public class DefaultQueueManager implements QueueManager {
public ArrayBlockingQueue<QueueMessage> queue = new ArrayBlockingQueue<>(10000);
@Override
public synchronized Observable<QueueMessage> getMessages(int limit, int transactionTimeout, int waitTime, Class klass) {
List<QueueMessage> returnQueue = new ArrayList<>();
for(int i=0;i<limit;i++){
if(!queue.isEmpty()){
returnQueue.add( queue.remove());
}else{
break;
}
}
return Observable.from( returnQueue);
}
@Override
public long getQueueDepth() {
return queue.size();
}
@Override
public void commitMessage(QueueMessage queueMessage) {
}
@Override
public void commitMessages(List<QueueMessage> queueMessages) {
}
@Override
public synchronized void sendMessages(List bodies) throws IOException {
for(Object body : bodies){
String uuid = UUID.randomUUID().toString();
            queue.add(new QueueMessage(uuid,"handle_"+uuid,body,"put appropriate type here"));
}
}
@Override
public <T extends Serializable> void sendMessage( final T body ) throws IOException {
String uuid = UUID.randomUUID().toString();
queue.add(new QueueMessage(uuid,"handle_"+uuid,body,"put type here"));
}
@Override
public <T extends Serializable> void sendMessageToTopic( final T body ) throws IOException {
sendMessage( body );
}
@Override
public void deleteQueue() {
}
}
| fix queue manager impl
| stack/corepersistence/queue/src/main/java/org/apache/usergrid/persistence/queue/DefaultQueueManager.java | fix queue manager impl |
|
Java | apache-2.0 | f46931887f0370e07172a5cadac515ec106cfa13 | 0 | apache/felix-dev,apache/felix-dev,apache/felix-dev,apache/felix-dev | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.dm.test;
import static org.ops4j.pax.exam.CoreOptions.mavenBundle;
import static org.ops4j.pax.exam.CoreOptions.options;
import static org.ops4j.pax.exam.CoreOptions.provision;
import junit.framework.Assert;
import org.apache.felix.dm.DependencyManager;
import org.apache.felix.dm.service.Service;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.ops4j.pax.exam.Option;
import org.ops4j.pax.exam.junit.Configuration;
import org.ops4j.pax.exam.junit.JUnit4TestRunner;
import org.osgi.framework.BundleContext;
@RunWith(JUnit4TestRunner.class)
public class FELIX2344_ExtraDependencyWithCallbackTest extends Base {
@Configuration
public static Option[] configuration() {
return options(
provision(
mavenBundle().groupId("org.osgi").artifactId("org.osgi.compendium").version("4.1.0"),
mavenBundle().groupId("org.apache.felix").artifactId("org.apache.felix.dependencymanager").versionAsInProject()
)
);
}
@Test
public void testExtraDependencyWithCallback(BundleContext context) {
DependencyManager m = new DependencyManager(context);
// helper class that ensures certain steps get executed in sequence
Ensure e = new Ensure();
// create a service consumer and provider
Service sp = m.createService().setInterface(MyService.class.getName(), null).setImplementation(MyServiceImpl.class);
Service sc = m.createService().setImplementation(new MyClient(e, false, 1));
Service sc2 = m.createService().setImplementation(new MyClient(e, true, 5));
Service sc3 = m.createService().setImplementation(new MyClient(e, true, 9));
Service sc4 = m.createService().setImplementation(new MyClient2(e, true, 13));
Service sc5 = m.createService().setImplementation(new MyClient2(e, false, 16));
// add the provider first, then add the consumer which initially will have no dependencies
// but via the init() method an optional dependency with a callback method will be added
m.add(sp);
m.add(sc);
// remove the consumer again
m.remove(sc);
e.waitForStep(4, 5000);
// next up, add a second consumer, identical to the first, but with a required dependency
// with a callback method which will be added in the init() method
m.add(sc2);
// remove the consumer again
m.remove(sc2);
e.waitForStep(8, 5000);
// now remove the provider, add a third consumer, identical to the second, and after the
// consumer has started, add the provider again
m.remove(sp);
m.add(sc3);
m.add(sp);
e.waitForStep(12, 5000);
// now, remove the provider, add a fourth consumer (using a required autoconfig dependency, not callbacks), and after
        // the consumer is started, add the provider again.
m.remove(sc3);
m.remove(sp);
m.add(sc4);
m.add(sp);
e.waitForStep(15, 5000);
// now, remove the provider, add a fifth consumer (using optional autoconfig, not callbacks), and check
// if the consumer is injected with a NullObject.
m.remove(sc4);
m.remove(sp);
m.add(sc5);
e.waitForStep(18, 5000);
}
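    // Note on the Ensure helper as used above (an observation from this test, not the helper's
    // full contract): e.step(n) records that numbered step n was reached, and
    // e.waitForStep(n, timeoutMillis) blocks until step n has been reached, up to the given
    // timeout in milliseconds.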
public interface MyService {
}
public static class MyServiceImpl implements MyService {
}
public static class MyClient {
volatile MyService m_myService;
private Ensure m_ensure;
private final boolean m_required;
private final int m_startStep;
public MyClient(Ensure e, boolean required, int startStep) {
m_ensure = e;
m_required = required;
m_startStep = startStep;
}
public void init(DependencyManager dm, Service s) {
m_ensure.step(m_startStep);
s.add(dm.createServiceDependency()
.setInstanceBound(true)
.setService(MyService.class)
.setRequired(m_required)
.setCallbacks("bind", null));
}
void bind(MyService myService) {
m_ensure.step(m_startStep + 1);
m_myService = myService;
}
public void start() {
m_ensure.step(m_startStep + 2);
Assert.assertNotNull("Dependendency should have been injected", m_myService);
m_ensure.step(m_startStep + 3);
}
}
// This client is not using callbacks, but instead, it uses auto config.
public static class MyClient2 {
volatile MyService m_myService;
private Ensure m_ensure;
private final boolean m_required;
private final int m_startStep;
public MyClient2(Ensure e, boolean required, int startStep) {
m_ensure = e;
m_required = required;
m_startStep = startStep;
}
public void init(DependencyManager dm, Service s) {
m_ensure.step(m_startStep);
s.add(dm.createServiceDependency()
.setInstanceBound(true)
.setService(MyService.class)
.setRequired(m_required)
.setAutoConfig("m_myService"));
}
public void start() {
m_ensure.step(m_startStep + 1);
Assert.assertNotNull("Dependendency should have been injected", m_myService);
m_ensure.step(m_startStep + 2);
}
}
} | dependencymanager/test/src/test/java/org/apache/felix/dm/test/FELIX2344_ExtraDependencyWithCallbackTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.felix.dm.test;
import static org.ops4j.pax.exam.CoreOptions.mavenBundle;
import static org.ops4j.pax.exam.CoreOptions.options;
import static org.ops4j.pax.exam.CoreOptions.provision;
import junit.framework.Assert;
import org.apache.felix.dm.DependencyManager;
import org.apache.felix.dm.service.Service;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.ops4j.pax.exam.Option;
import org.ops4j.pax.exam.junit.Configuration;
import org.ops4j.pax.exam.junit.JUnit4TestRunner;
import org.osgi.framework.BundleContext;
@RunWith(JUnit4TestRunner.class)
public class FELIX2344_ExtraDependencyWithCallbackTest extends Base {
@Configuration
public static Option[] configuration() {
return options(
provision(
mavenBundle().groupId("org.osgi").artifactId("org.osgi.compendium").version("4.1.0"),
mavenBundle().groupId("org.apache.felix").artifactId("org.apache.felix.dependencymanager").versionAsInProject()
)
);
}
@Test
public void testExtraDependencyWithCallback(BundleContext context) {
DependencyManager m = new DependencyManager(context);
// helper class that ensures certain steps get executed in sequence
Ensure e = new Ensure();
// create a service consumer and provider
Service sp = m.createService().setInterface(MyService.class.getName(), null).setImplementation(MyServiceImpl.class);
Service sc = m.createService().setImplementation(new MyClient(e, false, 1));
Service sc2 = m.createService().setImplementation(new MyClient(e, true, 5));
Service sc3 = m.createService().setImplementation(new MyClient(e, true, 9));
// add the provider first, then add the consumer which initially will have no dependencies
// but via the init() method an optional dependency with a callback method will be added
m.add(sp);
m.add(sc);
// remove the consumer again
m.remove(sc);
e.waitForStep(4, 5000);
// next up, add a second consumer, identical to the first, but with a required dependency
// with a callback method which will be added in the init() method
m.add(sc2);
// remove the consumer again
m.remove(sc2);
e.waitForStep(8, 5000);
// now remove the provider, add a third consumer, identical to the second, and after the
// consumer has started, add the provider again
m.remove(sp);
m.add(sc3);
m.add(sp);
e.waitForStep(12, 5000);
}
public interface MyService {
}
public static class MyServiceImpl implements MyService {
}
public static class MyClient {
MyService m_myService;
private Ensure m_ensure;
private final boolean m_required;
private final int m_startStep;
public MyClient(Ensure e, boolean required, int startStep) {
m_ensure = e;
m_required = required;
m_startStep = startStep;
}
public void init(DependencyManager dm, Service s) {
m_ensure.step(m_startStep);
s.add(dm.createServiceDependency()
.setInstanceBound(true)
.setService(MyService.class)
.setRequired(m_required)
.setCallbacks("bind", null));
}
void bind(MyService myService) {
m_ensure.step(m_startStep + 1);
m_myService = myService;
}
public void start() {
m_ensure.step(m_startStep + 2);
Assert.assertNotNull("Dependendency should have been injected", m_myService);
m_ensure.step(m_startStep + 3);
}
}
}
| FELIX-2344 added more test steps to this case, which makes the test fail again as the issue is not completely solved yet
git-svn-id: e057f57e93a604d3b43d277ae69bde5ebf332112@951944 13f79535-47bb-0310-9956-ffa450edef68
| dependencymanager/test/src/test/java/org/apache/felix/dm/test/FELIX2344_ExtraDependencyWithCallbackTest.java | FELIX-2344 added more test steps to this case, which makes the test fail again as the issue is not completely solved yet |
|
Java | apache-2.0 | af4cd7ef6fe7d1822ef59b7d989587bfc117461a | 0 | afs/rdf-delta,afs/rdf-delta | package org.seaborne.delta.client;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import static org.seaborne.delta.DeltaConst.DATA;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import org.apache.jena.atlas.lib.FileOps;
import org.apache.jena.atlas.logging.FmtLog;
import org.apache.jena.tdb.base.file.Location;
import org.seaborne.delta.*;
import org.seaborne.delta.client.DeltaConnection.Backing;
import org.seaborne.delta.lib.IOX;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** A "Zone" is a collection of named data sources. */
public class Zone {
private static Logger LOG = LoggerFactory.getLogger(Zone.class);
// Zone state
private volatile boolean INITIALIZED = false;
private Map<Id, DataState> states = new ConcurrentHashMap<>();
private Path connectionStateArea = null;
private Object zoneLock = new Object();
//XXX Current Restriction : one zone.
private Zone() {}
private static Zone singleton = new Zone();
public static Zone get() {
return singleton;
}
public void reset() {
states.clear();
}
    /** Reset to the uninitialized state. Should not be needed in normal operation;
     * mainly for testing. */
public void shutdown() {
synchronized(zoneLock) {
if ( ! INITIALIZED )
return ;
states.clear();
connectionStateArea = null;
INITIALIZED = false;
}
}
public List<Id> localConnections() {
return new ArrayList<>(states.keySet());
}
public void init(String area) {
init(Location.create(area));
}
public void init(Location area) {
if ( INITIALIZED ) {
checkInit(area);
return;
}
synchronized(zoneLock) {
if ( INITIALIZED ) {
checkInit(area);
return;
}
INITIALIZED = true;
connectionStateArea = IOX.asPath(area);
List<Path> x = scanForDataState(area);
x.forEach(p->LOG.info("Connection : "+p));
x.forEach(p->{
DataState dataState = readDataState(p);
states.put(dataState.getDataSourceId(), dataState);
});
}
}
private boolean isInitialized() {
return INITIALIZED;
}
private void checkInit(Location area) {
if ( ! connectionStateArea.equals(area) )
throw new DeltaException("Attempt to reinitialize the Zone: "+connectionStateArea+" => "+area);
}
    /** Is there an area already? */
public boolean exists(Id dsRef) {
return states.containsKey(dsRef);
}
/** Initialize a new area. */
public DataState create(String name, Id dsRef, Backing backing) {
synchronized (zoneLock) {
if ( states.containsKey(dsRef) )
throw new DeltaException("Already exists: data state for " + dsRef + " : name=" + name);
Path conn = connectionStateArea.resolve(name);
FileOps.ensureDir(conn.toString());
Path statePath = conn.resolve(DataState.STATE_FILE);
// XXX PathOps.
Path dataPath = conn.resolve(DATA);
FileOps.ensureDir(dataPath.toString());
// Write disk.
DataState dataState = new DataState(this, statePath, dsRef, 0, null);
states.put(dsRef, dataState);
// switch (backing) {
// case TDB:
// case FILE:
//
// default:
            //            throw new InternalErrorException("Unknown backing storage type: "+backing);
//
// }
return dataState;
}
}
/** Refresh the DataState of a datasource */
public void refresh(Id datasourceId) {
DataState ds = attach(datasourceId);
if ( ds == null )
return;
ds.refresh();
}
public DataState attach(Id datasourceId) {
if ( ! exists(datasourceId) )
throw new DeltaConfigException("Not found: "+datasourceId);
return states.get(datasourceId);
}
// Synonym for attach.
public DataState get(Id datasourceId) {
return attach(datasourceId);
}
/** Release a {@code DataState}. Do not use the {@code DataState} again. */
public void release(DataState dataState) {
release(dataState.getDataSourceId());
}
    /** Release by {@code Id}. Do not use the associated {@code DataState} again. */
public void release(Id dsRef) {
states.remove(dsRef);
}
    /** Put the state file name into DataState, then only keep it here. */
private DataState readDataState(Path p) {
Path versionFile = p.resolve(DataState.STATE_FILE);
if ( ! Files.exists(versionFile) )
throw new DeltaConfigException("No state file: "+versionFile);
PersistentState state = new PersistentState(versionFile);
if ( state.getString().isEmpty() )
throw new DeltaConfigException("Error reading state: version file exist but is empty");
DataState dataState = new DataState(this, state) ;
return dataState;
}
/** Scan a directory for DataSources.
* See {@code LocalServer.scanDirectory} for a similar operation on the server side.
*/
private static List<Path> scanForDataState(Location workarea) {
Path dir = IOX.asPath(workarea);
try {
List<Path> datasources = Files.list(dir)
.filter(p->Files.isDirectory(p))
.filter(Zone::isFormattedDataState)
.collect(Collectors.toList());
return datasources;
}
catch (IOException ex) {
DataState.LOG.error("Exception while reading "+dir);
throw IOX.exception(ex);
}
}
private static boolean isFormattedDataState(Path path) {
// Directory: "data/"
// File: "state"
boolean good = true;
Path dataArea = path.resolve(DeltaConst.DATA);
if ( ! Files.exists(dataArea) ) {
FmtLog.warn(DataState.LOG, "No data area: %s", path);
good = false;
//return false;
}
Path pathState = path.resolve(DataState.STATE_FILE);
if ( ! Files.exists(pathState) ) {
FmtLog.warn(DataState.LOG, "No state file: %s", path);
good = false;
}
// Development - try to continue.
return true;
//return good;
}
}
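// Usage sketch for the Zone singleton above (the zone directory is a hypothetical placeholder,
// and obtaining the data source Id is left abstract; Backing.FILE is one of the constants
// referenced in the commented-out switch in create()):
//   Zone zone = Zone.get();
//   zone.init("/tmp/zone");
//   Id dsRef = ...; // obtained elsewhere
//   DataState state = zone.exists(dsRef)
//       ? zone.attach(dsRef)
//       : zone.create("ds-name", dsRef, Backing.FILE);
//   zone.release(state);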
| rdf-delta-client/src/main/java/org/seaborne/delta/client/Zone.java | package org.seaborne.delta.client;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import static org.seaborne.delta.DeltaConst.DATA;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Collectors;
import org.apache.jena.atlas.lib.FileOps;
import org.apache.jena.atlas.logging.FmtLog;
import org.apache.jena.tdb.base.file.Location;
import org.seaborne.delta.*;
import org.seaborne.delta.client.DeltaConnection.Backing;
import org.seaborne.delta.lib.IOX;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** A "Zone" is a collection of named data sources. */
public class Zone {
private static Logger LOG = LoggerFactory.getLogger(Zone.class);
// Zone state
private volatile boolean INITIALIZED = false;
private Map<Id, DataState> states = new ConcurrentHashMap<>();
private Path connectionStateArea = null;
private Object zoneLock = new Object();
//XXX Current Restriction : one zone.
private Zone() {}
private static Zone singleton = new Zone();
public static Zone get() {
return singleton;
}
public void reset() {
states.clear();
}
    /** Reset to the uninitialized state. Should not be needed in normal operation;
     * mainly for testing. */
public void shutdown() {
synchronized(zoneLock) {
if ( ! INITIALIZED )
return ;
states.clear();
connectionStateArea = null;
INITIALIZED = false;
}
}
public List<Id> localConnections() {
return new ArrayList<>(states.keySet());
}
public void init(String area) {
init(Location.create(area));
}
public void init(Location area) {
if ( INITIALIZED ) {
checkInit(area);
return;
}
synchronized(zoneLock) {
if ( INITIALIZED ) {
checkInit(area);
return;
}
INITIALIZED = true;
connectionStateArea = IOX.asPath(area);
List<Path> x = scanForDataState(area);
x.forEach(p->LOG.info("Connection : "+p));
x.forEach(p->{
DataState dataState = readDataState(p);
states.put(dataState.getDataSourceId(), dataState);
});
}
}
private boolean isInitialized() {
return INITIALIZED;
}
private void checkInit(Location area) {
if ( ! connectionStateArea.equals(area) )
throw new DeltaException("Attempt to reinitialize the Zone: "+connectionStateArea+" => "+area);
}
    /** Is there an area already? */
public boolean exists(Id dsRef) {
return states.containsKey(dsRef);
}
/** Initialize a new area. */
public DataState create(String name, Id dsRef, Backing backing) {
synchronized (zoneLock) {
if ( states.containsKey(dsRef) )
throw new DeltaException("Already exists: data state for " + dsRef + " : name=" + name);
Path conn = connectionStateArea.resolve(name);
FileOps.ensureDir(conn.toString());
Path statePath = conn.resolve(DataState.STATE_FILE);
// XXX PathOps.
Path dataPath = conn.resolve(DATA);
FileOps.ensureDir(dataPath.toString());
// Write disk.
DataState dataState = new DataState(this, statePath, dsRef, 0, null);
states.put(dsRef, dataState);
// switch (backing) {
// case TDB:
// case FILE:
//
// default:
            //            throw new InternalErrorException("Unknown backing storage type: "+backing);
//
// }
return dataState;
}
}
/** Refresh the DataState of a datasource */
public void refresh(Id datasourceId) {
DataState ds = attach(datasourceId);
if ( ds == null )
return;
ds.refresh();
}
public DataState attach(Id datasourceId) {
if ( ! exists(datasourceId) )
throw new DeltaConfigException("Not found: "+datasourceId);
return states.get(datasourceId);
}
/** Release a {@code DataState}. Do not use the {@code DataState} again. */
public void release(DataState dataState) {
release(dataState.getDataSourceId());
}
    /** Release by {@code Id}. Do not use the associated {@code DataState} again. */
public void release(Id dsRef) {
states.remove(dsRef);
}
    /** Put the state file name into DataState, then only keep it here. */
private DataState readDataState(Path p) {
Path versionFile = p.resolve(DataState.STATE_FILE);
if ( ! Files.exists(versionFile) )
throw new DeltaConfigException("No state file: "+versionFile);
PersistentState state = new PersistentState(versionFile);
if ( state.getString().isEmpty() )
throw new DeltaConfigException("Error reading state: version file exist but is empty");
DataState dataState = new DataState(this, state) ;
return dataState;
}
/** Scan a directory for DataSources.
* See {@code LocalServer.scanDirectory} for a similar operation on the server side.
*/
private static List<Path> scanForDataState(Location workarea) {
Path dir = IOX.asPath(workarea);
try {
List<Path> datasources = Files.list(dir)
.filter(p->Files.isDirectory(p))
.filter(Zone::isFormattedDataState)
.collect(Collectors.toList());
return datasources;
}
catch (IOException ex) {
DataState.LOG.error("Exception while reading "+dir);
throw IOX.exception(ex);
}
}
private static boolean isFormattedDataState(Path path) {
// Directory: "data/"
// File: "state"
boolean good = true;
Path dataArea = path.resolve(DeltaConst.DATA);
if ( ! Files.exists(dataArea) ) {
FmtLog.warn(DataState.LOG, "No data area: %s", path);
good = false;
//return false;
}
Path pathState = path.resolve(DataState.STATE_FILE);
if ( ! Files.exists(pathState) ) {
FmtLog.warn(DataState.LOG, "No state file: %s", path);
good = false;
}
// Development - try to continue.
return true;
//return good;
}
}
| Zone.get(Id)
| rdf-delta-client/src/main/java/org/seaborne/delta/client/Zone.java | Zone.get(Id) |
|
Java | apache-2.0 | 1ffa006611380c706537329eba690cbac2e5a6a1 | 0 | Turbots/spring-social,wendelas/spring-social,royclarkson/spring-social,domix/spring-social,okohub/spring-social,okohub/spring-social,molindo/spring-social,molindo/spring-social,codeconsole/spring-social,spring-projects/spring-social,shanika/spring-social,wilkinsona/spring-social,shanika/spring-social,wooder79/spring-social_SOCIAL-196,royclarkson/spring-social,wooder79/spring-social_SOCIAL-196,wendelas/spring-social,domix/spring-social,wilkinsona/spring-social,Turbots/spring-social,codeconsole/spring-social,spring-projects/spring-social | /*
* Copyright 2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.social.facebook.web;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import org.springframework.core.MethodParameter;
import org.springframework.web.bind.support.WebArgumentResolver;
import org.springframework.web.context.request.NativeWebRequest;
/**
* Web argument resolver that resolves arguments annotated with {@link FacebookCookieValue}.
* When using Facebook's JavaScript API, the FB.init() call will set a cookie whose name is "fbs_{api key}" if the user is signed into Facebook and if
* they have granted the application permission to access their profile.
* This web argument resolver extracts that information from the cookie (if available) and supplies it to a controller handler method as String values.
* {@link FacebookCookieValue} is required by default. If the access token or user ID cannot be resolved and if the annotation is set to be
* required, an exception will be thrown indicating an illegal state. If the annotation is set to not be required, a null will be returned.
* @author Craig Walls
*/
public class FacebookWebArgumentResolver implements WebArgumentResolver {
private final String apiKey;
private final String appSecret;
public FacebookWebArgumentResolver(String apiKey, String appSecret) {
this.apiKey = apiKey;
this.appSecret = appSecret;
}
public Object resolveArgument(MethodParameter parameter, NativeWebRequest request) throws Exception {
FacebookCookieValue annotation = parameter.getParameterAnnotation(FacebookCookieValue.class);
if (annotation == null) {
return WebArgumentResolver.UNRESOLVED;
}
HttpServletRequest nativeRequest = (HttpServletRequest) request.getNativeRequest();
Map<String, String> cookieData = FacebookCookieParser.getFacebookCookieData(nativeRequest.getCookies(), apiKey, appSecret);
String key = annotation.value();
if (!cookieData.containsKey(key) && annotation.required()) {
throw new IllegalStateException("Missing Facebook cookie value '" + key + "'");
}
return cookieData.get(key);
}
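    // Handler-method sketch for the annotation this resolver supports (the request mapping,
    // the cookie key "uid", and the method shape are hypothetical; only value() and required()
    // come from the FacebookCookieValue usage above):
    //   @RequestMapping("/home")
    //   public String home(@FacebookCookieValue(value = "uid", required = false) String userId) {
    //       return (userId != null) ? "home" : "signin";
    //   }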
} | spring-social-facebook/src/main/java/org/springframework/social/facebook/web/FacebookWebArgumentResolver.java | /*
* Copyright 2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.social.facebook.web;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import org.springframework.core.MethodParameter;
import org.springframework.web.bind.support.WebArgumentResolver;
import org.springframework.web.context.request.NativeWebRequest;
/**
* <p>
* Web argument resolver that resolves arguments annotated with
* {@link FacebookAccessToken} or {@link FacebookUserId}.
* </p>
*
* <p>
* After a user has authenticated with Facebook via the XFBML
* <fb:login-button> tag, their user ID and an access token are stored in
* a cookie whose name is "fbs_{application key}". This web argument resolver
* extracts that information from the cookie (if available) and supplies it to a
* controller handler method as String values.
* </p>
*
* <p>
* Both {@link FacebookAccessToken} and {@link FacebookUserId} are required by
* default. If the access token or user ID cannot be resolved and if the
* annotation is set to be required, an exception will be thrown indicating an
* illegal state. If the annotation is set to not be required, a null will be
* returned.
* </p>
*
* @author Craig Walls
*/
public class FacebookWebArgumentResolver implements WebArgumentResolver {
private final String apiKey;
private final String appSecret;
public FacebookWebArgumentResolver(String apiKey, String appSecret) {
this.apiKey = apiKey;
this.appSecret = appSecret;
}
public Object resolveArgument(MethodParameter parameter, NativeWebRequest request) throws Exception {
FacebookCookieValue annotation = parameter.getParameterAnnotation(FacebookCookieValue.class);
if (annotation == null) {
return WebArgumentResolver.UNRESOLVED;
}
HttpServletRequest nativeRequest = (HttpServletRequest) request.getNativeRequest();
Map<String, String> cookieData = FacebookCookieParser.getFacebookCookieData(nativeRequest.getCookies(), apiKey, appSecret);
String key = annotation.value();
if (!cookieData.containsKey(key) && annotation.required()) {
throw new IllegalStateException("Missing Facebook cookie value '" + key + "'");
}
return cookieData.get(key);
}
} | Revised JavaDoc in FacebookWebArgumentResolver to more accurately describe when the cookie is set.
| spring-social-facebook/src/main/java/org/springframework/social/facebook/web/FacebookWebArgumentResolver.java | Revised JavaDoc in FacebookWebArgumentResolver to more accurately describe when the cookie is set. |
|
Java | apache-2.0 | 8c51347eeaf4ef60e3d31ffa56abf6269c17ea1f | 0 | veithen/camel,haku/camel,erwelch/camel,snurmine/camel,coderczp/camel,noelo/camel,dvankleef/camel,stalet/camel,joakibj/camel,jonmcewen/camel,maschmid/camel,neoramon/camel,sverkera/camel,duro1/camel,bgaudaen/camel,gautric/camel,edigrid/camel,jamesnetherton/camel,manuelh9r/camel,mnki/camel,jameszkw/camel,coderczp/camel,sebi-hgdata/camel,noelo/camel,brreitme/camel,nikvaessen/camel,anton-k11/camel,cunningt/camel,tkopczynski/camel,ssharma/camel,christophd/camel,nboukhed/camel,w4tson/camel,mnki/camel,yuruki/camel,jpav/camel,bdecoste/camel,mike-kukla/camel,gyc567/camel,yuruki/camel,bgaudaen/camel,onders86/camel,lburgazzoli/apache-camel,noelo/camel,sverkera/camel,satishgummadelli/camel,christophd/camel,lburgazzoli/apache-camel,jlpedrosa/camel,johnpoth/camel,coderczp/camel,sabre1041/camel,rparree/camel,davidwilliams1978/camel,tarilabs/camel,jlpedrosa/camel,dmvolod/camel,haku/camel,NetNow/camel,zregvart/camel,chirino/camel,joakibj/camel,YMartsynkevych/camel,sabre1041/camel,trohovsky/camel,edigrid/camel,curso007/camel,gnodet/camel,noelo/camel,skinzer/camel,oalles/camel,objectiser/camel,erwelch/camel,johnpoth/camel,qst-jdc-labs/camel,lasombra/camel,punkhorn/camel-upstream,snadakuduru/camel,duro1/camel,mike-kukla/camel,lowwool/camel,FingolfinTEK/camel,jlpedrosa/camel,jlpedrosa/camel,jpav/camel,anoordover/camel,gilfernandes/camel,YMartsynkevych/camel,pplatek/camel,stalet/camel,onders86/camel,jmandawg/camel,rparree/camel,FingolfinTEK/camel,davidwilliams1978/camel,dsimansk/camel,tadayosi/camel,kevinearls/camel,isavin/camel,veithen/camel,royopa/camel,sirlatrom/camel,isavin/camel,pkletsko/camel,adessaigne/camel,woj-i/camel,apache/camel,dkhanolkar/camel,kevinearls/camel,bgaudaen/camel,partis/camel,MohammedHammam/camel,ekprayas/camel,arnaud-deprez/camel,manuelh9r/camel,lburgazzoli/camel,qst-jdc-labs/camel,objectiser/camel,royopa/camel,gilfernandes/camel,engagepoint/camel,coderczp/camel,logzio/camel,sirlatrom/camel,erwelch/camel,sirlatrom/camel,koscejev/camel,nikhilvibhav/camel,sirlatrom/camel,allancth/camel,bhaveshdt/camel,tarilabs/camel,mcollovati/camel,nboukhed/camel,partis/camel,josefkarasek/camel,tkopczynski/camel,MrCoder/camel,drsquidop/camel,acartapanis/camel,YMartsynkevych/camel,anoordover/camel,skinzer/camel,ramonmaruko/camel,jameszkw/camel,askannon/camel,yury-vashchyla/camel,trohovsky/camel,nikvaessen/camel,driseley/camel,ssharma/camel,gyc567/camel,dpocock/camel,neoramon/camel,isururanawaka/camel,CandleCandle/camel,sverkera/camel,NetNow/camel,isururanawaka/camel,CandleCandle/camel,royopa/camel,rparree/camel,jkorab/camel,yogamaha/camel,stravag/camel,askannon/camel,driseley/camel,jpav/camel,salikjan/camel,chanakaudaya/camel,lburgazzoli/apache-camel,atoulme/camel,hqstevenson/camel,mohanaraosv/camel,oalles/camel,adessaigne/camel,JYBESSON/camel,igarashitm/camel,ge0ffrey/camel,prashant2402/camel,alvinkwekel/camel,royopa/camel,bdecoste/camel,skinzer/camel,lburgazzoli/camel,driseley/camel,pax95/camel,trohovsky/camel,lburgazzoli/apache-camel,isavin/camel,josefkarasek/camel,ullgren/camel,trohovsky/camel,mohanaraosv/camel,curso007/camel,bgaudaen/camel,RohanHart/camel,snadakuduru/camel,arnaud-deprez/camel,gautric/camel,ge0ffrey/camel,gnodet/camel,snurmine/camel,maschmid/camel,dmvolod/camel,joakibj/camel,haku/camel,MrCoder/camel,yuruki/camel,jollygeorge/camel,pplatek/camel,oscerd/camel,yury-vashchyla/camel,dvankleef/camel,veithen/camel,RohanHart/camel,johnpoth/camel,noelo/camel,pax95/camel,mike-kukla/camel,anoordover/camel,DariusX/camel,oalles/camel
,engagepoint/camel,lasombra/camel,mcollovati/camel,sebi-hgdata/camel,RohanHart/camel,dvankleef/camel,anoordover/camel,dmvolod/camel,hqstevenson/camel,gnodet/camel,chirino/camel,tlehoux/camel,atoulme/camel,manuelh9r/camel,stalet/camel,edigrid/camel,satishgummadelli/camel,yogamaha/camel,adessaigne/camel,maschmid/camel,sirlatrom/camel,davidwilliams1978/camel,YoshikiHigo/camel,coderczp/camel,stalet/camel,prashant2402/camel,RohanHart/camel,gautric/camel,engagepoint/camel,punkhorn/camel-upstream,anton-k11/camel,bgaudaen/camel,acartapanis/camel,apache/camel,lburgazzoli/camel,NetNow/camel,CandleCandle/camel,eformat/camel,akhettar/camel,jameszkw/camel,mgyongyosi/camel,nicolaferraro/camel,drsquidop/camel,oscerd/camel,ge0ffrey/camel,gnodet/camel,rparree/camel,igarashitm/camel,cunningt/camel,MrCoder/camel,woj-i/camel,bfitzpat/camel,dpocock/camel,punkhorn/camel-upstream,allancth/camel,nikhilvibhav/camel,josefkarasek/camel,lburgazzoli/apache-camel,JYBESSON/camel,FingolfinTEK/camel,snadakuduru/camel,rmarting/camel,askannon/camel,lasombra/camel,acartapanis/camel,yogamaha/camel,christophd/camel,igarashitm/camel,ekprayas/camel,pplatek/camel,mcollovati/camel,mcollovati/camel,brreitme/camel,adessaigne/camel,bfitzpat/camel,rmarting/camel,qst-jdc-labs/camel,borcsokj/camel,iweiss/camel,isururanawaka/camel,neoramon/camel,duro1/camel,salikjan/camel,akhettar/camel,iweiss/camel,mike-kukla/camel,mgyongyosi/camel,hqstevenson/camel,jlpedrosa/camel,lowwool/camel,oalles/camel,joakibj/camel,FingolfinTEK/camel,jameszkw/camel,RohanHart/camel,johnpoth/camel,cunningt/camel,ramonmaruko/camel,joakibj/camel,lasombra/camel,MrCoder/camel,tdiesler/camel,yogamaha/camel,dmvolod/camel,scranton/camel,isururanawaka/camel,zregvart/camel,lowwool/camel,Fabryprog/camel,ekprayas/camel,adessaigne/camel,Thopap/camel,isururanawaka/camel,brreitme/camel,woj-i/camel,manuelh9r/camel,ekprayas/camel,pmoerenhout/camel,maschmid/camel,jonmcewen/camel,tadayosi/camel,oalles/camel,grgrzybek/camel,aaronwalker/camel,pax95/camel,grange74/camel,chirino/camel,davidwilliams1978/camel,objectiser/camel,FingolfinTEK/camel,prashant2402/camel,dvankleef/camel,borcsokj/camel,zregvart/camel,yuruki/camel,manuelh9r/camel,CandleCandle/camel,ullgren/camel,borcsokj/camel,nboukhed/camel,yogamaha/camel,pax95/camel,RohanHart/camel,jkorab/camel,kevinearls/camel,bhaveshdt/camel,ramonmaruko/camel,pplatek/camel,Thopap/camel,aaronwalker/camel,pax95/camel,w4tson/camel,stravag/camel,JYBESSON/camel,satishgummadelli/camel,jarst/camel,dkhanolkar/camel,nboukhed/camel,chirino/camel,bdecoste/camel,tdiesler/camel,MohammedHammam/camel,jollygeorge/camel,mohanaraosv/camel,pkletsko/camel,tkopczynski/camel,yury-vashchyla/camel,lowwool/camel,cunningt/camel,erwelch/camel,zregvart/camel,NickCis/camel,jpav/camel,snurmine/camel,tkopczynski/camel,jpav/camel,MohammedHammam/camel,scranton/camel,davidkarlsen/camel,Thopap/camel,ge0ffrey/camel,rmarting/camel,gyc567/camel,tlehoux/camel,haku/camel,snadakuduru/camel,qst-jdc-labs/camel,yury-vashchyla/camel,rmarting/camel,kevinearls/camel,ekprayas/camel,borcsokj/camel,ge0ffrey/camel,jmandawg/camel,MohammedHammam/camel,grange74/camel,bhaveshdt/camel,gilfernandes/camel,eformat/camel,scranton/camel,nicolaferraro/camel,chirino/camel,coderczp/camel,logzio/camel,mzapletal/camel,oalles/camel,mike-kukla/camel,w4tson/camel,partis/camel,chanakaudaya/camel,tarilabs/camel,Fabryprog/camel,jonmcewen/camel,onders86/camel,anton-k11/camel,bfitzpat/camel,mgyongyosi/camel,skinzer/camel,aaronwalker/camel,jmandawg/camel,jamesnetherton/camel,jkorab/camel,oscerd/camel,NetNow/camel,cunn
ingt/camel,rparree/camel,tdiesler/camel,kevinearls/camel,Fabryprog/camel,drsquidop/camel,woj-i/camel,jarst/camel,jamesnetherton/camel,apache/camel,logzio/camel,arnaud-deprez/camel,dsimansk/camel,driseley/camel,logzio/camel,rmarting/camel,akhettar/camel,dpocock/camel,adessaigne/camel,nboukhed/camel,ramonmaruko/camel,jonmcewen/camel,pmoerenhout/camel,duro1/camel,alvinkwekel/camel,igarashitm/camel,pmoerenhout/camel,ssharma/camel,koscejev/camel,jpav/camel,dpocock/camel,atoulme/camel,rmarting/camel,rparree/camel,brreitme/camel,YoshikiHigo/camel,gilfernandes/camel,dkhanolkar/camel,ullgren/camel,NickCis/camel,nicolaferraro/camel,mgyongyosi/camel,tlehoux/camel,Thopap/camel,gautric/camel,skinzer/camel,YMartsynkevych/camel,anton-k11/camel,lburgazzoli/apache-camel,snurmine/camel,apache/camel,lowwool/camel,mgyongyosi/camel,johnpoth/camel,sabre1041/camel,yogamaha/camel,iweiss/camel,objectiser/camel,pplatek/camel,NickCis/camel,askannon/camel,mzapletal/camel,stravag/camel,pmoerenhout/camel,davidkarlsen/camel,woj-i/camel,lburgazzoli/camel,YoshikiHigo/camel,tlehoux/camel,dmvolod/camel,erwelch/camel,lburgazzoli/camel,Thopap/camel,sebi-hgdata/camel,mzapletal/camel,anton-k11/camel,partis/camel,eformat/camel,JYBESSON/camel,iweiss/camel,aaronwalker/camel,logzio/camel,FingolfinTEK/camel,jkorab/camel,johnpoth/camel,sverkera/camel,jkorab/camel,pkletsko/camel,gyc567/camel,christophd/camel,mohanaraosv/camel,sebi-hgdata/camel,neoramon/camel,MohammedHammam/camel,arnaud-deprez/camel,yuruki/camel,satishgummadelli/camel,tarilabs/camel,duro1/camel,YMartsynkevych/camel,nikvaessen/camel,dvankleef/camel,yury-vashchyla/camel,curso007/camel,iweiss/camel,christophd/camel,veithen/camel,prashant2402/camel,sabre1041/camel,bhaveshdt/camel,askannon/camel,acartapanis/camel,gautric/camel,CandleCandle/camel,nikhilvibhav/camel,onders86/camel,jonmcewen/camel,veithen/camel,dsimansk/camel,trohovsky/camel,oscerd/camel,koscejev/camel,allancth/camel,stalet/camel,jamesnetherton/camel,jarst/camel,driseley/camel,NickCis/camel,dkhanolkar/camel,christophd/camel,pmoerenhout/camel,grgrzybek/camel,gilfernandes/camel,dpocock/camel,logzio/camel,mohanaraosv/camel,skinzer/camel,isavin/camel,grange74/camel,MohammedHammam/camel,royopa/camel,jmandawg/camel,dvankleef/camel,oscerd/camel,qst-jdc-labs/camel,jameszkw/camel,grgrzybek/camel,NickCis/camel,stravag/camel,pkletsko/camel,iweiss/camel,YoshikiHigo/camel,edigrid/camel,sebi-hgdata/camel,tadayosi/camel,grange74/camel,igarashitm/camel,bdecoste/camel,isururanawaka/camel,CodeSmell/camel,satishgummadelli/camel,atoulme/camel,nikvaessen/camel,chanakaudaya/camel,YMartsynkevych/camel,apache/camel,eformat/camel,jarst/camel,brreitme/camel,jonmcewen/camel,nikhilvibhav/camel,akhettar/camel,gyc567/camel,snurmine/camel,mzapletal/camel,joakibj/camel,pax95/camel,jarst/camel,snadakuduru/camel,logzio/camel,davidwilliams1978/camel,borcsokj/camel,duro1/camel,maschmid/camel,yury-vashchyla/camel,YoshikiHigo/camel,onders86/camel,tdiesler/camel,drsquidop/camel,nikvaessen/camel,JYBESSON/camel,igarashitm/camel,acartapanis/camel,sverkera/camel,josefkarasek/camel,mike-kukla/camel,stravag/camel,eformat/camel,tdiesler/camel,YoshikiHigo/camel,manuelh9r/camel,bhaveshdt/camel,kevinearls/camel,scranton/camel,tadayosi/camel,ramonmaruko/camel,hqstevenson/camel,MrCoder/camel,DariusX/camel,ssharma/camel,lowwool/camel,engagepoint/camel,grgrzybek/camel,NickCis/camel,nikvaessen/camel,dsimansk/camel,qst-jdc-labs/camel,haku/camel,punkhorn/camel-upstream,tarilabs/camel,sabre1041/camel,koscejev/camel,arnaud-deprez/camel,mnki/camel,mohanaraosv/camel,gra
nge74/camel,davidkarlsen/camel,aaronwalker/camel,mnki/camel,dsimansk/camel,nicolaferraro/camel,bfitzpat/camel,onders86/camel,lburgazzoli/camel,royopa/camel,anton-k11/camel,gyc567/camel,jamesnetherton/camel,veithen/camel,Fabryprog/camel,ekprayas/camel,sverkera/camel,drsquidop/camel,bfitzpat/camel,koscejev/camel,mzapletal/camel,tkopczynski/camel,jollygeorge/camel,jameszkw/camel,pkletsko/camel,anoordover/camel,borcsokj/camel,scranton/camel,partis/camel,allancth/camel,mgyongyosi/camel,brreitme/camel,grgrzybek/camel,koscejev/camel,allancth/camel,Thopap/camel,hqstevenson/camel,alvinkwekel/camel,askannon/camel,NetNow/camel,scranton/camel,satishgummadelli/camel,curso007/camel,jollygeorge/camel,neoramon/camel,drsquidop/camel,eformat/camel,pmoerenhout/camel,sabre1041/camel,gilfernandes/camel,aaronwalker/camel,jamesnetherton/camel,bdecoste/camel,sirlatrom/camel,gnodet/camel,isavin/camel,w4tson/camel,pplatek/camel,acartapanis/camel,oscerd/camel,jmandawg/camel,MrCoder/camel,davidwilliams1978/camel,w4tson/camel,jmandawg/camel,CodeSmell/camel,atoulme/camel,driseley/camel,grgrzybek/camel,ge0ffrey/camel,neoramon/camel,chirino/camel,mnki/camel,prashant2402/camel,alvinkwekel/camel,josefkarasek/camel,dpocock/camel,ramonmaruko/camel,DariusX/camel,chanakaudaya/camel,woj-i/camel,sebi-hgdata/camel,gautric/camel,curso007/camel,hqstevenson/camel,NetNow/camel,curso007/camel,snadakuduru/camel,nboukhed/camel,tlehoux/camel,anoordover/camel,stalet/camel,bdecoste/camel,lasombra/camel,maschmid/camel,erwelch/camel,bhaveshdt/camel,jlpedrosa/camel,bfitzpat/camel,CodeSmell/camel,edigrid/camel,davidkarlsen/camel,pkletsko/camel,chanakaudaya/camel,apache/camel,dkhanolkar/camel,lasombra/camel,allancth/camel,jkorab/camel,atoulme/camel,snurmine/camel,noelo/camel,CandleCandle/camel,mnki/camel,pplatek/camel,dsimansk/camel,akhettar/camel,DariusX/camel,partis/camel,mzapletal/camel,chanakaudaya/camel,CodeSmell/camel,yuruki/camel,prashant2402/camel,JYBESSON/camel,tkopczynski/camel,tlehoux/camel,isavin/camel,tdiesler/camel,bgaudaen/camel,w4tson/camel,stravag/camel,edigrid/camel,tadayosi/camel,cunningt/camel,grange74/camel,josefkarasek/camel,jollygeorge/camel,jarst/camel,jollygeorge/camel,arnaud-deprez/camel,trohovsky/camel,ullgren/camel,engagepoint/camel,tarilabs/camel,akhettar/camel,dmvolod/camel,tadayosi/camel,ssharma/camel,dkhanolkar/camel,ssharma/camel,haku/camel | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.jms;
import java.io.File;
import java.io.InputStream;
import java.io.Reader;
import java.io.Serializable;
import java.nio.ByteBuffer;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import javax.jms.BytesMessage;
import javax.jms.Destination;
import javax.jms.JMSException;
import javax.jms.MapMessage;
import javax.jms.Message;
import javax.jms.MessageFormatException;
import javax.jms.ObjectMessage;
import javax.jms.Session;
import javax.jms.StreamMessage;
import javax.jms.TextMessage;
import org.w3c.dom.Node;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.NoTypeConversionAvailableException;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.StreamCache;
import org.apache.camel.component.file.GenericFile;
import org.apache.camel.impl.DefaultExchangeHolder;
import org.apache.camel.spi.HeaderFilterStrategy;
import org.apache.camel.util.CamelContextHelper;
import org.apache.camel.util.ExchangeHelper;
import org.apache.camel.util.ObjectHelper;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import static org.apache.camel.component.jms.JmsMessageType.Bytes;
import static org.apache.camel.component.jms.JmsMessageType.Map;
import static org.apache.camel.component.jms.JmsMessageType.Object;
import static org.apache.camel.component.jms.JmsMessageType.Text;
/**
* A Strategy used to convert between a Camel {@link Exchange} and {@link JmsMessage}
* to and from a JMS {@link Message}
*
* @version $Revision$
*/
public class JmsBinding {
private static final transient Log LOG = LogFactory.getLog(JmsBinding.class);
private final JmsEndpoint endpoint;
private final HeaderFilterStrategy headerFilterStrategy;
private final JmsKeyFormatStrategy jmsKeyFormatStrategy;
public JmsBinding() {
this.endpoint = null;
headerFilterStrategy = new JmsHeaderFilterStrategy();
jmsKeyFormatStrategy = new DefaultJmsKeyFormatStrategy();
}
public JmsBinding(JmsEndpoint endpoint) {
this.endpoint = endpoint;
if (endpoint.getHeaderFilterStrategy() != null) {
headerFilterStrategy = endpoint.getHeaderFilterStrategy();
} else {
headerFilterStrategy = new JmsHeaderFilterStrategy();
}
if (endpoint.getJmsKeyFormatStrategy() != null) {
jmsKeyFormatStrategy = endpoint.getJmsKeyFormatStrategy();
} else {
jmsKeyFormatStrategy = new DefaultJmsKeyFormatStrategy();
}
}
/**
* Extracts the body from the JMS message
*
* @param exchange the exchange
* @param message the message to extract its body
* @return the body, can be <tt>null</tt>
*/
public Object extractBodyFromJms(Exchange exchange, Message message) {
try {
// is a custom message converter configured on endpoint then use it instead of doing the extraction
// based on message type
if (endpoint != null && endpoint.getMessageConverter() != null) {
if (LOG.isTraceEnabled()) {
LOG.trace("Extracting body using a custom MessageConverter: " + endpoint.getMessageConverter() + " from JMS message: " + message);
}
return endpoint.getMessageConverter().fromMessage(message);
}
// if we are configured to not map the jms message then return it as body
if (endpoint != null && !endpoint.getConfiguration().isMapJmsMessage()) {
if (LOG.isTraceEnabled()) {
LOG.trace("Option map JMS message is false so using JMS message as body: " + message);
}
return message;
}
if (message instanceof ObjectMessage) {
if (LOG.isTraceEnabled()) {
LOG.trace("Extracting body as a ObjectMessage from JMS message: " + message);
}
ObjectMessage objectMessage = (ObjectMessage)message;
Object payload = objectMessage.getObject();
if (payload instanceof DefaultExchangeHolder) {
DefaultExchangeHolder holder = (DefaultExchangeHolder) payload;
DefaultExchangeHolder.unmarshal(exchange, holder);
return exchange.getIn().getBody();
} else {
return objectMessage.getObject();
}
} else if (message instanceof TextMessage) {
if (LOG.isTraceEnabled()) {
LOG.trace("Extracting body as a TextMessage from JMS message: " + message);
}
TextMessage textMessage = (TextMessage)message;
return textMessage.getText();
} else if (message instanceof MapMessage) {
if (LOG.isTraceEnabled()) {
LOG.trace("Extracting body as a MapMessage from JMS message: " + message);
}
return createMapFromMapMessage((MapMessage)message);
} else if (message instanceof BytesMessage) {
if (LOG.isTraceEnabled()) {
LOG.trace("Extracting body as a BytesMessage from JMS message: " + message);
}
return createByteArrayFromBytesMessage((BytesMessage)message);
} else if (message instanceof StreamMessage) {
if (LOG.isTraceEnabled()) {
LOG.trace("Extracting body as a StreamMessage from JMS message: " + message);
}
return message;
} else {
return null;
}
} catch (JMSException e) {
throw new RuntimeCamelException("Failed to extract body due to: " + e + ". Message: " + message, e);
}
}
public Map<String, Object> extractHeadersFromJms(Message jmsMessage, Exchange exchange) {
Map<String, Object> map = new HashMap<String, Object>();
if (jmsMessage != null) {
// lets populate the standard JMS message headers
try {
map.put("JMSCorrelationID", jmsMessage.getJMSCorrelationID());
map.put("JMSDeliveryMode", jmsMessage.getJMSDeliveryMode());
map.put("JMSDestination", jmsMessage.getJMSDestination());
map.put("JMSExpiration", jmsMessage.getJMSExpiration());
map.put("JMSMessageID", jmsMessage.getJMSMessageID());
map.put("JMSPriority", jmsMessage.getJMSPriority());
map.put("JMSRedelivered", jmsMessage.getJMSRedelivered());
map.put("JMSReplyTo", jmsMessage.getJMSReplyTo());
map.put("JMSTimestamp", jmsMessage.getJMSTimestamp());
map.put("JMSType", jmsMessage.getJMSType());
// this works around a bug in the ActiveMQ property handling
map.put("JMSXGroupID", jmsMessage.getStringProperty("JMSXGroupID"));
} catch (JMSException e) {
throw new RuntimeCamelException(e);
}
Enumeration names;
try {
names = jmsMessage.getPropertyNames();
} catch (JMSException e) {
throw new RuntimeCamelException(e);
}
while (names.hasMoreElements()) {
String name = names.nextElement().toString();
try {
Object value = jmsMessage.getObjectProperty(name);
if (headerFilterStrategy != null
&& headerFilterStrategy.applyFilterToExternalHeaders(name, value, exchange)) {
continue;
}
// must decode back from safe JMS header name to original header name
// when storing on this Camel JmsMessage object.
String key = jmsKeyFormatStrategy.decodeKey(name);
map.put(key, value);
} catch (JMSException e) {
throw new RuntimeCamelException(name, e);
}
}
}
return map;
}
public Object getObjectProperty(Message jmsMessage, String name) throws JMSException {
// try a direct lookup first
Object answer = jmsMessage.getObjectProperty(name);
if (answer == null) {
// then encode the key and do another lookup
String key = jmsKeyFormatStrategy.encodeKey(name);
answer = jmsMessage.getObjectProperty(key);
}
return answer;
}
protected byte[] createByteArrayFromBytesMessage(BytesMessage message) throws JMSException {
if (message.getBodyLength() > Integer.MAX_VALUE) {
LOG.warn("Length of BytesMessage is too long: " + message.getBodyLength());
return null;
}
byte[] result = new byte[(int)message.getBodyLength()];
message.readBytes(result);
return result;
}
/**
* Creates a JMS message from the Camel exchange and message
*
* @param exchange the current exchange
* @param session the JMS session used to create the message
* @return a newly created JMS Message instance containing the
* @throws JMSException if the message could not be created
*/
public Message makeJmsMessage(Exchange exchange, Session session) throws JMSException {
return makeJmsMessage(exchange, exchange.getIn(), session, null);
}
/**
* Creates a JMS message from the Camel exchange and message
*
* @param exchange the current exchange
* @param camelMessage the body to make a javax.jms.Message as
* @param session the JMS session used to create the message
* @param cause optional exception occurred that should be sent as reply instead of a regular body
* @return a newly created JMS Message instance containing the
* @throws JMSException if the message could not be created
*/
public Message makeJmsMessage(Exchange exchange, org.apache.camel.Message camelMessage, Session session, Exception cause) throws JMSException {
Message answer = null;
boolean alwaysCopy = endpoint != null && endpoint.getConfiguration().isAlwaysCopyMessage();
if (!alwaysCopy && camelMessage instanceof JmsMessage) {
JmsMessage jmsMessage = (JmsMessage)camelMessage;
if (!jmsMessage.shouldCreateNewMessage()) {
answer = jmsMessage.getJmsMessage();
}
}
if (answer == null) {
if (cause != null) {
// an exception occured so send it as response
if (LOG.isDebugEnabled()) {
LOG.debug("Will create JmsMessage with caused exception: " + cause);
}
// create jms message containg the caused exception
answer = createJmsMessage(cause, session);
} else {
ObjectHelper.notNull(camelMessage, "message");
// create regular jms message using the camel message body
answer = createJmsMessage(exchange, camelMessage.getBody(), camelMessage.getHeaders(), session, exchange.getContext());
appendJmsProperties(answer, exchange, camelMessage);
}
}
return answer;
}
/**
* Appends the JMS headers from the Camel {@link JmsMessage}
*/
public void appendJmsProperties(Message jmsMessage, Exchange exchange) throws JMSException {
appendJmsProperties(jmsMessage, exchange, exchange.getIn());
}
/**
* Appends the JMS headers from the Camel {@link JmsMessage}
*/
public void appendJmsProperties(Message jmsMessage, Exchange exchange, org.apache.camel.Message in) throws JMSException {
Set<Map.Entry<String, Object>> entries = in.getHeaders().entrySet();
for (Map.Entry<String, Object> entry : entries) {
String headerName = entry.getKey();
Object headerValue = entry.getValue();
appendJmsProperty(jmsMessage, exchange, in, headerName, headerValue);
}
}
public void appendJmsProperty(Message jmsMessage, Exchange exchange, org.apache.camel.Message in,
String headerName, Object headerValue) throws JMSException {
if (isStandardJMSHeader(headerName)) {
if (headerName.equals("JMSCorrelationID")) {
jmsMessage.setJMSCorrelationID(ExchangeHelper.convertToType(exchange, String.class, headerValue));
} else if (headerName.equals("JMSReplyTo") && headerValue != null) {
jmsMessage.setJMSReplyTo(ExchangeHelper.convertToType(exchange, Destination.class, headerValue));
} else if (headerName.equals("JMSType")) {
jmsMessage.setJMSType(ExchangeHelper.convertToType(exchange, String.class, headerValue));
} else if (headerName.equals("JMSPriority")) {
jmsMessage.setJMSPriority(ExchangeHelper.convertToType(exchange, Integer.class, headerValue));
} else if (headerName.equals("JMSDeliveryMode")) {
Integer deliveryMode = ExchangeHelper.convertToType(exchange, Integer.class, headerValue);
jmsMessage.setJMSDeliveryMode(deliveryMode);
jmsMessage.setIntProperty(JmsConstants.JMS_DELIVERY_MODE, deliveryMode);
} else if (headerName.equals("JMSExpiration")) {
jmsMessage.setJMSExpiration(ExchangeHelper.convertToType(exchange, Long.class, headerValue));
} else if (LOG.isTraceEnabled()) {
// The following properties are set by the MessageProducer:
// JMSDestination
// The following are set on the underlying JMS provider:
// JMSMessageID, JMSTimestamp, JMSRedelivered
// log at trace level to not spam log
LOG.trace("Ignoring JMS header: " + headerName + " with value: " + headerValue);
}
} else if (shouldOutputHeader(in, headerName, headerValue, exchange)) {
// only primitive headers and strings is allowed as properties
// see message properties: http://java.sun.com/j2ee/1.4/docs/api/javax/jms/Message.html
Object value = getValidJMSHeaderValue(headerName, headerValue);
if (value != null) {
// must encode to safe JMS header name before setting property on jmsMessage
String key = jmsKeyFormatStrategy.encodeKey(headerName);
jmsMessage.setObjectProperty(key, value);
} else if (LOG.isDebugEnabled()) {
// okay the value is not a primitive or string so we cannot sent it over the wire
LOG.debug("Ignoring non primitive header: " + headerName + " of class: "
+ headerValue.getClass().getName() + " with value: " + headerValue);
}
}
}
/**
* Is the given header a standard JMS header
* @param headerName the header name
* @return <tt>true</tt> if its a standard JMS header
*/
protected boolean isStandardJMSHeader(String headerName) {
if (!headerName.startsWith("JMS")) {
return false;
}
if (headerName.startsWith("JMSX")) {
return false;
}
// IBM WebSphereMQ uses JMS_IBM as special headers
if (headerName.startsWith("JMS_")) {
return false;
}
// the 4th char must be a letter to be a standard JMS header
if (headerName.length() > 3) {
Character fourth = headerName.charAt(3);
if (Character.isLetter(fourth)) {
return true;
}
}
return false;
}
/**
* Strategy to test if the given header is valid according to the JMS spec to be set as a property
* on the JMS message.
* <p/>
* This default implementation will allow:
* <ul>
* <li>any primitives and their counter Objects (Integer, Double etc.)</li>
* <li>String and any other literals, Character, CharSequence</li>
* <li>Boolean</li>
* <li>Number</li>
* <li>java.util.Date</li>
* </ul>
*
* @param headerName the header name
* @param headerValue the header value
* @return the value to use, <tt>null</tt> to ignore this header
*/
protected Object getValidJMSHeaderValue(String headerName, Object headerValue) {
if (headerValue instanceof String) {
return headerValue;
} else if (headerValue instanceof Number) {
return headerValue;
} else if (headerValue instanceof Character) {
return headerValue;
} else if (headerValue instanceof CharSequence) {
return headerValue.toString();
} else if (headerValue instanceof Boolean) {
return headerValue.toString();
} else if (headerValue instanceof Date) {
return headerValue.toString();
}
return null;
}
protected Message createJmsMessage(Exception cause, Session session) throws JMSException {
if (LOG.isTraceEnabled()) {
LOG.trace("Using JmsMessageType: " + Object);
}
return session.createObjectMessage(cause);
}
protected Message createJmsMessage(Exchange exchange, Object body, Map<String, Object> headers, Session session, CamelContext context) throws JMSException {
JmsMessageType type = null;
// special for transferExchange
if (endpoint != null && endpoint.isTransferExchange()) {
if (LOG.isTraceEnabled()) {
LOG.trace("Option transferExchange=true so we use JmsMessageType: Object");
}
Serializable holder = DefaultExchangeHolder.marshal(exchange);
return session.createObjectMessage(holder);
}
// use a custom message converter
if (endpoint != null && endpoint.getMessageConverter() != null) {
if (LOG.isTraceEnabled()) {
LOG.trace("Creating JmsMessage using a custom MessageConverter: " + endpoint.getMessageConverter() + " with body: " + body);
}
return endpoint.getMessageConverter().toMessage(body, session);
}
// check if header have a type set, if so we force to use it
if (headers.containsKey(JmsConstants.JMS_MESSAGE_TYPE)) {
type = context.getTypeConverter().convertTo(JmsMessageType.class, headers.get(JmsConstants.JMS_MESSAGE_TYPE));
} else if (endpoint != null && endpoint.getConfiguration().getJmsMessageType() != null) {
// force a specific type from the endpoint configuration
type = endpoint.getConfiguration().getJmsMessageType();
} else {
// let body deterime the type
if (body instanceof Node || body instanceof String) {
type = Text;
} else if (body instanceof byte[] || body instanceof GenericFile || body instanceof File || body instanceof Reader
|| body instanceof InputStream || body instanceof ByteBuffer || body instanceof StreamCache) {
type = Bytes;
} else if (body instanceof Map) {
type = Map;
} else if (body instanceof Serializable) {
type = Object;
}
}
// create the JmsMessage based on the type
if (type != null) {
if (LOG.isTraceEnabled()) {
LOG.trace("Using JmsMessageType: " + type);
}
switch (type) {
case Text: {
TextMessage message = session.createTextMessage();
String payload = context.getTypeConverter().convertTo(String.class, exchange, body);
message.setText(payload);
return message;
}
case Bytes: {
BytesMessage message = session.createBytesMessage();
byte[] payload = context.getTypeConverter().convertTo(byte[].class, exchange, body);
message.writeBytes(payload);
return message;
}
case Map: {
MapMessage message = session.createMapMessage();
Map payload = context.getTypeConverter().convertTo(Map.class, exchange, body);
populateMapMessage(message, payload, context);
return message;
}
case Object:
Serializable payload;
try {
payload = context.getTypeConverter().mandatoryConvertTo(Serializable.class, exchange, body);
} catch (NoTypeConversionAvailableException e) {
// cannot convert to serializable then thrown an exception to avoid sending a null message
JMSException cause = new MessageFormatException(e.getMessage());
cause.initCause(e);
throw cause;
}
return session.createObjectMessage(payload);
default:
break;
}
}
// TODO: should we throw an exception instead?
if (LOG.isWarnEnabled()) {
LOG.warn("Cannot determine specific JmsMessage type to use from body class."
+ " Will use generic JmsMessage."
+ (body != null ? (" Body class: " + body.getClass().getCanonicalName()) : " Body is null")
+ ". If you want to send a POJO then your class might need to implement java.io.Serializable"
+ ", or you can force a specific type by setting the jmsMessageType option on the JMS endpoint.");
}
// return a default message
return session.createMessage();
}
/**
* Populates a {@link MapMessage} from a {@link Map} instance.
*/
protected void populateMapMessage(MapMessage message, Map<?, ?> map, CamelContext context)
throws JMSException {
for (Object key : map.keySet()) {
String keyString = CamelContextHelper.convertTo(context, String.class, key);
if (keyString != null) {
message.setObject(keyString, map.get(key));
}
}
}
/**
* Extracts a {@link Map} from a {@link MapMessage}
*/
public Map<String, Object> createMapFromMapMessage(MapMessage message) throws JMSException {
Map<String, Object> answer = new HashMap<String, Object>();
Enumeration names = message.getMapNames();
while (names.hasMoreElements()) {
String name = names.nextElement().toString();
Object value = message.getObject(name);
answer.put(name, value);
}
return answer;
}
/**
* Strategy to allow filtering of headers which are put on the JMS message
* <p/>
* <b>Note</b>: Currently only supports sending java identifiers as keys
*/
protected boolean shouldOutputHeader(org.apache.camel.Message camelMessage, String headerName,
Object headerValue, Exchange exchange) {
return headerFilterStrategy == null
|| !headerFilterStrategy.applyFilterToCamelHeaders(headerName, headerValue, exchange);
}
}
| components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsBinding.java | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.jms;
import java.io.File;
import java.io.InputStream;
import java.io.Reader;
import java.io.Serializable;
import java.nio.ByteBuffer;
import java.util.Date;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import javax.jms.BytesMessage;
import javax.jms.Destination;
import javax.jms.JMSException;
import javax.jms.MapMessage;
import javax.jms.Message;
import javax.jms.ObjectMessage;
import javax.jms.Session;
import javax.jms.StreamMessage;
import javax.jms.TextMessage;
import org.w3c.dom.Node;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.RuntimeCamelException;
import org.apache.camel.StreamCache;
import org.apache.camel.component.file.GenericFile;
import org.apache.camel.impl.DefaultExchangeHolder;
import org.apache.camel.spi.HeaderFilterStrategy;
import org.apache.camel.util.CamelContextHelper;
import org.apache.camel.util.ExchangeHelper;
import org.apache.camel.util.ObjectHelper;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import static org.apache.camel.component.jms.JmsMessageType.Bytes;
import static org.apache.camel.component.jms.JmsMessageType.Map;
import static org.apache.camel.component.jms.JmsMessageType.Object;
import static org.apache.camel.component.jms.JmsMessageType.Text;
/**
* A Strategy used to convert between a Camel {@link Exchange} and {@link JmsMessage}
* to and from a JMS {@link Message}
*
* @version $Revision$
*/
public class JmsBinding {
private static final transient Log LOG = LogFactory.getLog(JmsBinding.class);
private final JmsEndpoint endpoint;
private final HeaderFilterStrategy headerFilterStrategy;
private final JmsKeyFormatStrategy jmsKeyFormatStrategy;
public JmsBinding() {
this.endpoint = null;
headerFilterStrategy = new JmsHeaderFilterStrategy();
jmsKeyFormatStrategy = new DefaultJmsKeyFormatStrategy();
}
public JmsBinding(JmsEndpoint endpoint) {
this.endpoint = endpoint;
if (endpoint.getHeaderFilterStrategy() != null) {
headerFilterStrategy = endpoint.getHeaderFilterStrategy();
} else {
headerFilterStrategy = new JmsHeaderFilterStrategy();
}
if (endpoint.getJmsKeyFormatStrategy() != null) {
jmsKeyFormatStrategy = endpoint.getJmsKeyFormatStrategy();
} else {
jmsKeyFormatStrategy = new DefaultJmsKeyFormatStrategy();
}
}
/**
* Extracts the body from the JMS message
*
* @param exchange the exchange
* @param message the message to extract its body
* @return the body, can be <tt>null</tt>
*/
public Object extractBodyFromJms(Exchange exchange, Message message) {
try {
// is a custom message converter configured on endpoint then use it instead of doing the extraction
// based on message type
if (endpoint != null && endpoint.getMessageConverter() != null) {
if (LOG.isTraceEnabled()) {
LOG.trace("Extracting body using a custom MessageConverter: " + endpoint.getMessageConverter() + " from JMS message: " + message);
}
return endpoint.getMessageConverter().fromMessage(message);
}
// if we are configured to not map the jms message then return it as body
if (endpoint != null && !endpoint.getConfiguration().isMapJmsMessage()) {
if (LOG.isTraceEnabled()) {
LOG.trace("Option map JMS message is false so using JMS message as body: " + message);
}
return message;
}
if (message instanceof ObjectMessage) {
if (LOG.isTraceEnabled()) {
LOG.trace("Extracting body as a ObjectMessage from JMS message: " + message);
}
ObjectMessage objectMessage = (ObjectMessage)message;
Object payload = objectMessage.getObject();
if (payload instanceof DefaultExchangeHolder) {
DefaultExchangeHolder holder = (DefaultExchangeHolder) payload;
DefaultExchangeHolder.unmarshal(exchange, holder);
return exchange.getIn().getBody();
} else {
return objectMessage.getObject();
}
} else if (message instanceof TextMessage) {
if (LOG.isTraceEnabled()) {
LOG.trace("Extracting body as a TextMessage from JMS message: " + message);
}
TextMessage textMessage = (TextMessage)message;
return textMessage.getText();
} else if (message instanceof MapMessage) {
if (LOG.isTraceEnabled()) {
LOG.trace("Extracting body as a MapMessage from JMS message: " + message);
}
return createMapFromMapMessage((MapMessage)message);
} else if (message instanceof BytesMessage) {
if (LOG.isTraceEnabled()) {
LOG.trace("Extracting body as a BytesMessage from JMS message: " + message);
}
return createByteArrayFromBytesMessage((BytesMessage)message);
} else if (message instanceof StreamMessage) {
if (LOG.isTraceEnabled()) {
LOG.trace("Extracting body as a StreamMessage from JMS message: " + message);
}
return message;
} else {
return null;
}
} catch (JMSException e) {
throw new RuntimeCamelException("Failed to extract body due to: " + e + ". Message: " + message, e);
}
}
public Map<String, Object> extractHeadersFromJms(Message jmsMessage, Exchange exchange) {
Map<String, Object> map = new HashMap<String, Object>();
if (jmsMessage != null) {
// lets populate the standard JMS message headers
try {
map.put("JMSCorrelationID", jmsMessage.getJMSCorrelationID());
map.put("JMSDeliveryMode", jmsMessage.getJMSDeliveryMode());
map.put("JMSDestination", jmsMessage.getJMSDestination());
map.put("JMSExpiration", jmsMessage.getJMSExpiration());
map.put("JMSMessageID", jmsMessage.getJMSMessageID());
map.put("JMSPriority", jmsMessage.getJMSPriority());
map.put("JMSRedelivered", jmsMessage.getJMSRedelivered());
map.put("JMSReplyTo", jmsMessage.getJMSReplyTo());
map.put("JMSTimestamp", jmsMessage.getJMSTimestamp());
map.put("JMSType", jmsMessage.getJMSType());
// this works around a bug in the ActiveMQ property handling
map.put("JMSXGroupID", jmsMessage.getStringProperty("JMSXGroupID"));
} catch (JMSException e) {
throw new RuntimeCamelException(e);
}
Enumeration names;
try {
names = jmsMessage.getPropertyNames();
} catch (JMSException e) {
throw new RuntimeCamelException(e);
}
while (names.hasMoreElements()) {
String name = names.nextElement().toString();
try {
Object value = jmsMessage.getObjectProperty(name);
if (headerFilterStrategy != null
&& headerFilterStrategy.applyFilterToExternalHeaders(name, value, exchange)) {
continue;
}
// must decode back from safe JMS header name to original header name
// when storing on this Camel JmsMessage object.
String key = jmsKeyFormatStrategy.decodeKey(name);
map.put(key, value);
} catch (JMSException e) {
throw new RuntimeCamelException(name, e);
}
}
}
return map;
}
public Object getObjectProperty(Message jmsMessage, String name) throws JMSException {
// try a direct lookup first
Object answer = jmsMessage.getObjectProperty(name);
if (answer == null) {
// then encode the key and do another lookup
String key = jmsKeyFormatStrategy.encodeKey(name);
answer = jmsMessage.getObjectProperty(key);
}
return answer;
}
protected byte[] createByteArrayFromBytesMessage(BytesMessage message) throws JMSException {
if (message.getBodyLength() > Integer.MAX_VALUE) {
LOG.warn("Length of BytesMessage is too long: " + message.getBodyLength());
return null;
}
byte[] result = new byte[(int)message.getBodyLength()];
message.readBytes(result);
return result;
}
/**
* Creates a JMS message from the Camel exchange and message
*
* @param exchange the current exchange
* @param session the JMS session used to create the message
* @return a newly created JMS Message instance containing the
* @throws JMSException if the message could not be created
*/
public Message makeJmsMessage(Exchange exchange, Session session) throws JMSException {
return makeJmsMessage(exchange, exchange.getIn(), session, null);
}
/**
* Creates a JMS message from the Camel exchange and message
*
* @param exchange the current exchange
* @param camelMessage the body to make a javax.jms.Message as
* @param session the JMS session used to create the message
* @param cause optional exception occurred that should be sent as reply instead of a regular body
* @return a newly created JMS Message instance containing the
* @throws JMSException if the message could not be created
*/
public Message makeJmsMessage(Exchange exchange, org.apache.camel.Message camelMessage, Session session, Exception cause) throws JMSException {
Message answer = null;
boolean alwaysCopy = endpoint != null && endpoint.getConfiguration().isAlwaysCopyMessage();
if (!alwaysCopy && camelMessage instanceof JmsMessage) {
JmsMessage jmsMessage = (JmsMessage)camelMessage;
if (!jmsMessage.shouldCreateNewMessage()) {
answer = jmsMessage.getJmsMessage();
}
}
if (answer == null) {
if (cause != null) {
// an exception occured so send it as response
if (LOG.isDebugEnabled()) {
LOG.debug("Will create JmsMessage with caused exception: " + cause);
}
// create jms message containg the caused exception
answer = createJmsMessage(cause, session);
} else {
ObjectHelper.notNull(camelMessage, "message");
// create regular jms message using the camel message body
answer = createJmsMessage(exchange, camelMessage.getBody(), camelMessage.getHeaders(), session, exchange.getContext());
appendJmsProperties(answer, exchange, camelMessage);
}
}
return answer;
}
/**
* Appends the JMS headers from the Camel {@link JmsMessage}
*/
public void appendJmsProperties(Message jmsMessage, Exchange exchange) throws JMSException {
appendJmsProperties(jmsMessage, exchange, exchange.getIn());
}
/**
* Appends the JMS headers from the Camel {@link JmsMessage}
*/
public void appendJmsProperties(Message jmsMessage, Exchange exchange, org.apache.camel.Message in) throws JMSException {
Set<Map.Entry<String, Object>> entries = in.getHeaders().entrySet();
for (Map.Entry<String, Object> entry : entries) {
String headerName = entry.getKey();
Object headerValue = entry.getValue();
appendJmsProperty(jmsMessage, exchange, in, headerName, headerValue);
}
}
public void appendJmsProperty(Message jmsMessage, Exchange exchange, org.apache.camel.Message in,
String headerName, Object headerValue) throws JMSException {
if (isStandardJMSHeader(headerName)) {
if (headerName.equals("JMSCorrelationID")) {
jmsMessage.setJMSCorrelationID(ExchangeHelper.convertToType(exchange, String.class, headerValue));
} else if (headerName.equals("JMSReplyTo") && headerValue != null) {
jmsMessage.setJMSReplyTo(ExchangeHelper.convertToType(exchange, Destination.class, headerValue));
} else if (headerName.equals("JMSType")) {
jmsMessage.setJMSType(ExchangeHelper.convertToType(exchange, String.class, headerValue));
} else if (headerName.equals("JMSPriority")) {
jmsMessage.setJMSPriority(ExchangeHelper.convertToType(exchange, Integer.class, headerValue));
} else if (headerName.equals("JMSDeliveryMode")) {
Integer deliveryMode = ExchangeHelper.convertToType(exchange, Integer.class, headerValue);
jmsMessage.setJMSDeliveryMode(deliveryMode);
jmsMessage.setIntProperty(JmsConstants.JMS_DELIVERY_MODE, deliveryMode);
} else if (headerName.equals("JMSExpiration")) {
jmsMessage.setJMSExpiration(ExchangeHelper.convertToType(exchange, Long.class, headerValue));
} else if (LOG.isTraceEnabled()) {
// The following properties are set by the MessageProducer:
// JMSDestination
// The following are set on the underlying JMS provider:
// JMSMessageID, JMSTimestamp, JMSRedelivered
// log at trace level to not spam log
LOG.trace("Ignoring JMS header: " + headerName + " with value: " + headerValue);
}
} else if (shouldOutputHeader(in, headerName, headerValue, exchange)) {
// only primitive headers and strings is allowed as properties
// see message properties: http://java.sun.com/j2ee/1.4/docs/api/javax/jms/Message.html
Object value = getValidJMSHeaderValue(headerName, headerValue);
if (value != null) {
// must encode to safe JMS header name before setting property on jmsMessage
String key = jmsKeyFormatStrategy.encodeKey(headerName);
jmsMessage.setObjectProperty(key, value);
} else if (LOG.isDebugEnabled()) {
// okay the value is not a primitive or string so we cannot sent it over the wire
LOG.debug("Ignoring non primitive header: " + headerName + " of class: "
+ headerValue.getClass().getName() + " with value: " + headerValue);
}
}
}
/**
* Is the given header a standard JMS header
* @param headerName the header name
* @return <tt>true</tt> if its a standard JMS header
*/
protected boolean isStandardJMSHeader(String headerName) {
if (!headerName.startsWith("JMS")) {
return false;
}
if (headerName.startsWith("JMSX")) {
return false;
}
// IBM WebSphereMQ uses JMS_IBM as special headers
if (headerName.startsWith("JMS_")) {
return false;
}
// the 4th char must be a letter to be a standard JMS header
if (headerName.length() > 3) {
Character fourth = headerName.charAt(3);
if (Character.isLetter(fourth)) {
return true;
}
}
return false;
}
/**
* Strategy to test if the given header is valid according to the JMS spec to be set as a property
* on the JMS message.
* <p/>
* This default implementation will allow:
* <ul>
* <li>any primitives and their counter Objects (Integer, Double etc.)</li>
* <li>String and any other literals, Character, CharSequence</li>
* <li>Boolean</li>
* <li>Number</li>
* <li>java.util.Date</li>
* </ul>
*
* @param headerName the header name
* @param headerValue the header value
* @return the value to use, <tt>null</tt> to ignore this header
*/
protected Object getValidJMSHeaderValue(String headerName, Object headerValue) {
if (headerValue instanceof String) {
return headerValue;
} else if (headerValue instanceof Number) {
return headerValue;
} else if (headerValue instanceof Character) {
return headerValue;
} else if (headerValue instanceof CharSequence) {
return headerValue.toString();
} else if (headerValue instanceof Boolean) {
return headerValue.toString();
} else if (headerValue instanceof Date) {
return headerValue.toString();
}
return null;
}
protected Message createJmsMessage(Exception cause, Session session) throws JMSException {
if (LOG.isTraceEnabled()) {
LOG.trace("Using JmsMessageType: " + Object);
}
return session.createObjectMessage(cause);
}
protected Message createJmsMessage(Exchange exchange, Object body, Map<String, Object> headers, Session session, CamelContext context) throws JMSException {
JmsMessageType type = null;
// special for transferExchange
if (endpoint != null && endpoint.isTransferExchange()) {
if (LOG.isTraceEnabled()) {
LOG.trace("Option transferExchange=true so we use JmsMessageType: Object");
}
Serializable holder = DefaultExchangeHolder.marshal(exchange);
return session.createObjectMessage(holder);
}
// use a custom message converter
if (endpoint != null && endpoint.getMessageConverter() != null) {
if (LOG.isTraceEnabled()) {
LOG.trace("Creating JmsMessage using a custom MessageConverter: " + endpoint.getMessageConverter() + " with body: " + body);
}
return endpoint.getMessageConverter().toMessage(body, session);
}
// check if header have a type set, if so we force to use it
if (headers.containsKey(JmsConstants.JMS_MESSAGE_TYPE)) {
type = context.getTypeConverter().convertTo(JmsMessageType.class, headers.get(JmsConstants.JMS_MESSAGE_TYPE));
} else if (endpoint != null && endpoint.getConfiguration().getJmsMessageType() != null) {
// force a specific type from the endpoint configuration
type = endpoint.getConfiguration().getJmsMessageType();
} else {
// let body deterime the type
if (body instanceof Node || body instanceof String) {
type = Text;
} else if (body instanceof byte[] || body instanceof GenericFile || body instanceof File || body instanceof Reader
|| body instanceof InputStream || body instanceof ByteBuffer || body instanceof StreamCache) {
type = Bytes;
} else if (body instanceof Map) {
type = Map;
} else if (body instanceof Serializable) {
type = Object;
}
}
// create the JmsMessage based on the type
if (type != null) {
if (LOG.isTraceEnabled()) {
LOG.trace("Using JmsMessageType: " + type);
}
switch (type) {
case Text: {
TextMessage message = session.createTextMessage();
String payload = context.getTypeConverter().convertTo(String.class, exchange, body);
message.setText(payload);
return message;
}
case Bytes: {
BytesMessage message = session.createBytesMessage();
byte[] payload = context.getTypeConverter().convertTo(byte[].class, exchange, body);
message.writeBytes(payload);
return message;
}
case Map: {
MapMessage message = session.createMapMessage();
Map payload = context.getTypeConverter().convertTo(Map.class, exchange, body);
populateMapMessage(message, payload, context);
return message;
}
case Object:
Serializable payload = context.getTypeConverter().convertTo(Serializable.class, exchange, body);
return session.createObjectMessage(payload);
default:
break;
}
}
// TODO: should we throw an exception instead?
if (LOG.isWarnEnabled()) {
LOG.warn("Cannot determine specific JmsMessage type to use from body class."
+ " Will use generic JmsMessage."
+ (body != null ? (" Body class: " + body.getClass().getCanonicalName()) : " Body is null")
+ ". If you want to send a POJO then your class might need to implement java.io.Serializable"
+ ", or you can force a specific type by setting the jmsMessageType option on the JMS endpoint.");
}
// return a default message
return session.createMessage();
}
/**
* Populates a {@link MapMessage} from a {@link Map} instance.
*/
protected void populateMapMessage(MapMessage message, Map<?, ?> map, CamelContext context)
throws JMSException {
for (Object key : map.keySet()) {
String keyString = CamelContextHelper.convertTo(context, String.class, key);
if (keyString != null) {
message.setObject(keyString, map.get(key));
}
}
}
/**
* Extracts a {@link Map} from a {@link MapMessage}
*/
public Map<String, Object> createMapFromMapMessage(MapMessage message) throws JMSException {
Map<String, Object> answer = new HashMap<String, Object>();
Enumeration names = message.getMapNames();
while (names.hasMoreElements()) {
String name = names.nextElement().toString();
Object value = message.getObject(name);
answer.put(name, value);
}
return answer;
}
/**
* Strategy to allow filtering of headers which are put on the JMS message
* <p/>
* <b>Note</b>: Currently only supports sending java identifiers as keys
*/
protected boolean shouldOutputHeader(org.apache.camel.Message camelMessage, String headerName,
Object headerValue, Exchange exchange) {
return headerFilterStrategy == null
|| !headerFilterStrategy.applyFilterToCamelHeaders(headerName, headerValue, exchange);
}
}
| CAMEL-2132: jms producer sending Object should madatory convert to serializable and throw exception if not possible.
git-svn-id: 11f3c9e1d08a13a4be44fe98a6d63a9c00f6ab23@832702 13f79535-47bb-0310-9956-ffa450edef68
| components/camel-jms/src/main/java/org/apache/camel/component/jms/JmsBinding.java | CAMEL-2132: jms producer sending Object should madatory convert to serializable and throw exception if not possible. |
|
Java | apache-2.0 | 79831d300ded47f2d8dc0da09a0c0b1879e687cc | 0 | jvasileff/ceylon-spec,ceylon/ceylon-spec,lucaswerkmeister/ceylon-spec,jvasileff/ceylon-spec,ceylon/ceylon-spec,jvasileff/ceylon-spec,ceylon/ceylon-spec | package com.redhat.ceylon.compiler.typechecker.model;
import static com.redhat.ceylon.compiler.typechecker.model.Util.addToUnion;
import static com.redhat.ceylon.compiler.typechecker.model.Util.addToIntersection;
import static com.redhat.ceylon.compiler.typechecker.model.Util.arguments;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* A produced type with actual type arguments.
* This represents something that is actually
* considered a "type" in the language
* specification.
*
* @author Gavin King
*/
public class ProducedType extends ProducedReference {
ProducedType() {}
@Override
public TypeDeclaration getDeclaration() {
return (TypeDeclaration) super.getDeclaration();
}
/**
* Is this type exactly the same type as the
* given type?
*/
public boolean isExactly(ProducedType type) {
if (getDeclaration() instanceof BottomType) {
return type.getDeclaration() instanceof BottomType;
}
else if (getDeclaration() instanceof UnionType) {
List<ProducedType> cases = getCaseTypes();
if (type.getDeclaration() instanceof UnionType) {
List<ProducedType> otherCases = type.getCaseTypes();
if (cases.size()!=otherCases.size()) {
return false;
}
else {
for (ProducedType c: cases) {
boolean found = false;
for (ProducedType oc: otherCases) {
if (c.isExactly(oc)) {
found = true;
break;
}
}
if (!found) {
return false;
}
}
return true;
}
}
else if (cases.size()==1) {
ProducedType st = cases.get(0);
return st.isExactly(type);
}
else {
return false;
}
}
else if (getDeclaration() instanceof IntersectionType) {
List<ProducedType> types = getSatisfiedTypes();
if (type.getDeclaration() instanceof IntersectionType) {
List<ProducedType> otherTypes = type.getSatisfiedTypes();
if (types.size()!=otherTypes.size()) {
return false;
}
else {
for (ProducedType c: types) {
boolean found = false;
for (ProducedType oc: otherTypes) {
if (c.isExactly(oc)) {
found = true;
break;
}
}
if (!found) {
return false;
}
}
return true;
}
}
else if (types.size()==1) {
ProducedType st = types.get(0);
return st.isExactly(type);
}
else {
return false;
}
}
else if (type.getDeclaration() instanceof UnionType) {
List<ProducedType> otherCases = type.getCaseTypes();
if (otherCases.size()==1) {
ProducedType st = otherCases.get(0);
return this.isExactly(st);
}
else {
return false;
}
}
else if (type.getDeclaration() instanceof IntersectionType) {
List<ProducedType> otherTypes = type.getSatisfiedTypes();
if (otherTypes.size()==1) {
ProducedType st = otherTypes.get(0);
return this.isExactly(st);
}
else {
return false;
}
}
else {
if (type.getDeclaration()!=getDeclaration()) {
return false;
}
else {
ProducedType qt = getQualifyingType();
ProducedType tqt = type.getQualifyingType();
if (qt==null) {
if (tqt!=null) {
return false;
}
}
else {
if (tqt==null) {
return false;
}
else {
TypeDeclaration totd = (TypeDeclaration) type.getDeclaration().getContainer();
ProducedType tqts = tqt.getSupertype(totd);
TypeDeclaration otd = (TypeDeclaration) getDeclaration().getContainer();
ProducedType qts = qt.getSupertype(otd);
if ( !qts.isExactly(tqts) ) {
return false;
}
}
}
for (TypeParameter p: getDeclaration().getTypeParameters()) {
ProducedType arg = getTypeArguments().get(p);
ProducedType otherArg = type.getTypeArguments().get(p);
if (arg==null || otherArg==null) {
return false;
/*throw new RuntimeException(
"Missing type argument for: " +
p.getName() + " of " +
getDeclaration().getName());*/
}
else if (!arg.isExactly(otherArg)) {
return false;
}
}
return true;
}
}
}
/**
* Is this type a supertype of the given type?
*/
public boolean isSupertypeOf(ProducedType type) {
return type.isSubtypeOf(this);
}
/**
* Is this type a subtype of the given type?
*/
public boolean isSubtypeOf(ProducedType type) {
return isSubtypeOf(type, null);
}
/**
* Is this type a subtype of the given type? Ignore
* a certain self type constraint.
*/
public boolean isSubtypeOf(ProducedType type, TypeDeclaration selfTypeToIgnore) {
if (getDeclaration() instanceof BottomType) {
return true;
}
else if (type.getDeclaration() instanceof BottomType) {
return false;
}
else if (getDeclaration() instanceof UnionType) {
for (ProducedType ct: getInternalCaseTypes()) {
if (ct==null || !ct.isSubtypeOf(type, selfTypeToIgnore)) {
return false;
}
}
return true;
}
else if (type.getDeclaration() instanceof UnionType) {
for (ProducedType ct: type.getInternalCaseTypes()) {
if (ct!=null && isSubtypeOf(ct, selfTypeToIgnore)) {
return true;
}
}
return false;
}
else if (type.getDeclaration() instanceof IntersectionType) {
for (ProducedType ct: type.getInternalSatisfiedTypes()) {
if (ct!=null && !isSubtypeOf(ct, selfTypeToIgnore)) {
return false;
}
}
return true;
}
else if (getDeclaration() instanceof IntersectionType) {
for (ProducedType ct: getInternalSatisfiedTypes()) {
if (ct==null || ct.isSubtypeOf(type, selfTypeToIgnore)) {
return true;
}
}
return false;
}
else {
ProducedType st = getSupertype(type.getDeclaration(), selfTypeToIgnore);
if (st==null) {
return false;
}
else {
ProducedType stqt = st.getQualifyingType();
ProducedType tqt = type.getQualifyingType();
if (stqt==null) {
if (tqt!=null) {
//probably extraneous!
return false;
}
}
else {
if (tqt==null) {
//probably extraneous!
return false;
}
else {
//note that the qualifying type of the
//given type may be an invariant subtype
//of the type that declares the member
//type, as long as it doesn't refine the
//member type
TypeDeclaration totd = (TypeDeclaration) type.getDeclaration().getContainer();
ProducedType tqts = tqt.getSupertype(totd);
if (!stqt.isSubtypeOf(tqts)) {
return false;
}
}
}
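                //compare the type arguments according to the variance of each type parameter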
for (TypeParameter p: type.getDeclaration().getTypeParameters()) {
ProducedType arg = st.getTypeArguments().get(p);
ProducedType otherArg = type.getTypeArguments().get(p);
if (arg==null || otherArg==null) {
/*throw new RuntimeException("Missing type argument for type parameter: " +
p.getName() + " of " +
type.getDeclaration().getName());*/
return false;
}
else if (p.isCovariant()) {
if (!arg.isSubtypeOf(otherArg)) {
return false;
}
}
else if (p.isContravariant()) {
if (!otherArg.isSubtypeOf(arg)) {
return false;
}
}
else {
if (!arg.isExactly(otherArg)) {
return false;
}
}
}
return true;
}
}
}
/**
* Eliminate the given type from the union type.
* (Performs a set complement operation.) Note
* that this operation is not robust and only
* works if this is a union of the given type
* with some other types that don't involve the
* given type.
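     * For example, {@code (A|B).minus(A)} yields {@code B}.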
*/
public ProducedType minus(ClassOrInterface ci) {
if (getDeclaration()==ci) {
return new BottomType().getType();
}
else if (getDeclaration() instanceof UnionType) {
List<ProducedType> types = new ArrayList<ProducedType>();
for (ProducedType ct: getCaseTypes()) {
if (ct.getSupertype(ci)==null) {
addToUnion(types, ct.minus(ci));
}
}
UnionType ut = new UnionType();
ut.setCaseTypes(types);
return ut.getType();
}
else {
return this;
}
}
/**
* Substitute the given types for the corresponding
* given type parameters wherever they appear in the
* type.
*/
public ProducedType substitute(Map<TypeParameter, ProducedType> substitutions) {
return new Substitution().substitute(this, substitutions);
}
private ProducedType substituteInternal(Map<TypeParameter, ProducedType> substitutions) {
return new InternalSubstitution().substitute(this, substitutions);
}
/**
* A member or member type of the type with actual type
* arguments to the receiving type and invocation.
*/
public ProducedReference getTypedReference(Declaration member,
List<ProducedType> typeArguments) {
if (member instanceof TypeDeclaration) {
return getTypeMember( (TypeDeclaration) member, typeArguments );
}
else {
return getTypedMember( (TypedDeclaration) member, typeArguments);
}
}
/**
* A member of the type with actual type arguments
* to the receiving type and invocation.
*/
public ProducedTypedReference getTypedMember(TypedDeclaration member,
List<ProducedType> typeArguments) {
ProducedType declaringType = getSupertype((TypeDeclaration) member.getContainer());
/*if (declaringType==null) {
return null;
}
else {*/
ProducedTypedReference ptr = new ProducedTypedReference();
ptr.setDeclaration(member);
ptr.setQualifyingType(declaringType);
Map<TypeParameter, ProducedType> map = arguments(member, declaringType, typeArguments);
//map.putAll(sub(map));
ptr.setTypeArguments(map);
return ptr;
//}
}
/**
* A member type of the type with actual type arguments
* to the receiving type and invocation.
*/
public ProducedType getTypeMember(TypeDeclaration member,
List<ProducedType> typeArguments) {
ProducedType declaringType = getSupertype((TypeDeclaration) member.getContainer());
ProducedType pt = new ProducedType();
pt.setDeclaration(member);
pt.setQualifyingType(declaringType);
Map<TypeParameter, ProducedType> map = arguments(member, declaringType, typeArguments);
//map.putAll(sub(map));
pt.setTypeArguments(map);
return pt;
}
/**
* Substitute invocation type arguments into an upper bound
* on a type parameter of the invocation.
*/
public ProducedType getProducedType(ProducedType receiver, Declaration member,
List<ProducedType> typeArguments) {
ProducedType rst = (receiver==null) ? null :
receiver.getSupertype((TypeDeclaration) member.getContainer());
return new Substitution().substitute(this, arguments(member, rst, typeArguments));
}
public ProducedType getType() {
return this;
}
/**
* Get all supertypes of the type by traversing the whole
* type hierarchy. Avoid using this!
*/
public List<ProducedType> getSupertypes() {
return getSupertypes(new ArrayList<ProducedType>());
}
private List<ProducedType> getSupertypes(List<ProducedType> list) {
if ( isWellDefined() && Util.addToSupertypes(list, this) ) {
ProducedType extendedType = getExtendedType();
if (extendedType!=null) {
extendedType.getSupertypes(list);
}
for (ProducedType dst: getSatisfiedTypes()) {
dst.getSupertypes(list);
}
ProducedType selfType = getSelfType();
if (selfType!=null) {
if (!(selfType.getDeclaration() instanceof TypeParameter)) { //TODO: is this really correct???
selfType.getSupertypes(list);
}
}
List<ProducedType> caseTypes = getCaseTypes();
if (caseTypes!=null /*&& !getDeclaration().getCaseTypes().isEmpty()*/) {
for (ProducedType t: caseTypes) {
List<ProducedType> candidates = t.getSupertypes();
for (ProducedType st: candidates) {
boolean include = true;
for (ProducedType ct: getDeclaration().getCaseTypes()) {
if (!ct.isSubtypeOf(st)) {
include = false;
break;
}
}
if (include) {
Util.addToSupertypes(list, st);
}
}
}
}
}
return list;
}
/**
* Given a type declaration, return a produced type of
* which this type is an invariant subtype.
*/
public ProducedType getSupertype(TypeDeclaration dec) {
return getSupertype(dec, null);
}
/**
* Given a type declaration, return a produced type of
* which this type is an invariant subtype. Ignore a
* given self type constraint.
*/
private ProducedType getSupertype(final TypeDeclaration dec,
TypeDeclaration selfTypeToIgnore) {
Criteria c = new Criteria() {
@Override
public boolean satisfies(TypeDeclaration type) {
return type==dec;
}
};
return getSupertype(c, new ArrayList<ProducedType>(), selfTypeToIgnore);
}
/**
* Given a predicate, return a produced type for a
* declaration satisfying the predicate, of which
* this type is an invariant subtype.
*/
ProducedType getSupertype(Criteria c) {
return getSupertype(c, new ArrayList<ProducedType>(), null);
}
static interface Criteria {
boolean satisfies(TypeDeclaration type);
}
/**
* Search for the most-specialized supertype satisfying
* the given predicate.
*/
private ProducedType getSupertype(final Criteria c, List<ProducedType> list,
final TypeDeclaration ignoringSelfType) {
if (c.satisfies(getDeclaration())) {
return qualifiedByDeclaringType();
}
if ( isWellDefined() && Util.addToSupertypes(list, this) ) {
//search for the most-specific supertype
//for the given declaration
ProducedType result = null;
ProducedType extendedType = getInternalExtendedType();
if (extendedType!=null) {
ProducedType possibleResult = extendedType.getSupertype(c, list,
ignoringSelfType);
if (possibleResult!=null) {
result = possibleResult;
}
}
for (ProducedType dst: getInternalSatisfiedTypes()) {
ProducedType possibleResult = dst.getSupertype(c, list,
ignoringSelfType);
if (possibleResult!=null && (result==null ||
possibleResult.isSubtypeOf(result, ignoringSelfType))) {
result = possibleResult;
}
}
if (getDeclaration()!=ignoringSelfType) {
ProducedType selfType = getInternalSelfType();
if (selfType!=null) {
ProducedType possibleResult = selfType.getSupertype(c, list,
ignoringSelfType);
if (possibleResult!=null && (result==null ||
possibleResult.isSubtypeOf(result, ignoringSelfType))) {
result = possibleResult;
}
}
}
final List<ProducedType> caseTypes = getInternalCaseTypes();
if (caseTypes!=null && !caseTypes.isEmpty()) {
//first find a common superclass or superinterface
//declaration that satisfies the criteria, ignoring
//type arguments for now
Criteria c2 = new Criteria() {
@Override
public boolean satisfies(TypeDeclaration type) {
if ( c.satisfies(type) ) {
for (ProducedType ct: caseTypes) {
if (ct.getSupertype(type, ignoringSelfType)==null) {
return false;
}
}
return true;
}
else {
return false;
}
}
};
ProducedType stc = caseTypes.get(0).getSupertype(c2, list,
ignoringSelfType);
if (stc!=null) {
//we found the declaration, now try to construct a
//produced type that is a true common supertype
ProducedType candidateResult = getCommonSupertype(caseTypes,
stc.getDeclaration(), ignoringSelfType);
if (candidateResult!=null && (result==null ||
candidateResult.isSubtypeOf(result, ignoringSelfType))) {
result = candidateResult;
}
}
}
return result;
}
else {
return null;
}
}
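    /**
     * If this is a member type, return the same type with the
     * qualifying type replaced by the supertype of the qualifying
     * type that actually declares this member type.
     */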
private ProducedType qualifiedByDeclaringType() {
ProducedType qt = getQualifyingType();
if (qt==null) {
return this;
}
else {
ProducedType pt = new ProducedType();
pt.setDeclaration(getDeclaration());
pt.setTypeArguments(getTypeArguments());
//replace the qualifying type with
//the supertype of the qualifying
//type that declares this nested
//type, substituting type arguments
ProducedType declaringType = qt.getSupertype((TypeDeclaration) getDeclaration().getContainer());
pt.setQualifyingType(declaringType);
return pt;
}
}
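    /**
     * Try to construct a single supertype of all the given case
     * types with the given declaration, by unioning the type
     * arguments at covariant positions and intersecting them at
     * contravariant positions.
     */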
private static ProducedType getCommonSupertype(final List<ProducedType> caseTypes,
TypeDeclaration dec, final TypeDeclaration selfTypeToIgnore) {
//now try to construct a common produced
//type that is a common supertype by taking
//the type args and unioning them
List<ProducedType> args = new ArrayList<ProducedType>();
for (TypeParameter tp: dec.getTypeParameters()) {
List<ProducedType> list2 = new ArrayList<ProducedType>();
ProducedType result;
if (tp.isContravariant()) {
for (ProducedType pt: caseTypes) {
ProducedType st = pt.getSupertype(dec, selfTypeToIgnore);
if (st==null) {
return null;
}
addToIntersection(list2, st.getTypeArguments().get(tp));
}
IntersectionType it = new IntersectionType();
it.setSatisfiedTypes(list2);
result = it.canonicalize().getType();
}
else {
for (ProducedType pt: caseTypes) {
ProducedType st = pt.getSupertype(dec, selfTypeToIgnore);
if (st==null) {
return null;
}
addToUnion(list2, st.getTypeArguments().get(tp));
}
UnionType ut = new UnionType();
ut.setCaseTypes(list2);
result = ut.getType();
}
args.add(result);
}
//check that the unioned type args
//satisfy the type constraints
for (int i=0; i<args.size(); i++) {
TypeParameter tp = dec.getTypeParameters().get(i);
for (ProducedType ub: tp.getSatisfiedTypes()) {
if (!args.get(i).isSubtypeOf(ub)) {
return null;
}
}
}
//recurse to the qualifying type
ProducedType outerType;
if (dec.isMember()) {
TypeDeclaration outer = (TypeDeclaration) dec.getContainer();
List<ProducedType> list = new ArrayList<ProducedType>();
for (ProducedType pt: caseTypes) {
ProducedType st = pt.getQualifyingType().getSupertype(outer, null);
list.add(st);
}
outerType = getCommonSupertype(list, outer, null);
}
else {
outerType = null;
}
//make the resulting type
ProducedType candidateResult = dec.getProducedType(outerType, args);
        //check that the resulting type is *really*
//a subtype (take variance into account)
for (ProducedType pt: caseTypes) {
if (!pt.isSubtypeOf(candidateResult)) {
return null;
}
}
return candidateResult;
}
/**
* Get the type arguments as a tuple.
*/
public List<ProducedType> getTypeArgumentList() {
List<ProducedType> lpt = new ArrayList<ProducedType>();
for (TypeParameter tp : getDeclaration().getTypeParameters()) {
lpt.add(getTypeArguments().get(tp));
}
return lpt;
}
/**
* Determine if this is a decidable supertype, i.e. if it
* obeys the restriction that types with contravariant
* type parameters may only appear in covariant positions.
*/
public List<TypeDeclaration> checkDecidability() {
List<TypeDeclaration> errors = new ArrayList<TypeDeclaration>();
for (TypeParameter tp: getDeclaration().getTypeParameters()) {
ProducedType pt = getTypeArguments().get(tp);
if (pt!=null) {
pt.checkDecidability(tp.isCovariant(), tp.isContravariant(), errors);
}
}
return errors;
}
private void checkDecidability(boolean covariant, boolean contravariant,
List<TypeDeclaration> errors) {
if (getDeclaration() instanceof TypeParameter) {
//nothing to do
}
else if (getDeclaration() instanceof UnionType) {
for (ProducedType ct: getCaseTypes()) {
ct.checkDecidability(covariant, contravariant, errors);
}
}
else if (getDeclaration() instanceof IntersectionType) {
for (ProducedType ct: getSatisfiedTypes()) {
ct.checkDecidability(covariant, contravariant, errors);
}
}
else {
for (TypeParameter tp: getDeclaration().getTypeParameters()) {
if (!covariant && tp.isContravariant()) {
//a type with contravariant parameters appears at
//a contravariant location in satisfies / extends
errors.add(getDeclaration());
}
ProducedType pt = getTypeArguments().get(tp);
if (pt!=null) {
if (tp.isCovariant()) {
pt.checkDecidability(covariant, contravariant, errors);
}
else if (tp.isContravariant()) {
                        if (covariant||contravariant) {
pt.checkDecidability(!covariant, !contravariant, errors);
}
else {
//else if we are in a nonvariant position, it stays nonvariant
pt.checkDecidability(covariant, contravariant, errors);
}
}
else {
pt.checkDecidability(false, false, errors);
}
}
}
}
}
/**
* Check that this type can appear at a position,
* given the variance of the position (covariant,
     * contravariant, or invariant).
*/
public List<TypeParameter> checkVariance(boolean covariant, boolean contravariant,
Declaration declaration) {
List<TypeParameter> errors = new ArrayList<TypeParameter>();
checkVariance(covariant, contravariant, declaration, errors);
return errors;
}
private void checkVariance(boolean covariant, boolean contravariant,
Declaration declaration, List<TypeParameter> errors) {
//TODO: fix this to allow reporting multiple errors!
if (getDeclaration() instanceof TypeParameter) {
TypeParameter tp = (TypeParameter) getDeclaration();
boolean ok = tp.getDeclaration()==declaration ||
((covariant || !tp.isCovariant()) &&
(contravariant || !tp.isContravariant()));
if (!ok) {
//a covariant type parameter appears in a contravariant location, or
//a contravariant type parameter appears in a covariant location.
errors.add(tp);
}
}
else if (getDeclaration() instanceof UnionType) {
for (ProducedType ct: getCaseTypes()) {
ct.checkVariance(covariant, contravariant, declaration, errors);
}
}
else if (getDeclaration() instanceof IntersectionType) {
for (ProducedType ct: getSatisfiedTypes()) {
ct.checkVariance(covariant, contravariant, declaration, errors);
}
}
else {
for (TypeParameter tp: getDeclaration().getTypeParameters()) {
ProducedType pt = getTypeArguments().get(tp);
if (pt!=null) {
if (tp.isCovariant()) {
pt.checkVariance(covariant, contravariant, declaration, errors);
}
else if (tp.isContravariant()) {
                        if (covariant||contravariant) {
pt.checkVariance(!covariant, !contravariant, declaration, errors);
}
else {
//else if we are in a nonvariant position, it stays nonvariant
pt.checkVariance(covariant, contravariant, declaration, errors);
}
}
else {
pt.checkVariance(false, false, declaration, errors);
}
}
}
}
}
/**
     * Is the type well-defined? Or are any of its type
     * arguments garbage unknown types?
*/
public boolean isWellDefined() {
for (ProducedType at: getTypeArgumentList()) {
if (at==null || !at.isWellDefined() ) {
return false;
}
}
return true;
}
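    //the "internal" helpers below substitute type arguments without
    //normalizing unions, to avoid the stack overflow described on
    //InternalSubstitution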
private ProducedType getInternalSelfType() {
ProducedType selfType = getDeclaration().getSelfType();
return selfType==null?null:selfType.substituteInternal(getTypeArguments());
}
private List<ProducedType> getInternalSatisfiedTypes() {
List<ProducedType> satisfiedTypes = new ArrayList<ProducedType>();
for (ProducedType st: getDeclaration().getSatisfiedTypes()) {
satisfiedTypes.add(st.substituteInternal(getTypeArguments()));
}
return satisfiedTypes;
}
private ProducedType getInternalExtendedType() {
ProducedType extendedType = getDeclaration().getExtendedType();
return extendedType==null?null:extendedType.substituteInternal(getTypeArguments());
}
private List<ProducedType> getInternalCaseTypes() {
if (getDeclaration().getCaseTypes()==null) {
return null;
}
else {
List<ProducedType> caseTypes = new ArrayList<ProducedType>();
for (ProducedType ct: getDeclaration().getCaseTypes()) {
caseTypes.add(ct.substituteInternal(getTypeArguments()));
}
return caseTypes;
}
}
private ProducedType getSelfType() {
ProducedType selfType = getDeclaration().getSelfType();
return selfType==null?null:selfType.substitute(getTypeArguments());
}
private List<ProducedType> getSatisfiedTypes() {
List<ProducedType> satisfiedTypes = new ArrayList<ProducedType>();
for (ProducedType st: getDeclaration().getSatisfiedTypes()) {
satisfiedTypes.add(st.substitute(getTypeArguments()));
}
return satisfiedTypes;
}
private ProducedType getExtendedType() {
ProducedType extendedType = getDeclaration().getExtendedType();
return extendedType==null?null:extendedType.substitute(getTypeArguments());
}
private List<ProducedType> getCaseTypes() {
if (getDeclaration().getCaseTypes()==null) {
return null;
}
else {
List<ProducedType> caseTypes = new ArrayList<ProducedType>();
for (ProducedType ct: getDeclaration().getCaseTypes()) {
caseTypes.add(ct.substitute(getTypeArguments()));
}
return caseTypes;
}
}
/**
* Substitutes type arguments for type parameters.
* This default strategy eliminates duplicate types
* from unions after substituting arguments.
* @author Gavin King
*/
static class Substitution {
ProducedType substitute(ProducedType pt,
Map<TypeParameter, ProducedType> substitutions) {
Declaration dec;
if (pt.getDeclaration() instanceof UnionType) {
UnionType ut = new UnionType();
List<ProducedType> types = new ArrayList<ProducedType>();
for (ProducedType ct: pt.getDeclaration().getCaseTypes()) {
addTypeToUnion(ct, substitutions, types);
}
ut.setCaseTypes(types);
dec = ut;
}
else if (pt.getDeclaration() instanceof IntersectionType) {
IntersectionType it = new IntersectionType();
List<ProducedType> types = new ArrayList<ProducedType>();
for (ProducedType ct: pt.getDeclaration().getSatisfiedTypes()) {
addTypeToIntersection(ct, substitutions, types);
}
it.setSatisfiedTypes(types);
dec = it.canonicalize();
}
else {
if (pt.getDeclaration() instanceof TypeParameter) {
ProducedType sub = substitutions.get(pt.getDeclaration());
if (sub!=null) {
return sub;
}
}
dec = pt.getDeclaration();
}
return substitutedType(dec, pt, substitutions);
}
void addTypeToUnion(ProducedType ct, Map<TypeParameter, ProducedType> substitutions,
List<ProducedType> types) {
if (ct==null) {
types.add(null);
}
else {
addToUnion(types, substitute(ct, substitutions));
}
}
void addTypeToIntersection(ProducedType ct, Map<TypeParameter, ProducedType> substitutions,
List<ProducedType> types) {
if (ct==null) {
types.add(null);
}
else {
addToIntersection(types, substitute(ct, substitutions));
}
}
private Map<TypeParameter, ProducedType> substitutedTypeArguments(ProducedType pt,
Map<TypeParameter, ProducedType> substitutions) {
Map<TypeParameter, ProducedType> map = new HashMap<TypeParameter, ProducedType>();
for (Map.Entry<TypeParameter, ProducedType> e: pt.getTypeArguments().entrySet()) {
if (e.getValue()!=null) {
map.put(e.getKey(), substitute(e.getValue(), substitutions));
}
}
/*ProducedType dt = pt.getDeclaringType();
if (dt!=null) {
map.putAll(substituted(dt, substitutions));
}*/
return map;
}
private ProducedType substitutedType(Declaration dec, ProducedType pt,
Map<TypeParameter, ProducedType> substitutions) {
ProducedType type = new ProducedType();
type.setDeclaration(dec);
ProducedType qt = pt.getQualifyingType();
if (qt!=null) {
type.setQualifyingType(substitute(qt, substitutions));
}
type.setTypeArguments(substitutedTypeArguments(pt, substitutions));
return type;
}
}
/**
* This special strategy for internal use by the
* containing class does not eliminate duplicate
* types from unions after substituting arguments.
* This is to avoid a stack overflow that otherwise
* results! (Determining if a union contains
* duplicates requires recursion to the argument
* substitution code via some very difficult-to-
* understand flow.)
* @author Gavin King
*/
static class InternalSubstitution extends Substitution {
private void addType(ProducedType ct,
Map<TypeParameter, ProducedType> substitutions,
List<ProducedType> types) {
if (ct!=null) {
types.add(substitute(ct, substitutions));
}
}
@Override void addTypeToUnion(ProducedType ct,
Map<TypeParameter, ProducedType> substitutions,
List<ProducedType> types) {
addType(ct, substitutions, types);
}
@Override void addTypeToIntersection(ProducedType ct,
Map<TypeParameter, ProducedType> substitutions,
List<ProducedType> types) {
addType(ct, substitutions, types);
}
}
@Override
public String toString() {
return "Type[" + getProducedTypeName() + "]";
}
public String getProducedTypeName() {
if (getDeclaration()==null) {
//unknown type
return null;
}
String producedTypeName = "";
if (getDeclaration().isMember()) {
producedTypeName += getQualifyingType().getProducedTypeName();
producedTypeName += ".";
}
producedTypeName += getDeclaration().getName();
if (!getTypeArgumentList().isEmpty()) {
producedTypeName += "<";
for (ProducedType t : getTypeArgumentList()) {
if (t==null) {
producedTypeName += "unknown,";
}
else {
producedTypeName += t.getProducedTypeName() + ",";
}
}
producedTypeName += ">";
producedTypeName = producedTypeName.replace(",>", ">");
}
return producedTypeName;
}
public String getProducedTypeQualifiedName() {
if (getDeclaration()==null) {
//unknown type
return null;
}
String producedTypeName = "";
if (getDeclaration().isMember()) {
producedTypeName += getQualifyingType().getProducedTypeQualifiedName();
producedTypeName += ".";
}
producedTypeName += getDeclaration().getQualifiedNameString();
if (!getTypeArgumentList().isEmpty()) {
producedTypeName += "<";
for (ProducedType t : getTypeArgumentList()) {
if (t==null) {
producedTypeName += "?,";
}
else {
producedTypeName += t.getProducedTypeQualifiedName() + ",";
}
}
producedTypeName += ">";
producedTypeName = producedTypeName.replace(",>", ">");
}
return producedTypeName;
}
}
| src/com/redhat/ceylon/compiler/typechecker/model/ProducedType.java | package com.redhat.ceylon.compiler.typechecker.model;
import static com.redhat.ceylon.compiler.typechecker.model.Util.addToUnion;
import static com.redhat.ceylon.compiler.typechecker.model.Util.addToIntersection;
import static com.redhat.ceylon.compiler.typechecker.model.Util.arguments;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* A produced type with actual type arguments.
* This represents something that is actually
* considered a "type" in the language
* specification.
*
* @author Gavin King
*/
public class ProducedType extends ProducedReference {
ProducedType() {}
@Override
public TypeDeclaration getDeclaration() {
return (TypeDeclaration) super.getDeclaration();
}
/**
* Is this type exactly the same type as the
* given type?
*/
public boolean isExactly(ProducedType type) {
if (getDeclaration() instanceof BottomType) {
return type.getDeclaration() instanceof BottomType;
}
else if (getDeclaration() instanceof UnionType) {
List<ProducedType> cases = getCaseTypes();
if (type.getDeclaration() instanceof UnionType) {
List<ProducedType> otherCases = type.getCaseTypes();
if (cases.size()!=otherCases.size()) {
return false;
}
else {
for (ProducedType c: cases) {
boolean found = false;
for (ProducedType oc: otherCases) {
if (c.isExactly(oc)) {
found = true;
break;
}
}
if (!found) {
return false;
}
}
return true;
}
}
else if (cases.size()==1) {
ProducedType st = cases.get(0);
return st.isExactly(type);
}
else {
return false;
}
}
else if (getDeclaration() instanceof IntersectionType) {
List<ProducedType> types = getSatisfiedTypes();
if (type.getDeclaration() instanceof IntersectionType) {
List<ProducedType> otherTypes = type.getSatisfiedTypes();
if (types.size()!=otherTypes.size()) {
return false;
}
else {
for (ProducedType c: types) {
boolean found = false;
for (ProducedType oc: otherTypes) {
if (c.isExactly(oc)) {
found = true;
break;
}
}
if (!found) {
return false;
}
}
return true;
}
}
else if (types.size()==1) {
ProducedType st = types.get(0);
return st.isExactly(type);
}
else {
return false;
}
}
else if (type.getDeclaration() instanceof UnionType) {
List<ProducedType> otherCases = type.getCaseTypes();
if (otherCases.size()==1) {
ProducedType st = otherCases.get(0);
return this.isExactly(st);
}
else {
return false;
}
}
else if (type.getDeclaration() instanceof IntersectionType) {
List<ProducedType> otherTypes = type.getSatisfiedTypes();
if (otherTypes.size()==1) {
ProducedType st = otherTypes.get(0);
return this.isExactly(st);
}
else {
return false;
}
}
else {
if (type.getDeclaration()!=getDeclaration()) {
return false;
}
else {
ProducedType qt = getQualifyingType();
ProducedType tqt = type.getQualifyingType();
if (qt==null) {
if (tqt!=null) {
return false;
}
}
else {
if (tqt==null) {
return false;
}
else {
TypeDeclaration totd = (TypeDeclaration) type.getDeclaration().getContainer();
ProducedType tqts = tqt.getSupertype(totd);
TypeDeclaration otd = (TypeDeclaration) getDeclaration().getContainer();
ProducedType qts = qt.getSupertype(otd);
if ( !qts.isExactly(tqts) ) {
return false;
}
}
}
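                //finally, compare the type arguments, which must match exactly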
for (TypeParameter p: getDeclaration().getTypeParameters()) {
ProducedType arg = getTypeArguments().get(p);
ProducedType otherArg = type.getTypeArguments().get(p);
if (arg==null || otherArg==null) {
return false;
/*throw new RuntimeException(
"Missing type argument for: " +
p.getName() + " of " +
getDeclaration().getName());*/
}
else if (!arg.isExactly(otherArg)) {
return false;
}
}
return true;
}
}
}
/**
* Is this type a supertype of the given type?
*/
public boolean isSupertypeOf(ProducedType type) {
return type.isSubtypeOf(this);
}
/**
* Is this type a subtype of the given type?
*/
public boolean isSubtypeOf(ProducedType type) {
return isSubtypeOf(type, null);
}
/**
* Is this type a subtype of the given type? Ignore
* a certain self type constraint.
*/
public boolean isSubtypeOf(ProducedType type, TypeDeclaration selfTypeToIgnore) {
if (getDeclaration() instanceof BottomType) {
return true;
}
else if (type.getDeclaration() instanceof BottomType) {
return false;
}
else if (getDeclaration() instanceof UnionType) {
for (ProducedType ct: getInternalCaseTypes()) {
if (ct==null || !ct.isSubtypeOf(type, selfTypeToIgnore)) {
return false;
}
}
return true;
}
else if (type.getDeclaration() instanceof UnionType) {
for (ProducedType ct: type.getInternalCaseTypes()) {
if (ct!=null && isSubtypeOf(ct, selfTypeToIgnore)) {
return true;
}
}
return false;
}
else if (type.getDeclaration() instanceof IntersectionType) {
for (ProducedType ct: type.getInternalSatisfiedTypes()) {
if (ct!=null && !isSubtypeOf(ct, selfTypeToIgnore)) {
return false;
}
}
return true;
}
else if (getDeclaration() instanceof IntersectionType) {
for (ProducedType ct: getInternalSatisfiedTypes()) {
if (ct==null || ct.isSubtypeOf(type, selfTypeToIgnore)) {
return true;
}
}
return false;
}
else {
ProducedType st = getSupertype(type.getDeclaration(), selfTypeToIgnore);
if (st==null) {
return false;
}
else {
ProducedType stqt = st.getQualifyingType();
ProducedType tqt = type.getQualifyingType();
if (stqt==null) {
if (tqt!=null) {
//probably extraneous!
return false;
}
}
else {
if (tqt==null) {
//probably extraneous!
return false;
}
else {
//note that the qualifying type of the
//given type may be an invariant subtype
//of the type that declares the member
//type, as long as it doesn't refine the
//member type
TypeDeclaration totd = (TypeDeclaration) type.getDeclaration().getContainer();
ProducedType tqts = tqt.getSupertype(totd);
if (!stqt.isSubtypeOf(tqts)) {
return false;
}
}
}
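                //compare the type arguments according to the variance of each type parameter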
for (TypeParameter p: type.getDeclaration().getTypeParameters()) {
ProducedType arg = st.getTypeArguments().get(p);
ProducedType otherArg = type.getTypeArguments().get(p);
if (arg==null || otherArg==null) {
/*throw new RuntimeException("Missing type argument for type parameter: " +
p.getName() + " of " +
type.getDeclaration().getName());*/
return false;
}
else if (p.isCovariant()) {
if (!arg.isSubtypeOf(otherArg)) {
return false;
}
}
else if (p.isContravariant()) {
if (!otherArg.isSubtypeOf(arg)) {
return false;
}
}
else {
if (!arg.isExactly(otherArg)) {
return false;
}
}
}
return true;
}
}
}
/**
* Eliminate the given type from the union type.
* (Performs a set complement operation.) Note
* that this operation is not robust and only
* works if this is a union of the given type
* with some other types that don't involve the
* given type.
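     * For example, {@code (A|B).minus(A)} yields {@code B}.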
*/
public ProducedType minus(ClassOrInterface ci) {
if (getDeclaration()==ci) {
return new BottomType().getType();
}
else if (getDeclaration() instanceof UnionType) {
List<ProducedType> types = new ArrayList<ProducedType>();
for (ProducedType ct: getCaseTypes()) {
if (ct.getSupertype(ci)==null) {
addToUnion(types, ct.minus(ci));
}
}
UnionType ut = new UnionType();
ut.setCaseTypes(types);
return ut.getType();
}
else {
return this;
}
}
/**
* Substitute the given types for the corresponding
* given type parameters wherever they appear in the
* type.
*/
public ProducedType substitute(Map<TypeParameter, ProducedType> substitutions) {
return new Substitution().substitute(this, substitutions);
}
private ProducedType substituteInternal(Map<TypeParameter, ProducedType> substitutions) {
return new InternalSubstitution().substitute(this, substitutions);
}
/**
* A member or member type of the type with actual type
* arguments to the receiving type and invocation.
*/
public ProducedReference getTypedReference(Declaration member,
List<ProducedType> typeArguments) {
if (member instanceof TypeDeclaration) {
return getTypeMember( (TypeDeclaration) member, typeArguments );
}
else {
return getTypedMember( (TypedDeclaration) member, typeArguments);
}
}
/**
* A member of the type with actual type arguments
* to the receiving type and invocation.
*/
public ProducedTypedReference getTypedMember(TypedDeclaration member,
List<ProducedType> typeArguments) {
ProducedType declaringType = getSupertype((TypeDeclaration) member.getContainer());
/*if (declaringType==null) {
return null;
}
else {*/
ProducedTypedReference ptr = new ProducedTypedReference();
ptr.setDeclaration(member);
ptr.setQualifyingType(declaringType);
Map<TypeParameter, ProducedType> map = arguments(member, declaringType, typeArguments);
//map.putAll(sub(map));
ptr.setTypeArguments(map);
return ptr;
//}
}
/**
* A member type of the type with actual type arguments
* to the receiving type and invocation.
*/
public ProducedType getTypeMember(TypeDeclaration member,
List<ProducedType> typeArguments) {
ProducedType declaringType = getSupertype((TypeDeclaration) member.getContainer());
ProducedType pt = new ProducedType();
pt.setDeclaration(member);
pt.setQualifyingType(declaringType);
Map<TypeParameter, ProducedType> map = arguments(member, declaringType, typeArguments);
//map.putAll(sub(map));
pt.setTypeArguments(map);
return pt;
}
/**
* Substitute invocation type arguments into an upper bound
* on a type parameter of the invocation.
*/
public ProducedType getProducedType(ProducedType receiver, Declaration member,
List<ProducedType> typeArguments) {
ProducedType rst = (receiver==null) ? null :
receiver.getSupertype((TypeDeclaration) member.getContainer());
return new Substitution().substitute(this, arguments(member, rst, typeArguments));
}
public ProducedType getType() {
return this;
}
/**
* Get all supertypes of the type by traversing the whole
* type hierarchy. Avoid using this!
*/
public List<ProducedType> getSupertypes() {
return getSupertypes(new ArrayList<ProducedType>());
}
private List<ProducedType> getSupertypes(List<ProducedType> list) {
if ( isWellDefined() && Util.addToSupertypes(list, this) ) {
ProducedType extendedType = getExtendedType();
if (extendedType!=null) {
extendedType.getSupertypes(list);
}
for (ProducedType dst: getSatisfiedTypes()) {
dst.getSupertypes(list);
}
ProducedType selfType = getSelfType();
if (selfType!=null) {
if (!(selfType.getDeclaration() instanceof TypeParameter)) { //TODO: is this really correct???
selfType.getSupertypes(list);
}
}
List<ProducedType> caseTypes = getCaseTypes();
if (caseTypes!=null /*&& !getDeclaration().getCaseTypes().isEmpty()*/) {
for (ProducedType t: caseTypes) {
List<ProducedType> candidates = t.getSupertypes();
for (ProducedType st: candidates) {
boolean include = true;
for (ProducedType ct: getDeclaration().getCaseTypes()) {
if (!ct.isSubtypeOf(st)) {
include = false;
break;
}
}
if (include) {
Util.addToSupertypes(list, st);
}
}
}
}
}
return list;
}
/**
* Given a type declaration, return a produced type of
* which this type is an invariant subtype.
*/
public ProducedType getSupertype(TypeDeclaration dec) {
return getSupertype(dec, null);
}
/**
* Given a type declaration, return a produced type of
* which this type is an invariant subtype. Ignore a
* given self type constraint.
*/
private ProducedType getSupertype(final TypeDeclaration dec,
TypeDeclaration selfTypeToIgnore) {
Criteria c = new Criteria() {
@Override
public boolean satisfies(TypeDeclaration type) {
return type==dec;
}
};
return getSupertype(c, new ArrayList<ProducedType>(), selfTypeToIgnore);
}
/**
* Given a predicate, return a produced type for a
* declaration satisfying the predicate, of which
* this type is an invariant subtype.
*/
ProducedType getSupertype(Criteria c) {
return getSupertype(c, new ArrayList<ProducedType>(), null);
}
static interface Criteria {
boolean satisfies(TypeDeclaration type);
}
private ProducedType getSupertype(final Criteria c, List<ProducedType> list,
final TypeDeclaration ignoringSelfType) {
if (c.satisfies(getDeclaration())) {
return qualifiedByDeclaringType();
}
if ( isWellDefined() && Util.addToSupertypes(list, this) ) {
//search for the most-specific supertype
//for the given declaration
ProducedType result = null;
ProducedType extendedType = getInternalExtendedType();
if (extendedType!=null) {
ProducedType possibleResult = extendedType.getSupertype(c, list,
ignoringSelfType);
if (possibleResult!=null) {
result = possibleResult;
}
}
for (ProducedType dst: getInternalSatisfiedTypes()) {
ProducedType possibleResult = dst.getSupertype(c, list,
ignoringSelfType);
if (possibleResult!=null && (result==null ||
possibleResult.isSubtypeOf(result, ignoringSelfType))) {
result = possibleResult;
}
}
if (getDeclaration()!=ignoringSelfType) {
ProducedType selfType = getInternalSelfType();
if (selfType!=null) {
ProducedType possibleResult = selfType.getSupertype(c, list,
ignoringSelfType);
if (possibleResult!=null && (result==null ||
possibleResult.isSubtypeOf(result, ignoringSelfType))) {
result = possibleResult;
}
}
}
final List<ProducedType> caseTypes = getInternalCaseTypes();
if (caseTypes!=null && !caseTypes.isEmpty()) {
//first find a common superclass or superinterface
//declaration that satisfies the criteria, ignoring
//type arguments for now
Criteria c2 = new Criteria() {
@Override
public boolean satisfies(TypeDeclaration type) {
if ( c.satisfies(type) ) {
for (ProducedType ct: caseTypes) {
if (ct.getSupertype(type, ignoringSelfType)==null) {
return false;
}
}
return true;
}
else {
return false;
}
}
};
ProducedType stc = caseTypes.get(0).getSupertype(c2, list,
ignoringSelfType);
if (stc!=null) {
//we found the declaration, now try to construct a
//produced type that is a true common supertype
ProducedType candidateResult = getCommonSupertype(caseTypes,
stc.getDeclaration(), ignoringSelfType);
if (candidateResult!=null && (result==null ||
candidateResult.isSubtypeOf(result, ignoringSelfType))) {
result = candidateResult;
}
}
}
return result;
}
else {
return null;
}
}
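    /**
     * If this is a member type, return the same type with the
     * qualifying type replaced by the supertype of the qualifying
     * type that actually declares this member type.
     */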
private ProducedType qualifiedByDeclaringType() {
ProducedType qt = getQualifyingType();
if (qt==null) {
return this;
}
else {
ProducedType pt = new ProducedType();
pt.setDeclaration(getDeclaration());
pt.setTypeArguments(getTypeArguments());
//replace the qualifying type with
//the supertype of the qualifying
//type that declares this nested
//type, substituting type arguments
ProducedType declaringType = qt.getSupertype((TypeDeclaration) getDeclaration().getContainer());
pt.setQualifyingType(declaringType);
return pt;
}
}
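    /**
     * Try to construct a single supertype of all the given case
     * types with the given declaration, by unioning the type
     * arguments at covariant positions and intersecting them at
     * contravariant positions.
     */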
private static ProducedType getCommonSupertype(final List<ProducedType> caseTypes,
TypeDeclaration dec, final TypeDeclaration selfTypeToIgnore) {
//now try to construct a common produced
//type that is a common supertype by taking
//the type args and unioning them
List<ProducedType> args = new ArrayList<ProducedType>();
for (TypeParameter tp: dec.getTypeParameters()) {
List<ProducedType> list2 = new ArrayList<ProducedType>();
ProducedType result;
if (tp.isContravariant()) {
for (ProducedType pt: caseTypes) {
ProducedType st = pt.getSupertype(dec, selfTypeToIgnore);
if (st==null) {
return null;
}
addToIntersection(list2, st.getTypeArguments().get(tp));
}
IntersectionType it = new IntersectionType();
it.setSatisfiedTypes(list2);
result = it.canonicalize().getType();
}
else {
for (ProducedType pt: caseTypes) {
ProducedType st = pt.getSupertype(dec, selfTypeToIgnore);
if (st==null) {
return null;
}
addToUnion(list2, st.getTypeArguments().get(tp));
}
UnionType ut = new UnionType();
ut.setCaseTypes(list2);
result = ut.getType();
}
args.add(result);
}
//check that the unioned type args
//satisfy the type constraints
for (int i=0; i<args.size(); i++) {
TypeParameter tp = dec.getTypeParameters().get(i);
for (ProducedType ub: tp.getSatisfiedTypes()) {
if (!args.get(i).isSubtypeOf(ub)) {
return null;
}
}
}
//recurse to the qualifying type
ProducedType outerType;
if (dec.isMember()) {
TypeDeclaration outer = (TypeDeclaration) dec.getContainer();
List<ProducedType> list = new ArrayList<ProducedType>();
for (ProducedType pt: caseTypes) {
ProducedType st = pt.getQualifyingType().getSupertype(outer, null);
list.add(st);
}
outerType = getCommonSupertype(list, outer, null);
}
else {
outerType = null;
}
//make the resulting type
ProducedType candidateResult = dec.getProducedType(outerType, args);
        //check that the resulting type is *really*
//a subtype (take variance into account)
for (ProducedType pt: caseTypes) {
if (!pt.isSubtypeOf(candidateResult)) {
return null;
}
}
return candidateResult;
}
/**
* Get the type arguments as a tuple.
*/
public List<ProducedType> getTypeArgumentList() {
List<ProducedType> lpt = new ArrayList<ProducedType>();
for (TypeParameter tp : getDeclaration().getTypeParameters()) {
lpt.add(getTypeArguments().get(tp));
}
return lpt;
}
/**
* Determine if this is a decidable supertype, i.e. if it
* obeys the restriction that types with contravariant
* type parameters may only appear in covariant positions.
*/
public List<TypeDeclaration> checkDecidability() {
List<TypeDeclaration> errors = new ArrayList<TypeDeclaration>();
for (TypeParameter tp: getDeclaration().getTypeParameters()) {
ProducedType pt = getTypeArguments().get(tp);
if (pt!=null) {
pt.checkDecidability(tp.isCovariant(), tp.isContravariant(), errors);
}
}
return errors;
}
private void checkDecidability(boolean covariant, boolean contravariant,
List<TypeDeclaration> errors) {
if (getDeclaration() instanceof TypeParameter) {
//nothing to do
}
else if (getDeclaration() instanceof UnionType) {
for (ProducedType ct: getCaseTypes()) {
ct.checkDecidability(covariant, contravariant, errors);
}
}
else if (getDeclaration() instanceof IntersectionType) {
for (ProducedType ct: getSatisfiedTypes()) {
ct.checkDecidability(covariant, contravariant, errors);
}
}
else {
for (TypeParameter tp: getDeclaration().getTypeParameters()) {
if (!covariant && tp.isContravariant()) {
//a type with contravariant parameters appears at
//a contravariant location in satisfies / extends
errors.add(getDeclaration());
}
ProducedType pt = getTypeArguments().get(tp);
if (pt!=null) {
if (tp.isCovariant()) {
pt.checkDecidability(covariant, contravariant, errors);
}
else if (tp.isContravariant()) {
                        if (covariant||contravariant) {
pt.checkDecidability(!covariant, !contravariant, errors);
}
else {
//else if we are in a nonvariant position, it stays nonvariant
pt.checkDecidability(covariant, contravariant, errors);
}
}
else {
pt.checkDecidability(false, false, errors);
}
}
}
}
}
/**
* Check that this type can appear at a position,
* given the variance of the position (covariant,
     * contravariant, or invariant).
*/
public List<TypeParameter> checkVariance(boolean covariant, boolean contravariant,
Declaration declaration) {
List<TypeParameter> errors = new ArrayList<TypeParameter>();
checkVariance(covariant, contravariant, declaration, errors);
return errors;
}
private void checkVariance(boolean covariant, boolean contravariant,
Declaration declaration, List<TypeParameter> errors) {
//TODO: fix this to allow reporting multiple errors!
if (getDeclaration() instanceof TypeParameter) {
TypeParameter tp = (TypeParameter) getDeclaration();
boolean ok = tp.getDeclaration()==declaration ||
((covariant || !tp.isCovariant()) &&
(contravariant || !tp.isContravariant()));
if (!ok) {
//a covariant type parameter appears in a contravariant location, or
//a contravariant type parameter appears in a covariant location.
errors.add(tp);
}
}
else if (getDeclaration() instanceof UnionType) {
for (ProducedType ct: getCaseTypes()) {
ct.checkVariance(covariant, contravariant, declaration, errors);
}
}
else if (getDeclaration() instanceof IntersectionType) {
for (ProducedType ct: getSatisfiedTypes()) {
ct.checkVariance(covariant, contravariant, declaration, errors);
}
}
else {
for (TypeParameter tp: getDeclaration().getTypeParameters()) {
ProducedType pt = getTypeArguments().get(tp);
if (pt!=null) {
if (tp.isCovariant()) {
pt.checkVariance(covariant, contravariant, declaration, errors);
}
else if (tp.isContravariant()) {
                        if (covariant||contravariant) {
pt.checkVariance(!covariant, !contravariant, declaration, errors);
}
else {
//else if we are in a nonvariant position, it stays nonvariant
pt.checkVariance(covariant, contravariant, declaration, errors);
}
}
else {
pt.checkVariance(false, false, declaration, errors);
}
}
}
}
}
/**
     * Is the type well-defined? Or are any of its type
     * arguments garbage unknown types?
*/
public boolean isWellDefined() {
for (ProducedType at: getTypeArgumentList()) {
if (at==null || !at.isWellDefined() ) {
return false;
}
}
return true;
}
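    //the "internal" helpers below substitute type arguments without
    //normalizing unions, to avoid the stack overflow described on
    //InternalSubstitution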
private ProducedType getInternalSelfType() {
ProducedType selfType = getDeclaration().getSelfType();
return selfType==null?null:selfType.substituteInternal(getTypeArguments());
}
private List<ProducedType> getInternalSatisfiedTypes() {
List<ProducedType> satisfiedTypes = new ArrayList<ProducedType>();
for (ProducedType st: getDeclaration().getSatisfiedTypes()) {
satisfiedTypes.add(st.substituteInternal(getTypeArguments()));
}
return satisfiedTypes;
}
private ProducedType getInternalExtendedType() {
ProducedType extendedType = getDeclaration().getExtendedType();
return extendedType==null?null:extendedType.substituteInternal(getTypeArguments());
}
private List<ProducedType> getInternalCaseTypes() {
if (getDeclaration().getCaseTypes()==null) {
return null;
}
else {
List<ProducedType> caseTypes = new ArrayList<ProducedType>();
for (ProducedType ct: getDeclaration().getCaseTypes()) {
caseTypes.add(ct.substituteInternal(getTypeArguments()));
}
return caseTypes;
}
}
private ProducedType getSelfType() {
ProducedType selfType = getDeclaration().getSelfType();
return selfType==null?null:selfType.substitute(getTypeArguments());
}
private List<ProducedType> getSatisfiedTypes() {
List<ProducedType> satisfiedTypes = new ArrayList<ProducedType>();
for (ProducedType st: getDeclaration().getSatisfiedTypes()) {
satisfiedTypes.add(st.substitute(getTypeArguments()));
}
return satisfiedTypes;
}
private ProducedType getExtendedType() {
ProducedType extendedType = getDeclaration().getExtendedType();
return extendedType==null?null:extendedType.substitute(getTypeArguments());
}
private List<ProducedType> getCaseTypes() {
if (getDeclaration().getCaseTypes()==null) {
return null;
}
else {
List<ProducedType> caseTypes = new ArrayList<ProducedType>();
for (ProducedType ct: getDeclaration().getCaseTypes()) {
caseTypes.add(ct.substitute(getTypeArguments()));
}
return caseTypes;
}
}
/**
* Substitutes type arguments for type parameters.
* This default strategy eliminates duplicate types
* from unions after substituting arguments.
* @author Gavin King
*/
static class Substitution {
ProducedType substitute(ProducedType pt,
Map<TypeParameter, ProducedType> substitutions) {
Declaration dec;
if (pt.getDeclaration() instanceof UnionType) {
UnionType ut = new UnionType();
List<ProducedType> types = new ArrayList<ProducedType>();
for (ProducedType ct: pt.getDeclaration().getCaseTypes()) {
addTypeToUnion(ct, substitutions, types);
}
ut.setCaseTypes(types);
dec = ut;
}
else if (pt.getDeclaration() instanceof IntersectionType) {
IntersectionType it = new IntersectionType();
List<ProducedType> types = new ArrayList<ProducedType>();
for (ProducedType ct: pt.getDeclaration().getSatisfiedTypes()) {
addTypeToIntersection(ct, substitutions, types);
}
it.setSatisfiedTypes(types);
dec = it.canonicalize();
}
else {
if (pt.getDeclaration() instanceof TypeParameter) {
ProducedType sub = substitutions.get(pt.getDeclaration());
if (sub!=null) {
return sub;
}
}
dec = pt.getDeclaration();
}
return substitutedType(dec, pt, substitutions);
}
void addTypeToUnion(ProducedType ct, Map<TypeParameter, ProducedType> substitutions,
List<ProducedType> types) {
if (ct==null) {
types.add(null);
}
else {
addToUnion(types, substitute(ct, substitutions));
}
}
void addTypeToIntersection(ProducedType ct, Map<TypeParameter, ProducedType> substitutions,
List<ProducedType> types) {
if (ct==null) {
types.add(null);
}
else {
addToIntersection(types, substitute(ct, substitutions));
}
}
private Map<TypeParameter, ProducedType> substitutedTypeArguments(ProducedType pt,
Map<TypeParameter, ProducedType> substitutions) {
Map<TypeParameter, ProducedType> map = new HashMap<TypeParameter, ProducedType>();
for (Map.Entry<TypeParameter, ProducedType> e: pt.getTypeArguments().entrySet()) {
if (e.getValue()!=null) {
map.put(e.getKey(), substitute(e.getValue(), substitutions));
}
}
/*ProducedType dt = pt.getDeclaringType();
if (dt!=null) {
map.putAll(substituted(dt, substitutions));
}*/
return map;
}
private ProducedType substitutedType(Declaration dec, ProducedType pt,
Map<TypeParameter, ProducedType> substitutions) {
ProducedType type = new ProducedType();
type.setDeclaration(dec);
ProducedType qt = pt.getQualifyingType();
if (qt!=null) {
type.setQualifyingType(substitute(qt, substitutions));
}
type.setTypeArguments(substitutedTypeArguments(pt, substitutions));
return type;
}
}
/**
* This special strategy for internal use by the
* containing class does not eliminate duplicate
* types from unions after substituting arguments.
* This is to avoid a stack overflow that otherwise
* results! (Determining if a union contains
* duplicates requires recursion to the argument
* substitution code via some very difficult-to-
* understand flow.)
* @author Gavin King
*/
static class InternalSubstitution extends Substitution {
private void addType(ProducedType ct,
Map<TypeParameter, ProducedType> substitutions,
List<ProducedType> types) {
if (ct!=null) {
types.add(substitute(ct, substitutions));
}
}
@Override void addTypeToUnion(ProducedType ct,
Map<TypeParameter, ProducedType> substitutions,
List<ProducedType> types) {
addType(ct, substitutions, types);
}
@Override void addTypeToIntersection(ProducedType ct,
Map<TypeParameter, ProducedType> substitutions,
List<ProducedType> types) {
addType(ct, substitutions, types);
}
}
@Override
public String toString() {
return "Type[" + getProducedTypeName() + "]";
}
public String getProducedTypeName() {
if (getDeclaration()==null) {
//unknown type
return null;
}
String producedTypeName = "";
if (getDeclaration().isMember()) {
producedTypeName += getQualifyingType().getProducedTypeName();
producedTypeName += ".";
}
producedTypeName += getDeclaration().getName();
if (!getTypeArgumentList().isEmpty()) {
producedTypeName += "<";
for (ProducedType t : getTypeArgumentList()) {
if (t==null) {
producedTypeName += "unknown,";
}
else {
producedTypeName += t.getProducedTypeName() + ",";
}
}
producedTypeName += ">";
producedTypeName = producedTypeName.replace(",>", ">");
}
return producedTypeName;
}
public String getProducedTypeQualifiedName() {
if (getDeclaration()==null) {
//unknown type
return null;
}
String producedTypeName = "";
if (getDeclaration().isMember()) {
producedTypeName += getQualifyingType().getProducedTypeQualifiedName();
producedTypeName += ".";
}
producedTypeName += getDeclaration().getQualifiedNameString();
if (!getTypeArgumentList().isEmpty()) {
producedTypeName += "<";
for (ProducedType t : getTypeArgumentList()) {
if (t==null) {
producedTypeName += "?,";
}
else {
producedTypeName += t.getProducedTypeQualifiedName() + ",";
}
}
producedTypeName += ">";
producedTypeName = producedTypeName.replace(",>", ">");
}
return producedTypeName;
}
}
| extra javadoc | src/com/redhat/ceylon/compiler/typechecker/model/ProducedType.java | extra javadoc |
|
Java | apache-2.0 | ac0cc5662e7dc101d194cc0de01647c782f6c460 | 0 | idubrov/nanorm,idubrov/nanorm | /**
* Copyright (C) 2008 Ivan S. Dubrov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.code.nanorm.internal;
import java.lang.reflect.Type;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import com.google.code.nanorm.Factory;
import com.google.code.nanorm.ResultCallback;
import com.google.code.nanorm.Transaction;
import com.google.code.nanorm.TypeHandlerFactory;
import com.google.code.nanorm.exceptions.DataException;
import com.google.code.nanorm.internal.config.InternalConfiguration;
import com.google.code.nanorm.internal.config.StatementConfig;
import com.google.code.nanorm.internal.introspect.Getter;
import com.google.code.nanorm.internal.introspect.Setter;
import com.google.code.nanorm.internal.mapping.result.ResultCallbackSource;
import com.google.code.nanorm.internal.mapping.result.ResultCollectorUtil;
import com.google.code.nanorm.internal.mapping.result.ResultMap;
import com.google.code.nanorm.internal.session.SessionSpi;
import com.google.code.nanorm.internal.session.SingleConnSessionSpi;
import com.google.code.nanorm.internal.type.TypeHandler;
/**
*
* @author Ivan Dubrov
* @version 1.0 27.05.2008
*/
public class FactoryImpl implements Factory, QueryDelegate {
final private ThreadLocal<SessionSpi> sessions = new ThreadLocal<SessionSpi>();
final private InternalConfiguration config;
/**
     * Constructor.
     * @param internalConfig internal configuration to use
*/
public FactoryImpl(InternalConfiguration internalConfig) {
this.config = internalConfig;
}
/**
* @see com.google.code.nanorm.Factory#createMapper(java.lang.Class)
*/
public <T> T createMapper(Class<T> mapperClass) {
config.configure(mapperClass);
// TODO: Check we mapped this class!
/*
* return
* mapperClass.cast(Proxy.newProxyInstance(getClass().getClassLoader(),
* new Class<?>[] {mapperClass }, new MapperInvocationHandler()));
*/
return config.getIntrospectionFactory().createMapper(mapperClass, config, this);
}
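    /**
     * Binds the given connection to the current thread and returns a
     * {@link Transaction} for managing the session.
     */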
public Transaction useConnection(Connection connection) {
if (connection == null) {
throw new IllegalArgumentException("Connection must not be null!");
}
if (sessions.get() != null) {
throw new IllegalStateException("Session was already started for this thread!");
}
final SessionSpi spi = new SingleConnSessionSpi(connection);
sessions.set(spi);
return new TransactionImpl(spi);
}
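    /**
     * Generates the SQL for the given statement configuration, binds
     * the parameters and executes it against the current session,
     * mapping the result via the configured result map.
     */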
public Object query(StatementConfig config, Object[] args) {
// Request-scoped data
Request request = new Request(this);
// Statement fragment
BoundFragment fragment = config.getStatementBuilder().bindParameters(args);
// SQL, parameters and their types
StringBuilder sql = new StringBuilder();
List<Object> parameters = new ArrayList<Object>();
List<Type> types = new ArrayList<Type>();
// Generate everything
fragment.generate(sql, parameters, types);
SessionSpi spi = sessions.get();
if (spi == null) {
throw new IllegalStateException("Open session first!");
}
Connection conn;
try {
conn = spi.getConnection();
} catch (Exception e) {
throw new RuntimeException(e);
}
try {
PreparedStatement st = conn.prepareStatement(sql.toString());
try {
// Map parameters and execute query
mapParameters(st, types, parameters);
Object result;
if (config.isUpdate()) {
result = st.executeUpdate();
} else {
ResultSet rs = st.executeQuery();
// If we have ResultCallback in parameters -- use it
ResultCallback<Object> callback;
if (config.getCallbackIndex() != StatementConfig.RETURN_VALUE) {
// This is OK, since we deduced result type exactly from
// this parameter
@SuppressWarnings("unchecked")
ResultCallback<Object> temp = (ResultCallback<Object>) args[config
.getCallbackIndex()];
callback = temp;
} else {
// Prepare result callback and process results
ResultGetterSetter rgs = new ResultGetterSetter();
ResultCallbackSource callbackSource = ResultCollectorUtil
.createResultCallback(config.getResultType(), rgs, rgs, config);
callback = callbackSource.forInstance(request);
}
// Iterate through the result set
ResultMap resultMapper = config.getResultMapper();
while (rs.next()) {
resultMapper.processResultSet(request, rs, callback);
}
result = request.getResult();
}
return result;
} finally {
st.close();
}
} catch (SQLException e) {
            throw new DataException("SQL exception occurred while executing the query!", e);
} finally {
spi.releaseConnection(conn);
}
}
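    /**
     * Sets the query parameters on the prepared statement, selecting
     * a {@link TypeHandler} for each parameter based on its type.
     */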
private void mapParameters(PreparedStatement statement, List<Type> types,
List<Object> params) throws SQLException {
TypeHandlerFactory factory = config.getTypeHandlerFactory();
for (int i = 0; i < params.size(); ++i) {
Object item = params.get(i);
Type type = types.get(i);
TypeHandler<?> typeHandler = factory.getTypeHandler(type);
typeHandler.setParameter(statement, i + 1, item);
}
}
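    /**
     * {@link Getter}/{@link Setter} implementation that reads and
     * writes the query result stored in the {@link Request}.
     */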
private static class ResultGetterSetter implements Getter, Setter {
/**
* {@inheritDoc}
*/
public Type getType() {
// TODO: Implement somehow!
return null;
}
/**
* {@inheritDoc}
*/
public Object getValue(Object instance) {
Request request = (Request) instance;
return request.getResult();
}
/**
* {@inheritDoc}
*/
public void setValue(Object instance, Object value) {
Request request = (Request) instance;
request.setResult(value);
}
}
// TODO: toString
/**
* {@link Transaction} implementation.
*/
private class TransactionImpl implements Transaction {
final private SessionSpi spi;
/**
* Constructor.
* @param spi {@link SessionSpi} implementation.
*/
private TransactionImpl(SessionSpi spi) {
this.spi = spi;
}
/**
* {@inheritDoc}
*/
public void commit() {
try {
spi.commit();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
* {@inheritDoc}
*/
public void end() {
if (sessions.get() != spi) {
throw new IllegalStateException("This transaction is not bound to this thread!");
}
try {
// Remove from active sessions thread local
sessions.remove();
spi.end();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
* {@inheritDoc}
*/
public void rollback() {
try {
spi.rollback();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
}
| src/main/java/com/google/code/nanorm/internal/FactoryImpl.java | /**
* Copyright (C) 2008 Ivan S. Dubrov
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.code.nanorm.internal;
import java.lang.reflect.Type;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;
import com.google.code.nanorm.Factory;
import com.google.code.nanorm.ResultCallback;
import com.google.code.nanorm.Transaction;
import com.google.code.nanorm.TypeHandlerFactory;
import com.google.code.nanorm.exceptions.DataException;
import com.google.code.nanorm.internal.config.InternalConfiguration;
import com.google.code.nanorm.internal.config.StatementConfig;
import com.google.code.nanorm.internal.introspect.Getter;
import com.google.code.nanorm.internal.introspect.Setter;
import com.google.code.nanorm.internal.mapping.result.ResultCallbackSource;
import com.google.code.nanorm.internal.mapping.result.ResultCollectorUtil;
import com.google.code.nanorm.internal.mapping.result.ResultMap;
import com.google.code.nanorm.internal.session.SessionSpi;
import com.google.code.nanorm.internal.session.SingleConnSessionSpi;
import com.google.code.nanorm.internal.type.TypeHandler;
/**
*
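* {@link Factory} implementation that creates mapper proxies and executes the mapped
* statements as a {@link QueryDelegate}.
*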
* @author Ivan Dubrov
* @version 1.0 27.05.2008
*/
public class FactoryImpl implements Factory, QueryDelegate {
final private ThreadLocal<SessionSpi> sessions = new ThreadLocal<SessionSpi>();
final private InternalConfiguration config;
/**
*
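* Constructor.
* @param internalConfig internal configuration used for statement mapping and type handling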
*/
public FactoryImpl(InternalConfiguration internalConfig) {
this.config = internalConfig;
}
/**
* @see com.google.code.nanorm.Factory#createMapper(java.lang.Class)
*/
public <T> T createMapper(Class<T> mapperClass) {
config.configure(mapperClass);
// TODO: Check we mapped this class!
/*
* return
* mapperClass.cast(Proxy.newProxyInstance(getClass().getClassLoader(),
* new Class<?>[] {mapperClass }, new MapperInvocationHandler()));
*/
return config.getIntrospectionFactory().createMapper(mapperClass, config, this);
}
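/**
* Binds the given JDBC connection to the current thread and returns a
* {@link Transaction} handle used to commit, roll back or end the session.
*/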
public Transaction useConnection(Connection connection) {
if (connection == null) {
throw new IllegalArgumentException("Connection must not be null!");
}
if (sessions.get() != null) {
throw new IllegalStateException("Session was already started for this thread!");
}
final SessionSpi spi = new SingleConnSessionSpi(connection);
sessions.set(spi);
return new TransactionImpl(spi);
}
public Object query(StatementConfig config, Object[] args) {
// Request-scoped data
Request request = new Request(this);
// Statement fragment
BoundFragment fragment = config.getStatementBuilder().bindParameters(args);
// SQL, parameters and their types
StringBuilder sql = new StringBuilder();
List<Object> parameters = new ArrayList<Object>();
List<Type> types = new ArrayList<Type>();
// Generate everything
fragment.generate(sql, parameters, types);
SessionSpi spi = sessions.get();
if (spi == null) {
throw new IllegalStateException("Open session first!");
}
Connection conn;
try {
conn = spi.getConnection();
} catch (Exception e) {
throw new RuntimeException(e);
}
try {
PreparedStatement st = conn.prepareStatement(sql.toString());
try {
// Map parameters and execute query
mapParameters(st, types, parameters);
Object result;
if (config.isUpdate()) {
result = st.executeUpdate();
} else {
ResultSet rs = st.executeQuery();
// If we have ResultCallback in parameters -- use it
ResultCallback<Object> callback;
if (config.getCallbackIndex() != StatementConfig.RETURN_VALUE) {
// This is OK, since we deduced result type exactly from
// this parameter
@SuppressWarnings("unchecked")
ResultCallback<Object> temp = (ResultCallback<Object>) args[config
.getCallbackIndex()];
callback = temp;
} else {
// Prepare result callback and process results
ResultGetterSetter rgs = new ResultGetterSetter();
ResultCallbackSource callbackSource = ResultCollectorUtil
.createResultCallback(config.getResultType(), rgs, rgs, config);
callback = callbackSource.forInstance(request);
}
ResultMap resultMapper = config.getResultMapper();
while (rs.next()) {
resultMapper.processResultSet(request, rs, callback);
}
result = request.getResult();
}
return result;
} finally {
st.close();
}
} catch (SQLException e) {
throw new DataException("SQL exception occurred while executing the query!", e);
} finally {
try {
spi.releaseConnection(conn);
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
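/**
* Binds each generated parameter value to the prepared statement, resolving a
* {@link TypeHandler} for its declared type; JDBC parameter indexes are 1-based.
*/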
private void mapParameters(PreparedStatement statement, List<Type> types,
List<Object> params) throws SQLException {
TypeHandlerFactory factory = config.getTypeHandlerFactory();
for (int i = 0; i < params.size(); ++i) {
Object item = params.get(i);
Type type = types.get(i);
TypeHandler<?> typeHandler = factory.getTypeHandler(type);
typeHandler.setParameter(statement, i + 1, item);
}
}
private static class ResultGetterSetter implements Getter, Setter {
/**
* {@inheritDoc}
*/
public Type getType() {
// TODO: Implement somehow!
return null;
}
/**
* {@inheritDoc}
*/
public Object getValue(Object instance) {
Request request = (Request) instance;
return request.getResult();
}
/**
* {@inheritDoc}
*/
public void setValue(Object instance, Object value) {
Request request = (Request) instance;
request.setResult(value);
}
}
// TODO: toString
/**
* {@link Transaction} implementation.
*/
private class TransactionImpl implements Transaction {
final private SessionSpi spi;
/**
* Constructor.
* @param spi {@link SessionSpi} implementation.
*/
private TransactionImpl(SessionSpi spi) {
this.spi = spi;
}
/**
* {@inheritDoc}
*/
public void commit() {
try {
spi.commit();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
* {@inheritDoc}
*/
public void end() {
if (sessions.get() != spi) {
throw new IllegalStateException("This transaction is not bound to this thread!");
}
try {
// Remove from active sessions thread local
sessions.remove();
spi.end();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
/**
* {@inheritDoc}
*/
public void rollback() {
try {
spi.rollback();
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
}
| minor cleanup
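
Drops the redundant try/catch around releasing the connection in query(),
so the finally block now calls spi.releaseConnection(conn) directly:

    } finally {
        spi.releaseConnection(conn);
    }

Also adds a short comment before the result-set iteration loop.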
git-svn-id: 8c4cfb3da9bbfe2c21af1b47db7c76cc8bcad39d@51 a420aabd-eb4f-0410-91c9-6bc514024ae0
| src/main/java/com/google/code/nanorm/internal/FactoryImpl.java | minor cleanup |
|
Java | apache-2.0 | d93674fb379c1cd429d8b66159c27d66f1534168 | 0 | confluentinc/rest-utils,confluentinc/rest-utils | /*
* Copyright 2015 Confluent Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.confluent.rest.metrics;
import org.glassfish.jersey.server.ContainerRequest;
import org.glassfish.jersey.server.ContainerResponse;
import org.glassfish.jersey.server.model.Resource;
import org.glassfish.jersey.server.model.ResourceMethod;
import org.glassfish.jersey.server.monitoring.ApplicationEvent;
import org.glassfish.jersey.server.monitoring.ApplicationEventListener;
import org.glassfish.jersey.server.monitoring.RequestEvent;
import org.glassfish.jersey.server.monitoring.RequestEventListener;
import java.io.FilterInputStream;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import io.confluent.rest.annotations.PerformanceMetric;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.stats.Avg;
import org.apache.kafka.common.metrics.stats.Max;
import org.apache.kafka.common.metrics.stats.Percentile;
import org.apache.kafka.common.metrics.stats.Percentiles;
import org.apache.kafka.common.metrics.stats.WindowedCount;
import org.apache.kafka.common.metrics.stats.Rate;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.common.utils.Time;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.util.Collections.emptyMap;
/**
* Jersey ResourceMethodApplicationListener that records metrics for each endpoint by listening
* for method start and finish events. It reports some common metrics for each endpoint, such as
* request rate and latency (average, maximum, 95th and 99th percentiles).
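*
* <p>A minimal registration sketch (the variable names here are illustrative only):
* <pre>
* Metrics metrics = new Metrics();
* ResourceConfig config = new ResourceConfig();
* config.register(new MetricsResourceMethodApplicationListener(
*     metrics, "rest", Collections.emptyMap(), Time.SYSTEM));
* </pre>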
*/
public class MetricsResourceMethodApplicationListener implements ApplicationEventListener {
private static final Logger log = LoggerFactory.getLogger(
MetricsResourceMethodApplicationListener.class);
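// Request-scoped metric tags: callers may set this property on the ContainerRequest to a
// Map<String, String>; getMethodMetrics below reads it to select the matching per-tag
// sensors (any other value type triggers a ClassCastException).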
public static final String REQUEST_TAGS_PROP_KEY = "_request_tags";
protected static final String HTTP_STATUS_CODE_TAG = "http_status_code";
protected static final String[] HTTP_STATUS_CODE_TEXT = {
"unknown", "1xx", "2xx", "3xx", "4xx", "5xx"};
private static final int PERCENTILE_NUM_BUCKETS = 200;
private static final double PERCENTILE_MAX_LATENCY_IN_MS = TimeUnit.SECONDS.toMillis(10);
private static final long SENSOR_EXPIRY_SECONDS = TimeUnit.HOURS.toSeconds(1);
private final Metrics metrics;
private final String metricGrpPrefix;
private final Map<String, String> metricTags;
Time time;
private final Map<Method, RequestScopedMetrics> methodMetrics = new HashMap<>();
public MetricsResourceMethodApplicationListener(Metrics metrics, String metricGrpPrefix,
Map<String,String> metricTags, Time time) {
super();
this.metrics = metrics;
this.metricGrpPrefix = metricGrpPrefix;
this.metricTags = (metricTags != null) ? metricTags : emptyMap();
this.time = time;
}
@Override
public void onEvent(ApplicationEvent event) {
if (event.getType() == ApplicationEvent.Type.INITIALIZATION_FINISHED) {
// Special null key is used for global stats
MethodMetrics m = new MethodMetrics(
null, null, this.metrics, metricGrpPrefix, metricTags, emptyMap());
methodMetrics.put(null, new RequestScopedMetrics(m, new ConstructionContext(this)));
for (final Resource resource : event.getResourceModel().getResources()) {
for (final ResourceMethod method : resource.getAllMethods()) {
register(method);
}
for (final Resource childResource : resource.getChildResources()) {
for (final ResourceMethod method : childResource.getAllMethods()) {
register(method);
}
}
}
}
}
private void register(ResourceMethod method) {
final Method definitionMethod = method.getInvocable().getDefinitionMethod();
if (definitionMethod.isAnnotationPresent(PerformanceMetric.class)) {
PerformanceMetric annotation = definitionMethod.getAnnotation(PerformanceMetric.class);
MethodMetrics m = new MethodMetrics(
method, annotation, metrics, metricGrpPrefix, metricTags, emptyMap());
ConstructionContext context = new ConstructionContext(method, annotation, this);
methodMetrics.put(definitionMethod, new RequestScopedMetrics(m, context));
}
}
@Override
public RequestEventListener onRequest(final RequestEvent event) {
return new MetricsRequestEventListener(methodMetrics, time);
}
private static class RequestScopedMetrics {
private final MethodMetrics methodMetrics;
private final ConstructionContext context;
private final Map<SortedMap<String, String>, MethodMetrics> requestMetrics
= new ConcurrentHashMap<>();
public RequestScopedMetrics(MethodMetrics metrics, ConstructionContext context) {
this.methodMetrics = metrics;
this.context = context;
}
public MethodMetrics metrics() {
return methodMetrics;
}
public MethodMetrics metrics(Map<String, String> requestTags) {
// The key will also be used to identify a unique sensor,
// so we want to pass the sorted tags to MethodMetrics
SortedMap<String, String> key = new TreeMap<>(requestTags);
return requestMetrics.computeIfAbsent(key, (k) ->
new MethodMetrics(context.method, context.performanceMetric, context.metrics,
context.metricGrpPrefix, context.metricTags, k));
}
}
private static class ConstructionContext {
private final ResourceMethod method;
private final PerformanceMetric performanceMetric;
private final Map<String, String> metricTags;
private final String metricGrpPrefix;
private final Metrics metrics;
public ConstructionContext(MetricsResourceMethodApplicationListener methodAppListener) {
this(null, null, methodAppListener);
}
public ConstructionContext(
ResourceMethod method,
PerformanceMetric performanceMetric,
MetricsResourceMethodApplicationListener methodAppListener
) {
this.method = method;
this.performanceMetric = performanceMetric;
this.metrics = methodAppListener.metrics;
this.metricTags = methodAppListener.metricTags;
this.metricGrpPrefix = methodAppListener.metricGrpPrefix;
}
}
private static class MethodMetrics {
private final Sensor requestSizeSensor;
private final Sensor responseSizeSensor;
private final Sensor requestLatencySensor;
private Sensor errorSensor;
private final Sensor[] errorSensorByStatus = new Sensor[HTTP_STATUS_CODE_TEXT.length];
public MethodMetrics(ResourceMethod method, PerformanceMetric annotation, Metrics metrics,
String metricGrpPrefix, Map<String, String> metricTags,
Map<String, String> requestTags) {
String metricGrpName = metricGrpPrefix + "-metrics";
// The tags will be used to generate MBean names if JmxReporter is used,
// sort to get consistent names
Map<String, String> allTags = new TreeMap<>(metricTags);
allTags.putAll(requestTags);
this.requestSizeSensor = metrics.sensor(
getName(method, annotation, "request-size", requestTags),
null, SENSOR_EXPIRY_SECONDS, Sensor.RecordingLevel.INFO, (Sensor[]) null);
MetricName metricName = new MetricName(
getName(method, annotation, "request-count"), metricGrpName,
"The request count using a windowed counter", allTags);
this.requestSizeSensor.add(metricName, new WindowedCount());
metricName = new MetricName(
getName(method, annotation, "request-rate"), metricGrpName,
"The average number of HTTP requests per second.", allTags);
this.requestSizeSensor.add(metricName, new Rate(new WindowedCount()));
metricName = new MetricName(
getName(method, annotation, "request-byte-rate"), metricGrpName,
"Bytes/second of incoming requests", allTags);
this.requestSizeSensor.add(metricName, new Avg());
metricName = new MetricName(
getName(method, annotation, "request-size-avg"), metricGrpName,
"The average request size in bytes", allTags);
this.requestSizeSensor.add(metricName, new Avg());
metricName = new MetricName(
getName(method, annotation, "request-size-max"), metricGrpName,
"The maximum request size in bytes", allTags);
this.requestSizeSensor.add(metricName, new Max());
this.responseSizeSensor = metrics.sensor(
getName(method, annotation, "response-size", requestTags),
null, SENSOR_EXPIRY_SECONDS, Sensor.RecordingLevel.INFO, (Sensor[]) null);
metricName = new MetricName(
getName(method, annotation, "response-rate"), metricGrpName,
"The average number of HTTP responses per second.", allTags);
this.responseSizeSensor.add(metricName, new Rate(new WindowedCount()));
metricName = new MetricName(
getName(method, annotation, "response-byte-rate"), metricGrpName,
"Bytes/second of outgoing responses", allTags);
this.responseSizeSensor.add(metricName, new Avg());
metricName = new MetricName(
getName(method, annotation, "response-size-avg"), metricGrpName,
"The average response size in bytes", allTags);
this.responseSizeSensor.add(metricName, new Avg());
metricName = new MetricName(
getName(method, annotation, "response-size-max"), metricGrpName,
"The maximum response size in bytes", allTags);
this.responseSizeSensor.add(metricName, new Max());
this.requestLatencySensor = metrics.sensor(
getName(method, annotation, "request-latency", requestTags),
null, SENSOR_EXPIRY_SECONDS, Sensor.RecordingLevel.INFO, (Sensor[]) null);
metricName = new MetricName(
getName(method, annotation, "request-latency-avg"), metricGrpName,
"The average request latency in ms", allTags);
this.requestLatencySensor.add(metricName, new Avg());
metricName = new MetricName(
getName(method, annotation, "request-latency-max"), metricGrpName,
"The maximum request latency in ms", allTags);
this.requestLatencySensor.add(metricName, new Max());
Percentiles percs = new Percentiles(Float.SIZE / 8 * PERCENTILE_NUM_BUCKETS,
0.0,
PERCENTILE_MAX_LATENCY_IN_MS,
Percentiles.BucketSizing.CONSTANT,
new Percentile(new MetricName(
getName(method, annotation, "request-latency-95"), metricGrpName,
"The 95th percentile request latency in ms", allTags), 95),
new Percentile(new MetricName(
getName(method, annotation, "request-latency-99"), metricGrpName,
"The 99th percentile request latency in ms", allTags), 99));
this.requestLatencySensor.add(percs);
for (int i = 0; i < errorSensorByStatus.length; i++) {
errorSensorByStatus[i] = metrics.sensor(
getName(method, annotation, "errors" + i, requestTags),
null, SENSOR_EXPIRY_SECONDS, Sensor.RecordingLevel.INFO, (Sensor[]) null);
SortedMap<String, String> tags = new TreeMap<>(allTags);
tags.put(HTTP_STATUS_CODE_TAG, HTTP_STATUS_CODE_TEXT[i]);
metricName = new MetricName(getName(method, annotation, "request-error-rate"),
metricGrpName,
"The average number of requests"
+ " per second that resulted in HTTP error responses with code "
+ HTTP_STATUS_CODE_TEXT[i],
tags);
errorSensorByStatus[i].add(metricName, new Rate());
metricName = new MetricName(getName(method, annotation, "request-error-count"),
metricGrpName,
"A windowed count of requests that resulted in an HTTP error response with code - "
+ HTTP_STATUS_CODE_TEXT[i], tags);
errorSensorByStatus[i].add(metricName, new WindowedCount());
}
this.errorSensor = metrics.sensor(getName(method, annotation, "errors", requestTags),
null, SENSOR_EXPIRY_SECONDS, Sensor.RecordingLevel.INFO, (Sensor[]) null);
metricName = new MetricName(
getName(method, annotation, "request-error-rate"),
metricGrpName,
"The average number of requests per second that resulted in HTTP error responses",
allTags);
this.errorSensor.add(metricName, new Rate());
metricName = new MetricName(
getName(method, annotation, "request-error-count"),
metricGrpName,
"A windowed count of requests that resulted in HTTP error responses",
allTags);
this.errorSensor.add(metricName, new WindowedCount());
}
/**
* Indicate that a request has finished successfully.
*/
public void finished(long requestSize, long responseSize, long latencyMs) {
requestSizeSensor.record(requestSize);
responseSizeSensor.record(responseSize);
requestLatencySensor.record(latencyMs);
}
/**
* Indicate that a request has failed with an exception.
*/
public void exception(final RequestEvent event) {
// Map the HTTP status codes down to their classes (1xx, 2xx, 3xx, 4xx, 5xx)
// use the containerResponse status as it has the http status after ExceptionMappers
// are applied
int idx = event.getContainerResponse() != null
? event.getContainerResponse().getStatus() / 100 : 5;
// Index 0 means "unknown" status codes.
if (idx <= 0 || idx >= HTTP_STATUS_CODE_TEXT.length) {
log.error("Unidentified exception to record metrics against", event.getException());
idx = 0;
}
errorSensorByStatus[idx].record();
errorSensor.record();
}
private static String getName(final ResourceMethod method,
final PerformanceMetric annotation, final String metric) {
return getName(method, annotation, metric, null);
}
private static String getName(final ResourceMethod method,
final PerformanceMetric annotation, final String metric,
final Map<String, String> requestTags) {
StringBuilder builder = new StringBuilder();
boolean prefixed = false;
if (annotation != null && !annotation.value().equals(PerformanceMetric.DEFAULT_NAME)) {
builder.append(annotation.value());
builder.append('.');
prefixed = true;
}
if (!prefixed && method != null) {
String className = method.getInvocable().getDefinitionMethod()
.getDeclaringClass().getSimpleName();
String methodName = method.getInvocable().getDefinitionMethod().getName();
builder.append(className);
builder.append('.');
builder.append(methodName);
builder.append('.');
}
builder.append(metric);
if (requestTags != null) {
requestTags.forEach((k, v) -> builder.append(".").append(k).append("=").append(v));
}
return builder.toString();
}
}
private static class MetricsRequestEventListener implements RequestEventListener {
private final Time time;
private final Map<Method, RequestScopedMetrics> metrics;
private long started;
private CountingInputStream wrappedRequestStream;
private CountingOutputStream wrappedResponseStream;
public MetricsRequestEventListener(final Map<Method, RequestScopedMetrics> metrics, Time time) {
this.metrics = metrics;
this.time = time;
}
@Override
public void onEvent(RequestEvent event) {
if (event.getType() == RequestEvent.Type.MATCHING_START) {
started = time.milliseconds();
final ContainerRequest request = event.getContainerRequest();
wrappedRequestStream = new CountingInputStream(request.getEntityStream());
request.setEntityStream(wrappedRequestStream);
} else if (event.getType() == RequestEvent.Type.RESP_FILTERS_START) {
final ContainerResponse response = event.getContainerResponse();
wrappedResponseStream = new CountingOutputStream(response.getEntityStream());
response.setEntityStream(wrappedResponseStream);
} else if (event.getType() == RequestEvent.Type.FINISHED) {
final long elapsed = time.milliseconds() - started;
final long requestSize;
if (wrappedRequestStream != null) {
requestSize = wrappedRequestStream.size();
} else {
requestSize = 0;
}
final long responseSize;
// nothing guarantees we always encounter an event where getContainerResponse is not null
// in the event of dispatch errors, the error response is delegated to the servlet container
if (wrappedResponseStream != null) {
responseSize = wrappedResponseStream.size();
} else {
responseSize = 0;
}
// Handle exceptions
if (event.getException() != null) {
this.metrics.get(null).metrics().exception(event);
final MethodMetrics metrics = getMethodMetrics(event);
if (metrics != null) {
metrics.exception(event);
}
}
this.metrics.get(null).metrics().finished(requestSize, responseSize, elapsed);
final MethodMetrics metrics = getMethodMetrics(event);
if (metrics != null) {
metrics.finished(requestSize, responseSize, elapsed);
}
}
}
private MethodMetrics getMethodMetrics(RequestEvent event) {
ResourceMethod method = event.getUriInfo().getMatchedResourceMethod();
if (method == null) {
return null;
}
RequestScopedMetrics metrics = this.metrics.get(method.getInvocable().getDefinitionMethod());
if (metrics == null) {
return null;
}
Object tagsObj = event.getContainerRequest().getProperty(REQUEST_TAGS_PROP_KEY);
if (tagsObj == null) {
// Method metrics without request tags don't necessarily represent method level aggregations
// e.g., when invocations of a method have both requests w/ and w/o tags
return metrics.metrics();
}
if (!(tagsObj instanceof Map<?, ?>)) {
throw new ClassCastException("Expected the value for property " + REQUEST_TAGS_PROP_KEY
+ " to be a " + Map.class + ", but it is " + tagsObj.getClass());
}
@SuppressWarnings("unchecked")
Map<String, String> tags = (Map<String, String>) tagsObj;
// we have additional tags, find the appropriate metrics holder
return metrics.metrics(tags);
}
private static class CountingInputStream extends FilterInputStream {
private long count = 0;
private long mark = 0;
public CountingInputStream(InputStream is) {
super(is);
}
public long size() {
return count;
}
@Override
public int read() throws IOException {
int b = super.read();
count++;
return b;
}
// Note that read(byte[]) for FilterInputStream calls this.read(b,0,b.length), NOT
// underlying.read(b), so accounting for those calls is handled by read(byte[],int,int).
@Override
public int read(byte[] bytes, int off, int len) throws IOException {
int nread = super.read(bytes, off, len);
if (nread > 0) {
count += nread;
}
return nread;
}
@Override
public long skip(long l) throws IOException {
long skipped = super.skip(l);
count += skipped;
return skipped;
}
@Override
public synchronized void mark(int i) {
super.mark(i);
mark = count;
}
@Override
public synchronized void reset() throws IOException {
super.reset();
count = mark;
}
}
private static class CountingOutputStream extends FilterOutputStream {
private long count = 0;
public CountingOutputStream(OutputStream os) {
super(os);
}
public long size() {
return count;
}
// Note that we override all of these even though FilterOutputStream only requires
// overriding the first to avoid doing byte-by-byte handling of the stream. Do NOT call
// super.write() for these as they will convert everything into a series of write(int)
// calls and wreck performance.
@Override
public void write(int b) throws IOException {
count++;
out.write(b);
}
@Override
public void write(byte[] bytes) throws IOException {
count += bytes.length;
out.write(bytes);
}
@Override
public void write(byte[] bytes, int off, int len) throws IOException {
count += len;
out.write(bytes, off, len);
}
}
}
}
| core/src/main/java/io/confluent/rest/metrics/MetricsResourceMethodApplicationListener.java | /*
* Copyright 2015 Confluent Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.confluent.rest.metrics;
import org.glassfish.jersey.server.ContainerRequest;
import org.glassfish.jersey.server.ContainerResponse;
import org.glassfish.jersey.server.model.Resource;
import org.glassfish.jersey.server.model.ResourceMethod;
import org.glassfish.jersey.server.monitoring.ApplicationEvent;
import org.glassfish.jersey.server.monitoring.ApplicationEventListener;
import org.glassfish.jersey.server.monitoring.RequestEvent;
import org.glassfish.jersey.server.monitoring.RequestEventListener;
import java.io.FilterInputStream;
import java.io.FilterOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.Map;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import io.confluent.rest.annotations.PerformanceMetric;
import org.apache.kafka.common.MetricName;
import org.apache.kafka.common.metrics.Metrics;
import org.apache.kafka.common.metrics.stats.Avg;
import org.apache.kafka.common.metrics.stats.Max;
import org.apache.kafka.common.metrics.stats.Percentile;
import org.apache.kafka.common.metrics.stats.Percentiles;
import org.apache.kafka.common.metrics.stats.WindowedCount;
import org.apache.kafka.common.metrics.stats.Rate;
import org.apache.kafka.common.metrics.Sensor;
import org.apache.kafka.common.utils.Time;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static java.util.Collections.emptyMap;
/**
* Jersey ResourceMethodApplicationListener that records metrics for each endpoint by listening
* for method start and finish events. It reports some common metrics for each endpoint, such as
* request rate and latency (average, maximum, 95th and 99th percentiles).
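*
* <p>A minimal registration sketch (the variable names here are illustrative only):
* <pre>
* Metrics metrics = new Metrics();
* ResourceConfig config = new ResourceConfig();
* config.register(new MetricsResourceMethodApplicationListener(
*     metrics, "rest", Collections.emptyMap(), Time.SYSTEM));
* </pre>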
*/
public class MetricsResourceMethodApplicationListener implements ApplicationEventListener {
private static final Logger log = LoggerFactory.getLogger(
MetricsResourceMethodApplicationListener.class);
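// Request-scoped metric tags: callers may set this property on the ContainerRequest to a
// Map<String, String>; getMethodMetrics below reads it to select the matching per-tag
// sensors (any other value type triggers a ClassCastException).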
public static final String REQUEST_TAGS_PROP_KEY = "_request_tags";
protected static final String HTTP_STATUS_CODE_TAG = "http_status_code";
protected static final String[] HTTP_STATUS_CODE_TEXT = {
"unknown", "1xx", "2xx", "3xx", "4xx", "5xx"};
private static final int PERCENTILE_NUM_BUCKETS = 200;
private static final double PERCENTILE_MAX_LATENCY_IN_MS = TimeUnit.SECONDS.toMillis(10);
private static final long SENSOR_EXPIRY_SECONDS = TimeUnit.HOURS.toSeconds(1);
private final Metrics metrics;
private final String metricGrpPrefix;
private final Map<String, String> metricTags;
Time time;
private final Map<Method, RequestScopedMetrics> methodMetrics = new HashMap<>();
public MetricsResourceMethodApplicationListener(Metrics metrics, String metricGrpPrefix,
Map<String,String> metricTags, Time time) {
super();
this.metrics = metrics;
this.metricGrpPrefix = metricGrpPrefix;
this.metricTags = (metricTags != null) ? metricTags : emptyMap();
this.time = time;
}
@Override
public void onEvent(ApplicationEvent event) {
if (event.getType() == ApplicationEvent.Type.INITIALIZATION_FINISHED) {
// Special null key is used for global stats
MethodMetrics m = new MethodMetrics(
null, null, this.metrics, metricGrpPrefix, metricTags, emptyMap());
methodMetrics.put(null, new RequestScopedMetrics(m, new ConstructionContext(this)));
for (final Resource resource : event.getResourceModel().getResources()) {
for (final ResourceMethod method : resource.getAllMethods()) {
register(method);
}
for (final Resource childResource : resource.getChildResources()) {
for (final ResourceMethod method : childResource.getAllMethods()) {
register(method);
}
}
}
}
}
private void register(ResourceMethod method) {
final Method definitionMethod = method.getInvocable().getDefinitionMethod();
if (definitionMethod.isAnnotationPresent(PerformanceMetric.class)) {
PerformanceMetric annotation = definitionMethod.getAnnotation(PerformanceMetric.class);
MethodMetrics m = new MethodMetrics(
method, annotation, metrics, metricGrpPrefix, metricTags, emptyMap());
ConstructionContext context = new ConstructionContext(method, annotation, this);
methodMetrics.put(definitionMethod, new RequestScopedMetrics(m, context));
}
}
@Override
public RequestEventListener onRequest(final RequestEvent event) {
return new MetricsRequestEventListener(methodMetrics, time);
}
private static class RequestScopedMetrics {
private final MethodMetrics methodMetrics;
private final ConstructionContext context;
private final Map<SortedMap<String, String>, MethodMetrics> requestMetrics
= new ConcurrentHashMap<>();
public RequestScopedMetrics(MethodMetrics metrics, ConstructionContext context) {
this.methodMetrics = metrics;
this.context = context;
}
public MethodMetrics metrics() {
return methodMetrics;
}
public MethodMetrics metrics(Map<String, String> requestTags) {
// The key will also be used to identify a unique sensor,
// so we want to pass the sorted tags to MethodMetrics
SortedMap<String, String> key = new TreeMap<>(requestTags);
return requestMetrics.computeIfAbsent(key, (k) ->
new MethodMetrics(context.method, context.performanceMetric, context.metrics,
context.metricGrpPrefix, context.metricTags, k));
}
}
private static class ConstructionContext {
private final ResourceMethod method;
private final PerformanceMetric performanceMetric;
private final Map<String, String> metricTags;
private final String metricGrpPrefix;
private final Metrics metrics;
public ConstructionContext(MetricsResourceMethodApplicationListener methodAppListener) {
this(null, null, methodAppListener);
}
public ConstructionContext(
ResourceMethod method,
PerformanceMetric performanceMetric,
MetricsResourceMethodApplicationListener methodAppListener
) {
this.method = method;
this.performanceMetric = performanceMetric;
this.metrics = methodAppListener.metrics;
this.metricTags = methodAppListener.metricTags;
this.metricGrpPrefix = methodAppListener.metricGrpPrefix;
}
}
private static class MethodMetrics {
private final Sensor requestSizeSensor;
private final Sensor responseSizeSensor;
private final Sensor requestLatencySensor;
private Sensor errorSensor;
private final Sensor[] errorSensorByStatus = new Sensor[HTTP_STATUS_CODE_TEXT.length];
public MethodMetrics(ResourceMethod method, PerformanceMetric annotation, Metrics metrics,
String metricGrpPrefix, Map<String, String> metricTags,
Map<String, String> requestTags) {
String metricGrpName = metricGrpPrefix + "-metrics";
// The tags will be used to generate MBean names if JmxReporter is used,
// sort to get consistent names
Map<String, String> allTags = new TreeMap<>(metricTags);
allTags.putAll(requestTags);
this.requestSizeSensor = metrics.sensor(
getName(method, annotation, "request-size", requestTags),
null, SENSOR_EXPIRY_SECONDS, Sensor.RecordingLevel.INFO, (Sensor[]) null);
MetricName metricName = new MetricName(
getName(method, annotation, "request-count"), metricGrpName,
"The request count using a windowed counter", allTags);
this.requestSizeSensor.add(metricName, new WindowedCount());
metricName = new MetricName(
getName(method, annotation, "request-rate"), metricGrpName,
"The average number of HTTP requests per second.", allTags);
this.requestSizeSensor.add(metricName, new Rate(new WindowedCount()));
metricName = new MetricName(
getName(method, annotation, "request-byte-rate"), metricGrpName,
"Bytes/second of incoming requests", allTags);
this.requestSizeSensor.add(metricName, new Avg());
metricName = new MetricName(
getName(method, annotation, "request-size-avg"), metricGrpName,
"The average request size in bytes", allTags);
this.requestSizeSensor.add(metricName, new Avg());
metricName = new MetricName(
getName(method, annotation, "request-size-max"), metricGrpName,
"The maximum request size in bytes", allTags);
this.requestSizeSensor.add(metricName, new Max());
this.responseSizeSensor = metrics.sensor(
getName(method, annotation, "response-size", requestTags),
null, SENSOR_EXPIRY_SECONDS, Sensor.RecordingLevel.INFO, (Sensor[]) null);
metricName = new MetricName(
getName(method, annotation, "response-rate"), metricGrpName,
"The average number of HTTP responses per second.", allTags);
this.responseSizeSensor.add(metricName, new Rate(new WindowedCount()));
metricName = new MetricName(
getName(method, annotation, "response-byte-rate"), metricGrpName,
"Bytes/second of outgoing responses", allTags);
this.responseSizeSensor.add(metricName, new Avg());
metricName = new MetricName(
getName(method, annotation, "response-size-avg"), metricGrpName,
"The average response size in bytes", allTags);
this.responseSizeSensor.add(metricName, new Avg());
metricName = new MetricName(
getName(method, annotation, "response-size-max"), metricGrpName,
"The maximum response size in bytes", allTags);
this.responseSizeSensor.add(metricName, new Max());
this.requestLatencySensor = metrics.sensor(
getName(method, annotation, "request-latency", requestTags),
null, SENSOR_EXPIRY_SECONDS, Sensor.RecordingLevel.INFO, (Sensor[]) null);
metricName = new MetricName(
getName(method, annotation, "request-latency-avg"), metricGrpName,
"The average request latency in ms", allTags);
this.requestLatencySensor.add(metricName, new Avg());
metricName = new MetricName(
getName(method, annotation, "request-latency-max"), metricGrpName,
"The maximum request latency in ms", allTags);
this.requestLatencySensor.add(metricName, new Max());
Percentiles percs = new Percentiles(Float.SIZE / 8 * PERCENTILE_NUM_BUCKETS,
0.0,
PERCENTILE_MAX_LATENCY_IN_MS,
Percentiles.BucketSizing.CONSTANT,
new Percentile(new MetricName(
getName(method, annotation, "request-latency-95"), metricGrpName,
"The 95th percentile request latency in ms", allTags), 95),
new Percentile(new MetricName(
getName(method, annotation, "request-latency-99"), metricGrpName,
"The 99th percentile request latency in ms", allTags), 99));
this.requestLatencySensor.add(percs);
for (int i = 0; i < errorSensorByStatus.length; i++) {
errorSensorByStatus[i] = metrics.sensor(
getName(method, annotation, "errors" + i, requestTags),
null, SENSOR_EXPIRY_SECONDS, Sensor.RecordingLevel.INFO, (Sensor[]) null);
SortedMap<String, String> tags = new TreeMap<>(allTags);
tags.put(HTTP_STATUS_CODE_TAG, HTTP_STATUS_CODE_TEXT[i]);
metricName = new MetricName(getName(method, annotation, "request-error-rate"),
metricGrpName,
"The average number of requests"
+ " per second that resulted in HTTP error responses with code "
+ HTTP_STATUS_CODE_TEXT[i],
tags);
errorSensorByStatus[i].add(metricName, new Rate());
metricName = new MetricName(getName(method, annotation, "request-error-count"),
metricGrpName,
"A windowed count of requests that resulted in an HTTP error response with code - "
+ HTTP_STATUS_CODE_TEXT[i], tags);
errorSensorByStatus[i].add(metricName, new WindowedCount());
}
this.errorSensor = metrics.sensor(getName(method, annotation, "errors", requestTags),
null, SENSOR_EXPIRY_SECONDS, Sensor.RecordingLevel.INFO, (Sensor[]) null);
metricName = new MetricName(
getName(method, annotation, "request-error-rate"),
metricGrpName,
"The average number of requests per second that resulted in HTTP error responses",
allTags);
this.errorSensor.add(metricName, new Rate());
this.errorSensor = metrics.sensor(getName(method, annotation, "errors-count", requestTags),
null, SENSOR_EXPIRY_SECONDS, Sensor.RecordingLevel.INFO, (Sensor[]) null);
metricName = new MetricName(
getName(method, annotation, "request-error-count"),
metricGrpName,
"A windowed count of requests that resulted in HTTP error responses",
allTags);
this.errorSensor.add(metricName, new WindowedCount());
}
/**
* Indicate that a request has finished successfully.
*/
public void finished(long requestSize, long responseSize, long latencyMs) {
requestSizeSensor.record(requestSize);
responseSizeSensor.record(responseSize);
requestLatencySensor.record(latencyMs);
}
/**
* Indicate that a request has failed with an exception.
*/
public void exception(final RequestEvent event) {
// Map the HTTP status codes down to their classes (1xx, 2xx, 3xx, 4xx, 5xx)
// use the containerResponse status as it has the http status after ExceptionMappers
// are applied
int idx = event.getContainerResponse() != null
? event.getContainerResponse().getStatus() / 100 : 5;
// Index 0 means "unknown" status codes.
if (idx <= 0 || idx >= HTTP_STATUS_CODE_TEXT.length) {
log.error("Unidentified exception to record metrics against", event.getException());
idx = 0;
}
errorSensorByStatus[idx].record();
errorSensor.record();
}
private static String getName(final ResourceMethod method,
final PerformanceMetric annotation, final String metric) {
return getName(method, annotation, metric, null);
}
private static String getName(final ResourceMethod method,
final PerformanceMetric annotation, final String metric,
final Map<String, String> requestTags) {
StringBuilder builder = new StringBuilder();
boolean prefixed = false;
if (annotation != null && !annotation.value().equals(PerformanceMetric.DEFAULT_NAME)) {
builder.append(annotation.value());
builder.append('.');
prefixed = true;
}
if (!prefixed && method != null) {
String className = method.getInvocable().getDefinitionMethod()
.getDeclaringClass().getSimpleName();
String methodName = method.getInvocable().getDefinitionMethod().getName();
builder.append(className);
builder.append('.');
builder.append(methodName);
builder.append('.');
}
builder.append(metric);
if (requestTags != null) {
requestTags.forEach((k, v) -> builder.append(".").append(k).append("=").append(v));
}
return builder.toString();
}
}
private static class MetricsRequestEventListener implements RequestEventListener {
private final Time time;
private final Map<Method, RequestScopedMetrics> metrics;
private long started;
private CountingInputStream wrappedRequestStream;
private CountingOutputStream wrappedResponseStream;
public MetricsRequestEventListener(final Map<Method, RequestScopedMetrics> metrics, Time time) {
this.metrics = metrics;
this.time = time;
}
@Override
public void onEvent(RequestEvent event) {
if (event.getType() == RequestEvent.Type.MATCHING_START) {
started = time.milliseconds();
final ContainerRequest request = event.getContainerRequest();
wrappedRequestStream = new CountingInputStream(request.getEntityStream());
request.setEntityStream(wrappedRequestStream);
} else if (event.getType() == RequestEvent.Type.RESP_FILTERS_START) {
final ContainerResponse response = event.getContainerResponse();
wrappedResponseStream = new CountingOutputStream(response.getEntityStream());
response.setEntityStream(wrappedResponseStream);
} else if (event.getType() == RequestEvent.Type.FINISHED) {
final long elapsed = time.milliseconds() - started;
final long requestSize;
if (wrappedRequestStream != null) {
requestSize = wrappedRequestStream.size();
} else {
requestSize = 0;
}
final long responseSize;
// nothing guarantees we always encounter an event where getContainerResponse is not null
// in the event of dispatch errors, the error response is delegated to the servlet container
if (wrappedResponseStream != null) {
responseSize = wrappedResponseStream.size();
} else {
responseSize = 0;
}
// Handle exceptions
if (event.getException() != null) {
this.metrics.get(null).metrics().exception(event);
final MethodMetrics metrics = getMethodMetrics(event);
if (metrics != null) {
metrics.exception(event);
}
}
this.metrics.get(null).metrics().finished(requestSize, responseSize, elapsed);
final MethodMetrics metrics = getMethodMetrics(event);
if (metrics != null) {
metrics.finished(requestSize, responseSize, elapsed);
}
}
}
private MethodMetrics getMethodMetrics(RequestEvent event) {
ResourceMethod method = event.getUriInfo().getMatchedResourceMethod();
if (method == null) {
return null;
}
RequestScopedMetrics metrics = this.metrics.get(method.getInvocable().getDefinitionMethod());
if (metrics == null) {
return null;
}
Object tagsObj = event.getContainerRequest().getProperty(REQUEST_TAGS_PROP_KEY);
if (tagsObj == null) {
// Method metrics without request tags don't necessarily represent method level aggregations
// e.g., when invocations of a method have both requests w/ and w/o tags
return metrics.metrics();
}
if (!(tagsObj instanceof Map<?, ?>)) {
throw new ClassCastException("Expected the value for property " + REQUEST_TAGS_PROP_KEY
+ " to be a " + Map.class + ", but it is " + tagsObj.getClass());
}
@SuppressWarnings("unchecked")
Map<String, String> tags = (Map<String, String>) tagsObj;
// we have additional tags, find the appropriate metrics holder
return metrics.metrics(tags);
}
private static class CountingInputStream extends FilterInputStream {
private long count = 0;
private long mark = 0;
public CountingInputStream(InputStream is) {
super(is);
}
public long size() {
return count;
}
@Override
public int read() throws IOException {
int b = super.read();
count++;
return b;
}
// Note that read(byte[]) for FilterInputStream calls this.read(b,0,b.length), NOT
// underlying.read(b), so accounting for those calls is handled by read(byte[],int,int).
@Override
public int read(byte[] bytes, int off, int len) throws IOException {
int nread = super.read(bytes, off, len);
if (nread > 0) {
count += nread;
}
return nread;
}
@Override
public long skip(long l) throws IOException {
long skipped = super.skip(l);
count += skipped;
return skipped;
}
@Override
public synchronized void mark(int i) {
super.mark(i);
mark = count;
}
@Override
public synchronized void reset() throws IOException {
super.reset();
count = mark;
}
}
private static class CountingOutputStream extends FilterOutputStream {
private long count = 0;
public CountingOutputStream(OutputStream os) {
super(os);
}
public long size() {
return count;
}
// Note that we override all of these even though FilterOutputStream only requires
// overriding the first to avoid doing byte-by-byte handling of the stream. Do NOT call
// super.write() for these as they will convert everything into a series of write(int)
// calls and wreck performance.
@Override
public void write(int b) throws IOException {
count++;
out.write(b);
}
@Override
public void write(byte[] bytes) throws IOException {
count += bytes.length;
out.write(bytes);
}
@Override
public void write(byte[] bytes, int off, int len) throws IOException {
count += len;
out.write(bytes, off, len);
}
}
}
}
| Do not create a new sensor for error count
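
Previously the constructor re-assigned this.errorSensor to a second sensor
(named "errors-count") before registering the request-error-count metric,
so the request-error-rate metric was left on a sensor that exception()
never recorded. Both metrics are now registered on the single "errors"
sensor:

    this.errorSensor.add(metricName, new Rate());
    ...
    this.errorSensor.add(metricName, new WindowedCount());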
| core/src/main/java/io/confluent/rest/metrics/MetricsResourceMethodApplicationListener.java | Do not create a new sensor for error count |
|
Java | apache-2.0 | 52a8a61d7a11eae1bdb88657d0307ad90d706118 | 0 | ricepanda/rice-git2,ricepanda/rice-git2,kuali/rice-playground,ricepanda/rice-git3,ricepanda/rice-git2,ricepanda/rice-git3,kuali/rice-playground,kuali/rice-playground,ricepanda/rice-git2,ricepanda/rice-git3,ricepanda/rice-git3,kuali/rice-playground | /**
* Copyright 2005-2011 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.krad.service.impl;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.kuali.rice.core.api.util.RiceKeyConstants;
import org.kuali.rice.krad.datadictionary.DataDictionary;
import org.kuali.rice.krad.datadictionary.DataObjectEntry;
import org.kuali.rice.krad.datadictionary.validation.Address;
import org.kuali.rice.krad.datadictionary.validation.Company;
import org.kuali.rice.krad.datadictionary.validation.Employee;
import org.kuali.rice.krad.datadictionary.validation.ErrorLevel;
import org.kuali.rice.krad.datadictionary.validation.Person;
import org.kuali.rice.krad.datadictionary.validation.constraint.provider.CollectionDefinitionConstraintProvider;
import org.kuali.rice.krad.datadictionary.validation.processor.MustOccurConstraintProcessor;
import org.kuali.rice.krad.datadictionary.validation.result.ConstraintValidationResult;
import org.kuali.rice.krad.datadictionary.validation.result.DictionaryValidationResult;
import org.kuali.rice.krad.service.DataDictionaryService;
import org.kuali.test.KRADTestCase;
import org.springframework.context.support.ClassPathXmlApplicationContext;
/**
*
* @author Kuali Rice Team ([email protected])
*/
public class DictionaryValidationServiceImplTest {
ClassPathXmlApplicationContext context;
private DictionaryValidationServiceImpl service;
DataDictionary dataDictionary = new DataDictionary();
protected DataObjectEntry addressEntry;
private Address validLondonAddress = new Address("8129 Maiden Lane", "", "London", "", "SE1 0P3", "UK", null);
private Address validUSAddress = new Address("893 Presidential Ave", "Suite 800", "Washington", "DC", "12031", "USA", null);
private Address invalidUSAddress = new Address("893 Presidential Ave", "Suite 800", "Washington", "", "92342-123456", "USA", null);
private Address noZipNoCityUSAddress = new Address("893 Presidential Ave", "Suite 800", null, "DC", null, "USA", null);
private Address validNonDCUSAddress = new Address("89 11th Street", "Suite 800", "Seattle", "WA", "", "USA", null);
private Address invalidDCUSAddress = new Address("89 Presidential Ave", "Suite 800", "Washington", "DC", "12031", "USA", null);
private Address invalidHKAddress = new Address("182 Lin Pai Road", "", "Hong Kong", "N.T.", "", "CN", null);
@Before
public void setUp() throws Exception {
//super.setUp();
context = new ClassPathXmlApplicationContext("classpath:DictionaryValidationServiceSpringBeans.xml");
service = (DictionaryValidationServiceImpl)context.getBean("dictionaryValidationService");
dataDictionary.addConfigFileLocation("classpath:org/kuali/rice/krad/bo/datadictionary/DataDictionaryBaseTypes.xml");
dataDictionary.addConfigFileLocation("classpath:org/kuali/rice/kns/bo/datadictionary/DataDictionaryBaseTypes.xml");
dataDictionary.addConfigFileLocation("classpath:org/kuali/rice/krad/test/datadictionary/validation/Company.xml");
dataDictionary.addConfigFileLocation("classpath:org/kuali/rice/krad/test/datadictionary/validation/Address.xml");
dataDictionary.addConfigFileLocation("classpath:org/kuali/rice/krad/test/datadictionary/validation/Employee.xml");
dataDictionary.addConfigFileLocation("classpath:org/kuali/rice/krad/test/datadictionary/validation/Person.xml");
dataDictionary.parseDataDictionaryConfigurationFiles(false);
addressEntry = dataDictionary.getDataObjectEntry("org.kuali.rice.krad.datadictionary.validation.Address");
}
@Test
public void testValidNonUSAddress() {
DictionaryValidationResult dictionaryValidationResult = service.validate(validLondonAddress, "org.kuali.rice.kns.datadictionary.validation.MockAddress", addressEntry, true);
Assert.assertEquals(0, dictionaryValidationResult.getNumberOfWarnings());
Assert.assertEquals(0, dictionaryValidationResult.getNumberOfErrors());
}
@Test
public void testValidUSAddress() {
DictionaryValidationResult dictionaryValidationResult = service.validate(validUSAddress, "org.kuali.rice.kns.datadictionary.validation.MockAddress", addressEntry, true);
Assert.assertEquals(0, dictionaryValidationResult.getNumberOfWarnings());
Assert.assertEquals(0, dictionaryValidationResult.getNumberOfErrors());
}
@Test
public void testInvalidUSAddress() {
DictionaryValidationResult dictionaryValidationResult = service.validate(invalidUSAddress, "org.kuali.rice.kns.datadictionary.validation.MockAddress", addressEntry, true);
Assert.assertEquals(0, dictionaryValidationResult.getNumberOfWarnings());
Assert.assertEquals(2, dictionaryValidationResult.getNumberOfErrors());
Assert.assertTrue(hasError(dictionaryValidationResult, "country", RiceKeyConstants.ERROR_REQUIRES_FIELD));
Assert.assertTrue(hasError(dictionaryValidationResult, "postalCode", RiceKeyConstants.ERROR_OUT_OF_RANGE));
}
@Test
public void testValidNonDCAddress() {
DictionaryValidationResult dictionaryValidationResult = service.validate(validNonDCUSAddress, "org.kuali.rice.krad.datadictionary.validation.Address", addressEntry, true);
Assert.assertEquals(0, dictionaryValidationResult.getNumberOfWarnings());
Assert.assertEquals(0, dictionaryValidationResult.getNumberOfErrors());
}
@Test
public void testInvalidDCAddress() {
DictionaryValidationResult dictionaryValidationResult = service.validate(invalidDCUSAddress, "org.kuali.rice.krad.datadictionary.validation.Address", addressEntry, true);
Assert.assertEquals(0, dictionaryValidationResult.getNumberOfWarnings());
Assert.assertEquals(1, dictionaryValidationResult.getNumberOfErrors());
Assert.assertTrue(hasError(dictionaryValidationResult, "street1", RiceKeyConstants.ERROR_INVALID_FORMAT));
}
@Test
public void testNoStateNoZipUSAddress() {
DictionaryValidationResult dictionaryValidationResult = service.validate(noZipNoCityUSAddress, "org.kuali.rice.krad.datadictionary.validation.Address", addressEntry, true);
Assert.assertEquals(0, dictionaryValidationResult.getNumberOfWarnings());
Assert.assertEquals(1, dictionaryValidationResult.getNumberOfErrors());
if (dictionaryValidationResult.getNumberOfErrors() > 0) {
for (Iterator<ConstraintValidationResult> iterator = dictionaryValidationResult.iterator() ; iterator.hasNext() ;) {
ConstraintValidationResult constraintValidationResult = iterator.next();
if (constraintValidationResult.getStatus().getLevel() >= ErrorLevel.WARN.getLevel()) {
// The top level error should be an occurs error
Assert.assertEquals(ErrorLevel.ERROR, constraintValidationResult.getStatus());
Assert.assertEquals("error.occurs", constraintValidationResult.getErrorKey());
// It should have two children
List<ConstraintValidationResult> children = constraintValidationResult.getChildren();
Assert.assertNotNull(children);
Assert.assertEquals(2, children.size());
// The first child should have it's own child
ConstraintValidationResult child1 = children.get(0);
ConstraintValidationResult child2 = children.get(1);
Assert.assertEquals("error.requiresField", child1.getErrorKey());
Assert.assertArrayEquals(new String[] { "postalCode" }, child1.getErrorParameters());
List<ConstraintValidationResult> grandchildren = child2.getChildren();
Assert.assertNotNull(grandchildren);
Assert.assertEquals(2, grandchildren.size());
ConstraintValidationResult grandchild1 = grandchildren.get(0);
Assert.assertEquals(ErrorLevel.ERROR, grandchild1.getStatus());
Assert.assertEquals("error.requiresField", grandchild1.getErrorKey());
Assert.assertArrayEquals(new String[] { "city" }, grandchild1.getErrorParameters());
ConstraintValidationResult grandchild2 = grandchildren.get(1);
Assert.assertEquals(ErrorLevel.OK, grandchild2.getStatus());
Assert.assertEquals(new MustOccurConstraintProcessor().getName(), grandchild2.getConstraintName());
}
}
}
}
@Test
public void testSimpleCaseConstraints() throws IOException{
DictionaryValidationResult dictionaryValidationResult = service.validate(invalidHKAddress, "org.kuali.rice.krad.datadictionary.validation.Address", addressEntry, true);
Assert.assertEquals(0, dictionaryValidationResult.getNumberOfWarnings());
Assert.assertEquals(1, dictionaryValidationResult.getNumberOfErrors());
Assert.assertTrue(hasError(dictionaryValidationResult, "street2", RiceKeyConstants.ERROR_REQUIRED_NO_LABEL));
}
@Test
public void testRequiredNestedAttribute() throws IOException{
DataDictionaryService dataDictionaryService = new DataDictionaryServiceImpl(dataDictionary);
service.setDataDictionaryService(dataDictionaryService);
//Get object entries from dictionary
DataObjectEntry addressEntry = dataDictionary.getDataObjectEntry("org.kuali.rice.krad.datadictionary.validation.Address");
DataObjectEntry companyEntry = dataDictionary.getDataObjectEntry("org.kuali.rice.krad.datadictionary.validation.Company");
//Validate object entries
addressEntry.completeValidation();
companyEntry.completeValidation();
Company acmeCompany = new Company();
//Validate empty Company object
DictionaryValidationResult dictionaryValidationResult;
dictionaryValidationResult = service.validate(acmeCompany, "org.kuali.rice.krad.datadictionary.validation.Company",companyEntry, true);
//Main address is required; this should result in an error
Assert.assertEquals(1, dictionaryValidationResult.getNumberOfErrors());
Assert.assertTrue(hasError(dictionaryValidationResult, "mainAddress", RiceKeyConstants.ERROR_REQUIRED_NO_LABEL));
//Adding an invalid mainAddress for company
Address acmeMainAddress = new Address();
acmeCompany.setMainAddress(acmeMainAddress);
dictionaryValidationResult = service.validate(acmeCompany, "org.kuali.rice.krad.datadictionary.validation.Company",companyEntry, true);
//This should result in missing country error
Assert.assertEquals(2, dictionaryValidationResult.getNumberOfErrors());
Assert.assertTrue(hasError(dictionaryValidationResult, "mainAddress.country", RiceKeyConstants.ERROR_REQUIRED_NO_LABEL));
Assert.assertTrue(hasError(dictionaryValidationResult, "mainAddress", RiceKeyConstants.ERROR_OCCURS));
//Set items to valid address
acmeMainAddress.setCountry("US");
acmeMainAddress.setPostalCode("11111");
dictionaryValidationResult = service.validate(acmeCompany, "org.kuali.rice.krad.datadictionary.validation.Company",companyEntry, true);
//This should result in no error
Assert.assertEquals(0, dictionaryValidationResult.getNumberOfErrors());
//Test Nested Attribute Within Nested Attribute, and nested property override
Employee companyContact = new Employee();
acmeCompany.setMainContact(companyContact);
Person mainContactPerson = new Person();
companyContact.setEmployeeDetails(mainContactPerson);
companyContact.setEmployeeId("companyContact");
dictionaryValidationResult = service.validate(acmeCompany, "org.kuali.rice.krad.datadictionary.validation.Company",companyEntry, true);
Assert.assertEquals(1, dictionaryValidationResult.getNumberOfErrors());
Assert.assertTrue(hasError(dictionaryValidationResult, "mainContact.employeeDetails.gender", RiceKeyConstants.ERROR_REQUIRED_NO_LABEL));
}
@Test
public void testCollectionConstraints() throws IOException{
DataDictionaryService dataDictionaryService = new DataDictionaryServiceImpl(dataDictionary);
service.setDataDictionaryService(dataDictionaryService);
DataObjectEntry companyEntry = dataDictionary.getDataObjectEntry("org.kuali.rice.krad.datadictionary.validation.Company");
//Add collection constraint provider so constraints on collections get processed
service.getConstraintProviders().add(new CollectionDefinitionConstraintProvider());
Company acmeCompany = new Company();
Address acmeMainAddress = new Address();
acmeMainAddress.setCountry("US");
acmeMainAddress.setPostalCode("11111");
acmeCompany.setMainAddress(acmeMainAddress);
DictionaryValidationResult dictionaryValidationResult = service.validate(acmeCompany, "org.kuali.rice.krad.datadictionary.validation.Company",companyEntry, true);
//Company requires at least two employees
Assert.assertEquals(2, dictionaryValidationResult.getNumberOfErrors());
Assert.assertTrue(hasError(dictionaryValidationResult, "employees", RiceKeyConstants.ERROR_QUANTITY_RANGE));
Assert.assertTrue(hasError(dictionaryValidationResult, "slogans", RiceKeyConstants.ERROR_MIN_OCCURS));
//Add required employees and revalidate
Employee employee1 = new Employee();
Person person = new Person();
person.setBirthDate(new Date());
person.setGender("M");
employee1.setEmployeeDetails(person);
employee1.setEmployeeId("123456789");
List<Employee> employees = new ArrayList<Employee>();
employees.add(employee1);
acmeCompany.setEmployees(employees);
List<String> slogans = new ArrayList<String>();
slogans.add("Slogan One");
acmeCompany.setSlogans(slogans);
dictionaryValidationResult = service.validate(acmeCompany, "org.kuali.rice.krad.datadictionary.validation.Company",companyEntry, true);
Assert.assertEquals(2, dictionaryValidationResult.getNumberOfErrors());
Assert.assertTrue(hasError(dictionaryValidationResult, "employees", RiceKeyConstants.ERROR_QUANTITY_RANGE));
Assert.assertTrue(hasError(dictionaryValidationResult, "slogans", RiceKeyConstants.ERROR_MIN_OCCURS));
//Add two invalid employees; this should result in size-constraint and invalid-employee errors
employees.add(new Employee());
employees.add(new Employee());
slogans.add("Slogan Two");
dictionaryValidationResult = service.validate(acmeCompany, "org.kuali.rice.krad.datadictionary.validation.Company",companyEntry, true);
Assert.assertEquals(5, dictionaryValidationResult.getNumberOfErrors());
Assert.assertTrue(hasError(dictionaryValidationResult, "employees[1].employeeId", RiceKeyConstants.ERROR_REQUIRED_NO_LABEL));
Assert.assertTrue(hasError(dictionaryValidationResult, "employees[1].employeeDetails", RiceKeyConstants.ERROR_REQUIRED_NO_LABEL));
Assert.assertTrue(hasError(dictionaryValidationResult, "employees[2].employeeId", RiceKeyConstants.ERROR_REQUIRED_NO_LABEL));
Assert.assertTrue(hasError(dictionaryValidationResult, "employees[2].employeeDetails", RiceKeyConstants.ERROR_REQUIRED_NO_LABEL));
}
protected boolean hasError(DictionaryValidationResult dvr, String attributeName, String errorKey){
Iterator<ConstraintValidationResult> dvrIterator = dvr.iterator();
boolean containsError = false;
while (dvrIterator.hasNext() && !containsError){
ConstraintValidationResult cvr = dvrIterator.next();
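            //indexed names like "employees[1].employeeId" are recorded as full attribute paths,
            //so they must be matched against getAttributePath() rather than getAttributeName()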
if (attributeName.contains("[")){
containsError = attributeName.equals(cvr.getAttributePath()) && errorKey.equals(cvr.getErrorKey()) && ErrorLevel.ERROR==cvr.getStatus();
} else {
containsError = attributeName.equals(cvr.getAttributeName()) && errorKey.equals(cvr.getErrorKey()) && ErrorLevel.ERROR==cvr.getStatus();
}
}
return containsError;
}
}
| it/krad/src/test/java/org/kuali/rice/krad/service/impl/DictionaryValidationServiceImplTest.java | /**
* Copyright 2005-2011 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.krad.service.impl;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.kuali.rice.core.api.util.RiceKeyConstants;
import org.kuali.rice.krad.datadictionary.DataDictionary;
import org.kuali.rice.krad.datadictionary.DataObjectEntry;
import org.kuali.rice.krad.datadictionary.validation.Address;
import org.kuali.rice.krad.datadictionary.validation.Company;
import org.kuali.rice.krad.datadictionary.validation.Employee;
import org.kuali.rice.krad.datadictionary.validation.ErrorLevel;
import org.kuali.rice.krad.datadictionary.validation.Person;
import org.kuali.rice.krad.datadictionary.validation.constraint.provider.CollectionDefinitionConstraintProvider;
import org.kuali.rice.krad.datadictionary.validation.processor.MustOccurConstraintProcessor;
import org.kuali.rice.krad.datadictionary.validation.result.ConstraintValidationResult;
import org.kuali.rice.krad.datadictionary.validation.result.DictionaryValidationResult;
import org.kuali.rice.krad.service.DataDictionaryService;
import org.kuali.test.KRADTestCase;
import org.springframework.context.support.ClassPathXmlApplicationContext;
/**
*
* @author Kuali Rice Team ([email protected])
*/
public class DictionaryValidationServiceImplTest {
ClassPathXmlApplicationContext context;
private DictionaryValidationServiceImpl service;
DataDictionary dataDictionary = new DataDictionary();
protected DataObjectEntry addressEntry;
private Address validLondonAddress = new Address("8129 Maiden Lane", "", "London", "", "SE1 0P3", "UK", null);
private Address validUSAddress = new Address("893 Presidential Ave", "Suite 800", "Washington", "DC", "12031", "USA", null);
private Address invalidUSAddress = new Address("893 Presidential Ave", "Suite 800", "Washington", "", "92342-123456", "USA", null);
private Address noZipNoCityUSAddress = new Address("893 Presidential Ave", "Suite 800", null, "DC", null, "USA", null);
private Address validNonDCUSAddress = new Address("89 11th Street", "Suite 800", "Seattle", "WA", "", "USA", null);
private Address invalidDCUSAddress = new Address("89 Presidential Ave", "Suite 800", "Washington", "DC", "12031", "USA", null);
private Address invalidHKAddress = new Address("182 Lin Pai Road", "", "Hong Kong", "N.T.", "", "CN", null);
@Before
public void setUp() throws Exception {
//super.setUp();
context = new ClassPathXmlApplicationContext("classpath:DictionaryValidationServiceSpringBeans.xml");
service = (DictionaryValidationServiceImpl)context.getBean("dictionaryValidationService");
dataDictionary.addConfigFileLocation("classpath:org/kuali/rice/krad/bo/datadictionary/DataDictionaryBaseTypes.xml");
dataDictionary.addConfigFileLocation("classpath:org/kuali/rice/kns/bo/datadictionary/DataDictionaryBaseTypes.xml");
dataDictionary.addConfigFileLocation("classpath:org/kuali/rice/krad/test/datadictionary/validation/Company.xml");
dataDictionary.addConfigFileLocation("classpath:org/kuali/rice/krad/test/datadictionary/validation/Address.xml");
dataDictionary.addConfigFileLocation("classpath:org/kuali/rice/krad/test/datadictionary/validation/Employee.xml");
dataDictionary.addConfigFileLocation("classpath:org/kuali/rice/krad/test/datadictionary/validation/Person.xml");
dataDictionary.parseDataDictionaryConfigurationFiles(false);
addressEntry = dataDictionary.getDataObjectEntry("org.kuali.rice.krad.datadictionary.validation.Address");
}
@Test
public void testValidNonUSAddress() {
DictionaryValidationResult dictionaryValidationResult = service.validate(validLondonAddress, "org.kuali.rice.kns.datadictionary.validation.MockAddress", addressEntry, true);
Assert.assertEquals(0, dictionaryValidationResult.getNumberOfWarnings());
Assert.assertEquals(0, dictionaryValidationResult.getNumberOfErrors());
}
@Test
public void testValidUSAddress() {
DictionaryValidationResult dictionaryValidationResult = service.validate(validUSAddress, "org.kuali.rice.kns.datadictionary.validation.MockAddress", addressEntry, true);
Assert.assertEquals(0, dictionaryValidationResult.getNumberOfWarnings());
Assert.assertEquals(0, dictionaryValidationResult.getNumberOfErrors());
}
@Test
public void testInvalidUSAddress() {
DictionaryValidationResult dictionaryValidationResult = service.validate(invalidUSAddress, "org.kuali.rice.kns.datadictionary.validation.MockAddress", addressEntry, true);
Assert.assertEquals(0, dictionaryValidationResult.getNumberOfWarnings());
Assert.assertEquals(2, dictionaryValidationResult.getNumberOfErrors());
Assert.assertTrue(hasError(dictionaryValidationResult, "country", RiceKeyConstants.ERROR_REQUIRES_FIELD));
Assert.assertTrue(hasError(dictionaryValidationResult, "postalCode", RiceKeyConstants.ERROR_OUT_OF_RANGE));
}
@Test
public void testValidNonDCAddress() {
DictionaryValidationResult dictionaryValidationResult = service.validate(validNonDCUSAddress, "org.kuali.rice.krad.datadictionary.validation.Address", addressEntry, true);
Assert.assertEquals(0, dictionaryValidationResult.getNumberOfWarnings());
Assert.assertEquals(0, dictionaryValidationResult.getNumberOfErrors());
}
@Test
public void testInvalidDCAddress() {
DictionaryValidationResult dictionaryValidationResult = service.validate(invalidDCUSAddress, "org.kuali.rice.krad.datadictionary.validation.Address", addressEntry, true);
Assert.assertEquals(0, dictionaryValidationResult.getNumberOfWarnings());
Assert.assertEquals(1, dictionaryValidationResult.getNumberOfErrors());
Assert.assertTrue(hasError(dictionaryValidationResult, "street1", RiceKeyConstants.ERROR_INVALID_FORMAT));
}
@Test
public void testNoStateNoZipUSAddress() {
DictionaryValidationResult dictionaryValidationResult = service.validate(noZipNoCityUSAddress, "org.kuali.rice.krad.datadictionary.validation.Address", addressEntry, true);
Assert.assertEquals(0, dictionaryValidationResult.getNumberOfWarnings());
Assert.assertEquals(1, dictionaryValidationResult.getNumberOfErrors());
if (dictionaryValidationResult.getNumberOfErrors() > 0) {
for (Iterator<ConstraintValidationResult> iterator = dictionaryValidationResult.iterator() ; iterator.hasNext() ;) {
ConstraintValidationResult constraintValidationResult = iterator.next();
if (constraintValidationResult.getStatus().getLevel() >= ErrorLevel.WARN.getLevel()) {
// The top level error should be an occurs error
Assert.assertEquals(ErrorLevel.ERROR, constraintValidationResult.getStatus());
Assert.assertEquals("error.occurs", constraintValidationResult.getErrorKey());
// It should have two children
List<ConstraintValidationResult> children = constraintValidationResult.getChildren();
Assert.assertNotNull(children);
Assert.assertEquals(2, children.size());
// The first child should have its own child
ConstraintValidationResult child1 = children.get(0);
ConstraintValidationResult child2 = children.get(1);
Assert.assertEquals("error.requiresField", child1.getErrorKey());
Assert.assertArrayEquals(new String[] { "postalCode" }, child1.getErrorParameters());
List<ConstraintValidationResult> grandchildren = child2.getChildren();
Assert.assertNotNull(grandchildren);
Assert.assertEquals(2, grandchildren.size());
ConstraintValidationResult grandchild1 = grandchildren.get(0);
Assert.assertEquals(ErrorLevel.ERROR, grandchild1.getStatus());
Assert.assertEquals("error.requiresField", grandchild1.getErrorKey());
Assert.assertArrayEquals(new String[] { "city" }, grandchild1.getErrorParameters());
ConstraintValidationResult grandchild2 = grandchildren.get(1);
Assert.assertEquals(ErrorLevel.OK, grandchild2.getStatus());
Assert.assertEquals(new MustOccurConstraintProcessor().getName(), grandchild2.getConstraintName());
}
}
}
}
@Test
public void testSimpleCaseConstraints() throws IOException{
DictionaryValidationResult dictionaryValidationResult = service.validate(invalidHKAddress, "org.kuali.rice.krad.datadictionary.validation.Address", addressEntry, true);
Assert.assertEquals(0, dictionaryValidationResult.getNumberOfWarnings());
Assert.assertEquals(1, dictionaryValidationResult.getNumberOfErrors());
Assert.assertTrue(hasError(dictionaryValidationResult, "street2", RiceKeyConstants.ERROR_REQUIRED));
}
@Test
public void testRequiredNestedAttribute() throws IOException{
DataDictionaryService dataDictionaryService = new DataDictionaryServiceImpl(dataDictionary);
service.setDataDictionaryService(dataDictionaryService);
//Get object entries from dictionary
DataObjectEntry addressEntry = dataDictionary.getDataObjectEntry("org.kuali.rice.krad.datadictionary.validation.Address");
DataObjectEntry companyEntry = dataDictionary.getDataObjectEntry("org.kuali.rice.krad.datadictionary.validation.Company");
//Validate object entries
addressEntry.completeValidation();
companyEntry.completeValidation();
Company acmeCompany = new Company();
//Validate empty Company object
DictionaryValidationResult dictionaryValidationResult;
dictionaryValidationResult = service.validate(acmeCompany, "org.kuali.rice.krad.datadictionary.validation.Company",companyEntry, true);
//Main address is required, so this should result in an error
Assert.assertEquals(1, dictionaryValidationResult.getNumberOfErrors());
Assert.assertTrue(hasError(dictionaryValidationResult, "mainAddress", RiceKeyConstants.ERROR_REQUIRED));
//Adding an invalid mainAddress for company
Address acmeMainAddress = new Address();
acmeCompany.setMainAddress(acmeMainAddress);
dictionaryValidationResult = service.validate(acmeCompany, "org.kuali.rice.krad.datadictionary.validation.Company",companyEntry, true);
//This should result in missing country error
Assert.assertEquals(2, dictionaryValidationResult.getNumberOfErrors());
Assert.assertTrue(hasError(dictionaryValidationResult, "mainAddress.country", RiceKeyConstants.ERROR_REQUIRED));
Assert.assertTrue(hasError(dictionaryValidationResult, "mainAddress", RiceKeyConstants.ERROR_OCCURS));
//Set items to valid address
acmeMainAddress.setCountry("US");
acmeMainAddress.setPostalCode("11111");
dictionaryValidationResult = service.validate(acmeCompany, "org.kuali.rice.krad.datadictionary.validation.Company",companyEntry, true);
//This should result in no error
Assert.assertEquals(0, dictionaryValidationResult.getNumberOfErrors());
//Test Nested Attribute Within Nested Attribute, and nested property override
Employee companyContact = new Employee();
acmeCompany.setMainContact(companyContact);
Person mainContactPerson = new Person();
companyContact.setEmployeeDetails(mainContactPerson);
companyContact.setEmployeeId("companyContact");
dictionaryValidationResult = service.validate(acmeCompany, "org.kuali.rice.krad.datadictionary.validation.Company",companyEntry, true);
Assert.assertEquals(1, dictionaryValidationResult.getNumberOfErrors());
Assert.assertTrue(hasError(dictionaryValidationResult, "mainContact.employeeDetails.gender", RiceKeyConstants.ERROR_REQUIRED));
}
@Test
public void testCollectionConstraints() throws IOException{
DataDictionaryService dataDictionaryService = new DataDictionaryServiceImpl(dataDictionary);
service.setDataDictionaryService(dataDictionaryService);
DataObjectEntry companyEntry = dataDictionary.getDataObjectEntry("org.kuali.rice.krad.datadictionary.validation.Company");
//Add collection constraint provider so constraints on collections get processed
service.getConstraintProviders().add(new CollectionDefinitionConstraintProvider());
Company acmeCompany = new Company();
Address acmeMainAddress = new Address();
acmeMainAddress.setCountry("US");
acmeMainAddress.setPostalCode("11111");
acmeCompany.setMainAddress(acmeMainAddress);
DictionaryValidationResult dictionaryValidationResult = service.validate(acmeCompany, "org.kuali.rice.krad.datadictionary.validation.Company",companyEntry, true);
//Company requires at least two employees
Assert.assertEquals(2, dictionaryValidationResult.getNumberOfErrors());
Assert.assertTrue(hasError(dictionaryValidationResult, "employees", RiceKeyConstants.ERROR_QUANTITY_RANGE));
Assert.assertTrue(hasError(dictionaryValidationResult, "slogans", RiceKeyConstants.ERROR_MIN_OCCURS));
//Add required employees and revalidate
Employee employee1 = new Employee();
Person person = new Person();
person.setBirthDate(new Date());
person.setGender("M");
employee1.setEmployeeDetails(person);
employee1.setEmployeeId("123456789");
List<Employee> employees = new ArrayList<Employee>();
employees.add(employee1);
acmeCompany.setEmployees(employees);
List<String> slogans = new ArrayList<String>();
slogans.add("Slogan One");
acmeCompany.setSlogans(slogans);
dictionaryValidationResult = service.validate(acmeCompany, "org.kuali.rice.krad.datadictionary.validation.Company",companyEntry, true);
Assert.assertEquals(2, dictionaryValidationResult.getNumberOfErrors());
Assert.assertTrue(hasError(dictionaryValidationResult, "employees", RiceKeyConstants.ERROR_QUANTITY_RANGE));
Assert.assertTrue(hasError(dictionaryValidationResult, "slogans", RiceKeyConstants.ERROR_MIN_OCCURS));
//Add two invalid employees; this should result in size-constraint and invalid-employee errors
employees.add(new Employee());
employees.add(new Employee());
slogans.add("Slogan Two");
dictionaryValidationResult = service.validate(acmeCompany, "org.kuali.rice.krad.datadictionary.validation.Company",companyEntry, true);
Assert.assertEquals(5, dictionaryValidationResult.getNumberOfErrors());
Assert.assertTrue(hasError(dictionaryValidationResult, "employees[1].employeeId", RiceKeyConstants.ERROR_REQUIRED));
Assert.assertTrue(hasError(dictionaryValidationResult, "employees[1].employeeDetails", RiceKeyConstants.ERROR_REQUIRED));
Assert.assertTrue(hasError(dictionaryValidationResult, "employees[2].employeeId", RiceKeyConstants.ERROR_REQUIRED));
Assert.assertTrue(hasError(dictionaryValidationResult, "employees[2].employeeDetails", RiceKeyConstants.ERROR_REQUIRED));
}
protected boolean hasError(DictionaryValidationResult dvr, String attributeName, String errorKey){
Iterator<ConstraintValidationResult> dvrIterator = dvr.iterator();
boolean containsError = false;
while (dvrIterator.hasNext() && !containsError){
ConstraintValidationResult cvr = dvrIterator.next();
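            //indexed names like "employees[1].employeeId" are recorded as full attribute paths,
            //so they must be matched against getAttributePath() rather than getAttributeName()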
if (attributeName.contains("[")){
containsError = attributeName.equals(cvr.getAttributePath()) && errorKey.equals(cvr.getErrorKey()) && ErrorLevel.ERROR==cvr.getStatus();
} else {
containsError = attributeName.equals(cvr.getAttributeName()) && errorKey.equals(cvr.getErrorKey()) && ErrorLevel.ERROR==cvr.getStatus();
}
}
return containsError;
}
}
| KULRICE-5884 - Updated test to expect the new error message used by ExistenceConstraintProcessor
git-svn-id: 2a5d2b5a02908a0c4ba7967b726d8c4198d1b9ed@31063 7a7aa7f6-c479-11dc-97e2-85a2497f191d
| it/krad/src/test/java/org/kuali/rice/krad/service/impl/DictionaryValidationServiceImplTest.java | KULRICE-5884 - Updated test to expect the new error message used by ExistenceConstraintProcessor |
|
Java | apache-2.0 | 5f8d6b3206ed273d8f839c60c459b592a98c2965 | 0 | chenyhtech/vCampusServer,shellqiqi/vCampusServer | package seu.dao;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Repository;
import seu.domain.Library;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import java.util.Date;
@Repository
public class LibraryDao{
private JdbcTemplate jdbcTemplate;
@Autowired
public void setJdbcTemplate(JdbcTemplate jdbcTemplate) {
this.jdbcTemplate = jdbcTemplate;
}
//Admin adds a book (from a full entity)
public int insertBook(final Library book) {
final String sql = "INSERT INTO Library(BookId,BookName,studentId,startDate) VALUES(?,?,?,?)";
Object[] params = new Object[]{book.getBookId(),book.getBookName(),book.getStudentId(),book.getStartDate() };
return jdbcTemplate.update(sql, params);
}
//Admin adds a book (by id and name)
public int insertBook(final int bookId,final String bookName){
final String sql = "INSERT INTO Library(BookId,BookName,startDate) VALUES(?,?,?)";
Object[] params = new Object[]{bookId,bookName, null };
return jdbcTemplate.update(sql, params);
}
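    //Admin removes a book by its id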
public int deleteBookByBookId(final int id) {
final String sql = "DELETE FROM Library WHERE BookId = ?";
Object[] params = new Object[]{id};
return jdbcTemplate.update(sql, params);
}
//Student borrows a book
public int updateBookByBookId(final int bookId, final int studentId,final Date date){
final String sql = "UPDATE Library SET StudentID = ? , StartDate = ? WHERE BookId = ?";
Object[] params = new Object[]{studentId,date,bookId};
return jdbcTemplate.update(sql, params);
}
//Student returns a book
public int updateBookByBookId(final int bookId){
final String sql = "UPDATE Library SET StudentID = ? , StartDate = ? WHERE BookId = ?";
Object[] params = new Object[]{null,null,bookId};
return jdbcTemplate.update(sql, params);
}
public Library queryBookByBookId(final int id) {
final String sql = "SELECT * FROM Library WHERE BookId = ?";
Object[] params = new Object[]{id};
return jdbcTemplate.queryForObject(sql, params, new LibraryMapper());
}
public List<Library> queryBooksByStudentId(final int studentId){
final String sql = "SELECT * FROM Library WHERE StudentID = ?";
Object[] params = new Object[]{studentId};
return jdbcTemplate.query(sql, params, new LibraryMapper());
}
public List<Library> queryBooksByBookName(final String bookName){
final String sql = "SELECT * FROM Library WHERE BookName = ?";
Object[] params = new Object[]{bookName};
return jdbcTemplate.query(sql, params, new LibraryMapper());
}
public List<Library> queryAll() {
final String sql = "SELECT * FROM Library";
return jdbcTemplate.query(sql, new LibraryMapper());
}
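    //Maps a JDBC result set row onto a Library domain object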
private static final class LibraryMapper implements RowMapper<Library> {
@Override
public Library mapRow(ResultSet rs, int rowNum) throws SQLException {
return new Library(
rs.getInt("bookId"),
rs.getString("bookName"),
rs.getInt("studentId"),
rs.getDate("startDate")
);
}
}
}
| src/main/java/seu/dao/LibraryDao.java | package seu.dao;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.RowMapper;
import org.springframework.stereotype.Repository;
import seu.domain.Library;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.List;
import java.util.Date;
@Repository
public class LibraryDao{
private JdbcTemplate jdbcTemplate;
@Autowired
public void setJdbcTemplate(JdbcTemplate jdbcTemplate) {
this.jdbcTemplate = jdbcTemplate;
}
public int insertBook(final Library book) {
final String sql = "INSERT INTO Library(BookId,BookName,studentId,startDate) VALUES(?,?,?,?)";
Object[] params = new Object[]{book.getBookId(),book.getBookName(),book.getStudentId(),book.getStartDate() };
return jdbcTemplate.update(sql, params);
}
public int insertBook(final int bookId,final String bookName){
final String sql = "INSERT INTO Library(BookId,BookName,startDate) VALUES(?,?,?)";
Object[] params = new Object[]{bookId,bookName, null };
return jdbcTemplate.update(sql, params);
}
public int deleteBookByBookId(final int id) {
final String sql = "DELETE FROM Library WHERE BookId = ?";
Object[] params = new Object[]{id};
return jdbcTemplate.update(sql, params);
}
//Student borrows a book
public int updateBookByBookId(final int bookId, final int studentId,final Date date){
final String sql = "UPDATE Library SET StudentID = ? , StartDate = ? WHERE BookId = ?";
Object[] params = new Object[]{studentId,date,bookId};
return jdbcTemplate.update(sql, params);
}
//Student returns a book
public int updateBookByBookId(final int bookId){
final String sql = "UPDATE Library SET StudentID = ? , StartDate = ? WHERE BookId = ?";
Object[] params = new Object[]{null,null,bookId};
return jdbcTemplate.update(sql, params);
}
public Library queryBookByBookId(final int id) {
final String sql = "SELECT * FROM Library WHERE BookId = ?";
Object[] params = new Object[]{id};
return jdbcTemplate.queryForObject(sql, params, new LibraryMapper());
}
public List<Library> queryBooksByStudentId(final int studentId){
final String sql = "SELECT * FROM Library WHERE StudentID = ?";
Object[] params = new Object[]{studentId};
return jdbcTemplate.query(sql, params, new LibraryMapper());
}
public List<Library> queryBooksByBookName(final String bookName){
final String sql = "SELECT * FROM Library WHERE BookName = ?";
Object[] params = new Object[]{bookName};
return jdbcTemplate.query(sql, params, new LibraryMapper());
}
public List<Library> queryAll() {
final String sql = "SELECT * FROM Library";
return jdbcTemplate.query(sql, new LibraryMapper());
}
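    //Maps a JDBC result set row onto a Library domain object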
private static final class LibraryMapper implements RowMapper<Library> {
@Override
public Library mapRow(ResultSet rs, int rowNum) throws SQLException {
return new Library(
rs.getInt("bookId"),
rs.getString("bookName"),
rs.getInt("studentId"),
rs.getDate("startDate")
);
}
}
}
| update
| src/main/java/seu/dao/LibraryDao.java | update |
|
Java | apache-2.0 | 7d6a919194d93f458bf54b3a47dd20fa677a65b0 | 0 | wcm-io-qa/wcm-io-qa-galenium,wcm-io-qa/wcm-io-qa-galenium,wcm-io-qa/wcm-io-qa-galenium | /*
* #%L
* wcm.io
* %%
* Copyright (C) 2017 wcm.io
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package io.wcm.qa.galenium.differences.generic;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import io.wcm.qa.galenium.differences.base.Difference;
import io.wcm.qa.galenium.differences.base.Differences;
import io.wcm.qa.galenium.differences.util.DifferenceUtil;
/**
* Holds dimensions of potential differences for samples and supplies them either as file path or property key.
*/
public class MutableDifferences implements Differences {
private Collection<Difference> differences = new ArrayList<Difference>();
/**
* See {@link ArrayList#add(Object)}
* @param difference to be appended
* @return true if adding changed anything
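    * @throws IllegalArgumentException when <code>difference</code> is null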
*/
public boolean add(Difference difference) {
if (difference == null) {
throw new IllegalArgumentException("cannot add null to MutableDifferences.");
}
return getDifferences().add(difference);
}
/**
* See {@link ArrayList#addAll(Collection)}
* @param toBeAppended Collection of differences to be appended
* @return if differences changed after appending
*/
public boolean addAll(Collection<? extends Difference> toBeAppended) {
boolean changed = false;
for (Difference difference : toBeAppended) {
if (add(difference)) {
changed = true;
}
}
return changed;
}
/**
* @param toBeAppended Collection of differences to be appended
* @return if differences changed after appending
*/
public boolean addAll(Iterable<? extends Difference> toBeAppended) {
boolean changed = false;
for (Difference difference : toBeAppended) {
if (add(difference)) {
changed = true;
}
}
return changed;
}
@Override
public String asFilePath() {
return joinTagsWith("/");
}
@Override
public String asPropertyKey() {
return joinTagsWith(".");
}
/**
* See {@link ArrayList#clear()}
*/
public void clear() {
getDifferences().clear();
}
public Collection<Difference> getDifferences() {
return differences;
}
@Override
public Iterator<Difference> iterator() {
return getDifferences().iterator();
}
/**
* See {@link ArrayList#remove(Object)}
* @param difference to be removed
* @return true if difference existed and was removed
*/
public boolean remove(Difference difference) {
return getDifferences().remove(difference);
}
@Override
public String toString() {
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append("differences: [");
stringBuilder.append(joinNamesWith("]|["));
stringBuilder.append("], asPropertyKey: '");
stringBuilder.append(asPropertyKey());
stringBuilder.append("', asFilePath: '");
stringBuilder.append(asFilePath());
stringBuilder.append("'");
return stringBuilder.toString();
}
private String joinNamesWith(String separator) {
return DifferenceUtil.joinNamesWith(getDifferences(), separator);
}
protected String joinTagsWith(String separator) {
return DifferenceUtil.joinTagsWith(getDifferences(), separator);
}
}
| modules/differences/src/main/java/io/wcm/qa/galenium/differences/generic/MutableDifferences.java | /*
* #%L
* wcm.io
* %%
* Copyright (C) 2017 wcm.io
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package io.wcm.qa.galenium.differences.generic;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Iterator;
import org.apache.commons.collections4.CollectionUtils;
import io.wcm.qa.galenium.differences.base.Difference;
import io.wcm.qa.galenium.differences.base.Differences;
import io.wcm.qa.galenium.differences.util.DifferenceUtil;
/**
* Holds dimensions of potential differences for samples and supplies them either as file path or property key.
*/
public class MutableDifferences implements Differences {
private Collection<Difference> differences = new ArrayList<Difference>();
/**
* See {@link ArrayList#add(Object)}
* @param difference to be appended
* @return true if adding changed anything
*/
public boolean add(Difference difference) {
return getDifferences().add(difference);
}
/**
* See {@link ArrayList#addAll(Collection)}
* @param toBeAppended Collection of differences to be appended
* @return if differences changed after appending
*/
public boolean addAll(Collection<? extends Difference> toBeAppended) {
return getDifferences().addAll(toBeAppended);
}
/**
* @param toBeAppended Collection of differences to be appended
* @return if differences changed after appending
*/
public boolean addAll(Iterable<? extends Difference> toBeAppended) {
return CollectionUtils.addAll(getDifferences(), toBeAppended);
}
@Override
public String asFilePath() {
return joinTagsWith("/");
}
@Override
public String asPropertyKey() {
return joinTagsWith(".");
}
/**
* See {@link ArrayList#clear()}
*/
public void clear() {
getDifferences().clear();
}
public Collection<Difference> getDifferences() {
return differences;
}
@Override
public Iterator<Difference> iterator() {
return getDifferences().iterator();
}
/**
* See {@link ArrayList#remove(Object)}
* @param difference to be removed
* @return true if difference existed and was removed
*/
public boolean remove(Difference difference) {
return getDifferences().remove(difference);
}
@Override
public String toString() {
StringBuilder stringBuilder = new StringBuilder();
stringBuilder.append("differences: [");
stringBuilder.append(joinNamesWith("]|["));
stringBuilder.append("], asPropertyKey: '");
stringBuilder.append(asPropertyKey());
stringBuilder.append("', asFilePath: '");
stringBuilder.append(asFilePath());
stringBuilder.append("'");
return stringBuilder.toString();
}
private String joinNamesWith(String separator) {
return DifferenceUtil.joinNamesWith(getDifferences(), separator);
}
protected String joinTagsWith(String separator) {
return DifferenceUtil.joinTagsWith(getDifferences(), separator);
}
}
| make MutableDifferences more robust and fail loud earlier
| modules/differences/src/main/java/io/wcm/qa/galenium/differences/generic/MutableDifferences.java | make MutableDifferences more robust and fail loud earlier |
|
Java | apache-2.0 | 92bd55894ec7f4cd790c0820c91d3faeab6bdc4c | 0 | Panupong54/youtubehtml,jittagornp/cpe4235,jittagornp/cpe4235,metavin007/HTMLandCSS,gamekapeak/cpe4235,kamonppob/CPE4235,metavin007/HTMLandCSS,jittagornp/cpe4235,gamekapeak/cpe4235,gamekapeak/cpe4235,kamonppob/CPE4235,Panupong54/youtubehtml,metavin007/HTMLandCSS,kamonppob/CPE4235,Panupong54/youtubehtml | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.blogspot.na5cent.resourcelocal.model;
import java.io.Serializable;
import java.util.Objects;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
/**
*
* @author anonymous
*/
@Entity
@Table(name = "employees")
public class Employee implements Serializable {
@Id
@Column(name = "employee_id")
private Integer id;
@Column(name = "first_name")
private String firstName;
@Column(name = "last_name")
private String lastName;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getFirstName() {
return firstName;
}
public void setFirstName(String firstName) {
this.firstName = firstName;
}
public String getLastName() {
return lastName;
}
public void setLastName(String lastName) {
this.lastName = lastName;
}
@Override
public int hashCode() {
int hash = 5;
hash = 41 * hash + Objects.hashCode(this.id);
return hash;
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final Employee other = (Employee) obj;
if (!Objects.equals(this.id, other.id)) {
return false;
}
return true;
}
}
| mvc/orm/resource-local/src/main/java/com/blogspot/na5cent/resourcelocal/model/Employee.java | /*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package com.blogspot.na5cent.resourcelocal.model;
import java.io.Serializable;
import java.util.Objects;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
/**
*
* @author anonymous
*/
@Entity
@Table(name = "employees")
public class Employee implements Serializable {
@Id
@Column(name = "employee_id")
private Integer id;
@Column(name = "first_name")
private String firstName;
@Column(name = "last_name")
private String lastName;
private String email;
@Column(name = "phone_number")
private String phoneNumber;
public Integer getId() {
return id;
}
public void setId(Integer id) {
this.id = id;
}
public String getFirstName() {
return firstName;
}
public void setFirstName(String firstName) {
this.firstName = firstName;
}
public String getLastName() {
return lastName;
}
public void setLastName(String lastName) {
this.lastName = lastName;
}
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public String getPhoneNumber() {
return phoneNumber;
}
public void setPhoneNumber(String phoneNumber) {
this.phoneNumber = phoneNumber;
}
@Override
public int hashCode() {
int hash = 5;
hash = 41 * hash + Objects.hashCode(this.id);
return hash;
}
@Override
public boolean equals(Object obj) {
if (obj == null) {
return false;
}
if (getClass() != obj.getClass()) {
return false;
}
final Employee other = (Employee) obj;
if (!Objects.equals(this.id, other.id)) {
return false;
}
return true;
}
}
| remove unnecessary attribute | mvc/orm/resource-local/src/main/java/com/blogspot/na5cent/resourcelocal/model/Employee.java | remove unnecessary attribute |
|
Java | apache-2.0 | b79faa3a2943102967869e4c8066dfe29cf862b7 | 0 | ocpsoft/prettytime | /*
* Copyright 2012 <a href="mailto:[email protected]">Lincoln Baxter, III</a>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ocpsoft.prettytime.jsf;
import javax.faces.component.UIComponent;
import javax.faces.context.FacesContext;
import javax.faces.convert.Converter;
import javax.faces.convert.ConverterException;
import org.ocpsoft.prettytime.PrettyTime;
import java.io.Serializable;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.Locale;
import java.util.Map;
public class PrettyTimeConverter implements Converter, Serializable
{
private static final long serialVersionUID = 7690470362440868260L;
private static final int MAX_CACHE_SIZE = 20;
// Cache PrettyTime per locale. LRU cache to prevent memory leak.
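   // accessOrder=true (third constructor argument) iterates entries least-recently-used first,
   // so removeEldestEntry below drops the LRU locale once the cap is exceeded.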
private static final Map<Locale, PrettyTime> PRETTY_TIME_LOCALE_MAP = new LinkedHashMap<Locale, PrettyTime>(MAX_CACHE_SIZE + 1, 1.1F, true)
{
private static final long serialVersionUID = 5093634937930600141L;
@Override
protected boolean removeEldestEntry(Map.Entry<Locale, PrettyTime> eldest)
{
return size() > MAX_CACHE_SIZE;
}
};
public Object getAsObject(final FacesContext context, final UIComponent comp, final String value)
{
throw new ConverterException("Does not yet support converting String to Date");
}
public String getAsString(final FacesContext context, final UIComponent comp, final Object value)
{
if (value instanceof Date)
{
// Use locale of current viewer.
Locale locale = context.getViewRoot().getLocale();
PrettyTime prettyTime;
synchronized (PRETTY_TIME_LOCALE_MAP)
{
prettyTime = PRETTY_TIME_LOCALE_MAP.computeIfAbsent(locale, PrettyTime::new);
}
return prettyTime.format((Date) value);
}
throw new ConverterException("May only be used to convert java.util.Date objects. Got: " + (value != null ? value.getClass() : "null"));
}
}
| jsf/src/main/java/org/ocpsoft/prettytime/jsf/PrettyTimeConverter.java | /*
* Copyright 2012 <a href="mailto:[email protected]">Lincoln Baxter, III</a>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.ocpsoft.prettytime.jsf;
import javax.faces.component.UIComponent;
import javax.faces.context.FacesContext;
import javax.faces.convert.Converter;
import javax.faces.convert.ConverterException;
import org.ocpsoft.prettytime.PrettyTime;
import java.io.Serializable;
import java.util.Date;
import java.util.LinkedHashMap;
import java.util.Locale;
import java.util.Map;
public class PrettyTimeConverter implements Converter, Serializable
{
private static final long serialVersionUID = 7690470362440868260L;
private static final int MAX_CACHE_SIZE = 20;
// Cache PrettyTime per locale. LRU cache to prevent memory leak.
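   // accessOrder=true (third constructor argument) iterates entries least-recently-used first,
   // so removeEldestEntry below drops the LRU locale once the cap is exceeded.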
private static final Map<Locale, PrettyTime> PRETTY_TIME_LOCALE_MAP = new LinkedHashMap<Locale, PrettyTime>(MAX_CACHE_SIZE + 1, 1.1F, true)
{
private static final long serialVersionUID = 5093634937930600141L;
@Override
protected boolean removeEldestEntry(Map.Entry<Locale, PrettyTime> eldest)
{
return size() > MAX_CACHE_SIZE;
}
};
public Object getAsObject(final FacesContext context, final UIComponent comp, final String value)
{
throw new ConverterException("Does not yet support converting String to Date");
}
public String getAsString(final FacesContext context, final UIComponent comp, final Object value)
{
if (value instanceof Date)
{
// Use locale of current viewer.
Locale locale = context.getViewRoot().getLocale();
synchronized (PRETTY_TIME_LOCALE_MAP)
{
return PRETTY_TIME_LOCALE_MAP.computeIfAbsent(locale, PrettyTime::new)
.format((Date) value);
}
}
throw new ConverterException("May only be used to convert java.util.Date objects. Got: " + (value != null ? value.getClass() : "null"));
}
}
| Fix #224 move format out of synchronized block
| jsf/src/main/java/org/ocpsoft/prettytime/jsf/PrettyTimeConverter.java | Fix #224 move format out of synchronized block |
|
Java | apache-2.0 | 0b9a5f5ed34d5562364a6dd61f6ab9c0220d1b8b | 0 | mohanaraosv/commons-pool,mohanaraosv/commons-pool,mohanaraosv/commons-pool | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.pool.impl;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.TreeMap;
import java.util.TimerTask;
import org.apache.commons.pool.BaseKeyedObjectPool;
import org.apache.commons.pool.KeyedObjectPool;
import org.apache.commons.pool.KeyedPoolableObjectFactory;
/**
* A configurable <code>KeyedObjectPool</code> implementation.
* <p>
* When coupled with the appropriate {@link KeyedPoolableObjectFactory},
* <code>GenericKeyedObjectPool</code> provides robust pooling functionality for
* keyed objects. A <code>GenericKeyedObjectPool</code> can be viewed as a map
* of pools, keyed on the (unique) key values provided to the
* {@link #preparePool preparePool}, {@link #addObject addObject} or
* {@link #borrowObject borrowObject} methods. Each time a new key value is
* provided to one of these methods, a new pool is created under the given key
* to be managed by the containing <code>GenericKeyedObjectPool.</code>
* </p>
* <p>A <code>GenericKeyedObjectPool</code> provides a number of configurable
* parameters:</p>
* <ul>
* <li>
* {@link #setMaxActive maxActive} controls the maximum number of objects
* (per key) that can be borrowed from the pool at one time. When
* non-positive, there is no limit to the number of objects per key.
* When {@link #setMaxActive maxActive} is exceeded, the keyed pool is said
* to be exhausted. The default setting for this parameter is 8.
* </li>
* <li>
* {@link #setMaxTotal maxTotal} sets a global limit on the number of objects
* that can be in circulation (active or idle) within the combined set of
* pools. When non-positive, there is no limit to the total number of
* objects in circulation. When {@link #setMaxTotal maxTotal} is exceeded,
* all keyed pools are exhausted. When <code>maxTotal</code> is set to a
* positive value and {@link #borrowObject borrowObject} is invoked
* when at the limit with no idle instances available, an attempt is made to
* create room by clearing the oldest 15% of the elements from the keyed
* pools. The default setting for this parameter is -1 (no limit).
* </li>
* <li>
* {@link #setMaxIdle maxIdle} controls the maximum number of objects that can
* sit idle in the pool (per key) at any time. When negative, there
* is no limit to the number of objects that may be idle per key. The
* default setting for this parameter is 8.
* </li>
* <li>
* {@link #setWhenExhaustedAction whenExhaustedAction} specifies the
* behavior of the {@link #borrowObject borrowObject} method when a keyed
* pool is exhausted:
* <ul>
* <li>
* When {@link #setWhenExhaustedAction whenExhaustedAction} is
* {@link #WHEN_EXHAUSTED_FAIL}, {@link #borrowObject borrowObject} will throw
* a {@link NoSuchElementException}
* </li>
* <li>
* When {@link #setWhenExhaustedAction whenExhaustedAction} is
* {@link #WHEN_EXHAUSTED_GROW}, {@link #borrowObject borrowObject} will create a new
* object and return it (essentially making {@link #setMaxActive maxActive}
* meaningless.)
* </li>
* <li>
* When {@link #setWhenExhaustedAction whenExhaustedAction}
* is {@link #WHEN_EXHAUSTED_BLOCK}, {@link #borrowObject borrowObject} will block
 * (invoking {@link Object#wait() wait}) until a new or idle object is available.
* If a positive {@link #setMaxWait maxWait}
* value is supplied, the {@link #borrowObject borrowObject} will block for at
* most that many milliseconds, after which a {@link NoSuchElementException}
* will be thrown. If {@link #setMaxWait maxWait} is non-positive,
* the {@link #borrowObject borrowObject} method will block indefinitely.
* </li>
* </ul>
* The default <code>whenExhaustedAction</code> setting is
* {@link #WHEN_EXHAUSTED_BLOCK}.
* </li>
* <li>
* When {@link #setTestOnBorrow testOnBorrow} is set, the pool will
* attempt to validate each object before it is returned from the
* {@link #borrowObject borrowObject} method. (Using the provided factory's
* {@link KeyedPoolableObjectFactory#validateObject validateObject} method.)
* Objects that fail to validate will be dropped from the pool, and a
* different object will be borrowed. The default setting for this parameter
* is <code>false.</code>
* </li>
* <li>
* When {@link #setTestOnReturn testOnReturn} is set, the pool will
* attempt to validate each object before it is returned to the pool in the
* {@link #returnObject returnObject} method. (Using the provided factory's
* {@link KeyedPoolableObjectFactory#validateObject validateObject}
* method.) Objects that fail to validate will be dropped from the pool.
* The default setting for this parameter is <code>false.</code>
* </li>
* </ul>
* <p>
* Optionally, one may configure the pool to examine and possibly evict objects
* as they sit idle in the pool and to ensure that a minimum number of idle
* objects is maintained for each key. This is performed by an
* "idle object eviction" thread, which runs asynchronously. Caution should be
* used when configuring this optional feature. Eviction runs require an
* exclusive synchronization lock on the pool, so if they run too frequently
* and / or incur excessive latency when creating, destroying or validating
* object instances, performance issues may result. The idle object eviction
 * thread may be configured using the following attributes (a configuration
 * sketch follows the list):
* <ul>
* <li>
* {@link #setTimeBetweenEvictionRunsMillis timeBetweenEvictionRunsMillis}
* indicates how long the eviction thread should sleep before "runs" of examining
* idle objects. When non-positive, no eviction thread will be launched. The
* default setting for this parameter is -1 (i.e., by default, idle object
* eviction is disabled).
* </li>
* <li>
* {@link #setMinEvictableIdleTimeMillis minEvictableIdleTimeMillis}
* specifies the minimum amount of time that an object may sit idle in the
* pool before it is eligible for eviction due to idle time. When
* non-positive, no object will be dropped from the pool due to idle time
* alone. This setting has no effect unless
* <code>timeBetweenEvictionRunsMillis > 0.</code> The default setting
* for this parameter is 30 minutes.
* </li>
* <li>
* {@link #setTestWhileIdle testWhileIdle} indicates whether or not idle
* objects should be validated using the factory's
* {@link KeyedPoolableObjectFactory#validateObject validateObject} method
* during idle object eviction runs. Objects that fail to validate will be
* dropped from the pool. This setting has no effect unless
* <code>timeBetweenEvictionRunsMillis > 0.</code> The default setting
* for this parameter is <code>false.</code>
* </li>
* <li>
* {@link #setMinIdle minIdle} sets a target value for the minimum number of
* idle objects (per key) that should always be available. If this parameter
* is set to a positive number and
* <code>timeBetweenEvictionRunsMillis > 0,</code> each time the idle object
* eviction thread runs, it will try to create enough idle instances so that
* there will be <code>minIdle</code> idle instances available under each
* key. This parameter is also used by {@link #preparePool preparePool}
* if <code>true</code> is provided as that method's
* <code>populateImmediately</code> parameter. The default setting for this
* parameter is 0.
* </li>
* </ul>
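 * <p>
 * A configuration sketch for the eviction thread follows (assuming
 * <code>factory</code> is a client-supplied {@link KeyedPoolableObjectFactory};
 * the values shown are arbitrary illustrations, not recommended defaults):
 * <pre>
 * GenericKeyedObjectPool pool = new GenericKeyedObjectPool(factory);
 * pool.setTimeBetweenEvictionRunsMillis(60L * 1000L);   // run the evictor once a minute
 * pool.setMinEvictableIdleTimeMillis(5L * 60L * 1000L); // evict instances idle for 5+ minutes
 * pool.setTestWhileIdle(true); // validate idle instances during eviction runs
 * pool.setMinIdle(2);          // try to keep at least 2 idle instances per key
 * </pre>
 * </p>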
* <p>
* The pools can be configured to behave as LIFO queues with respect to idle
* objects - always returning the most recently used object from the pool,
* or as FIFO queues, where borrowObject always returns the oldest object
* in the idle object pool.
* <ul>
* <li>
* {@link #setLifo <i>Lifo</i>}
* determines whether or not the pools return idle objects in
* last-in-first-out order. The default setting for this parameter is
* <code>true.</code>
* </li>
* </ul>
* <p>
* GenericKeyedObjectPool is not usable without a {@link KeyedPoolableObjectFactory}. A
* non-<code>null</code> factory must be provided either as a constructor argument
* or via a call to {@link #setFactory setFactory} before the pool is used.
* </p>
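 * <p>
 * A minimal usage sketch follows; <code>MyKeyedFactory</code> stands for a
 * hypothetical client-supplied {@link KeyedPoolableObjectFactory}
 * implementation and is not part of this package:
 * <pre>
 * GenericKeyedObjectPool pool = new GenericKeyedObjectPool(new MyKeyedFactory());
 * pool.setMaxActive(10);  // at most 10 active instances per key
 * pool.setWhenExhaustedAction(GenericKeyedObjectPool.WHEN_EXHAUSTED_BLOCK);
 * pool.setMaxWait(1000L); // block at most one second, then fail
 *
 * Object key = "someKey";
 * Object obj = pool.borrowObject(key);
 * try {
 *     // ... use obj ...
 * } finally {
 *     pool.returnObject(key, obj);
 * }
 * </pre>
 * </p>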
* @see GenericObjectPool
* @author Rodney Waldhoff
* @author Dirk Verbeeck
* @author Sandy McArthur
* @version $Revision$ $Date$
* @since Pool 1.0
*/
public class GenericKeyedObjectPool extends BaseKeyedObjectPool implements KeyedObjectPool {
//--- public constants -------------------------------------------
/**
* A "when exhausted action" type indicating that when the pool is
* exhausted (i.e., the maximum number of active objects has
* been reached), the {@link #borrowObject}
* method should fail, throwing a {@link NoSuchElementException}.
* @see #WHEN_EXHAUSTED_BLOCK
* @see #WHEN_EXHAUSTED_GROW
* @see #setWhenExhaustedAction
*/
public static final byte WHEN_EXHAUSTED_FAIL = 0;
/**
* A "when exhausted action" type indicating that when the pool
* is exhausted (i.e., the maximum number
* of active objects has been reached), the {@link #borrowObject}
* method should block until a new object is available, or the
* {@link #getMaxWait maximum wait time} has been reached.
* @see #WHEN_EXHAUSTED_FAIL
* @see #WHEN_EXHAUSTED_GROW
* @see #setMaxWait
* @see #getMaxWait
* @see #setWhenExhaustedAction
*/
public static final byte WHEN_EXHAUSTED_BLOCK = 1;
/**
* A "when exhausted action" type indicating that when the pool is
* exhausted (i.e., the maximum number
* of active objects has been reached), the {@link #borrowObject}
* method should simply create a new object anyway.
* @see #WHEN_EXHAUSTED_FAIL
* @see #WHEN_EXHAUSTED_GROW
* @see #setWhenExhaustedAction
*/
public static final byte WHEN_EXHAUSTED_GROW = 2;
/**
* The default cap on the number of idle instances (per key) in the pool.
* @see #getMaxIdle
* @see #setMaxIdle
*/
public static final int DEFAULT_MAX_IDLE = 8;
/**
* The default cap on the total number of active instances (per key)
* from the pool.
* @see #getMaxActive
* @see #setMaxActive
*/
public static final int DEFAULT_MAX_ACTIVE = 8;
/**
 * The default cap on the overall maximum number of objects that can
* exist at one time.
* @see #getMaxTotal
* @see #setMaxTotal
*/
public static final int DEFAULT_MAX_TOTAL = -1;
/**
* The default "when exhausted action" for the pool.
* @see #WHEN_EXHAUSTED_BLOCK
* @see #WHEN_EXHAUSTED_FAIL
* @see #WHEN_EXHAUSTED_GROW
* @see #setWhenExhaustedAction
*/
public static final byte DEFAULT_WHEN_EXHAUSTED_ACTION = WHEN_EXHAUSTED_BLOCK;
/**
* The default maximum amount of time (in milliseconds) the
* {@link #borrowObject} method should block before throwing
* an exception when the pool is exhausted and the
* {@link #getWhenExhaustedAction "when exhausted" action} is
* {@link #WHEN_EXHAUSTED_BLOCK}.
* @see #getMaxWait
* @see #setMaxWait
*/
public static final long DEFAULT_MAX_WAIT = -1L;
/**
* The default "test on borrow" value.
* @see #getTestOnBorrow
* @see #setTestOnBorrow
*/
public static final boolean DEFAULT_TEST_ON_BORROW = false;
/**
* The default "test on return" value.
* @see #getTestOnReturn
* @see #setTestOnReturn
*/
public static final boolean DEFAULT_TEST_ON_RETURN = false;
/**
* The default "test while idle" value.
* @see #getTestWhileIdle
* @see #setTestWhileIdle
* @see #getTimeBetweenEvictionRunsMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
public static final boolean DEFAULT_TEST_WHILE_IDLE = false;
/**
* The default "time between eviction runs" value.
* @see #getTimeBetweenEvictionRunsMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
public static final long DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS = -1L;
/**
* The default number of objects to examine per run in the
* idle object evictor.
* @see #getNumTestsPerEvictionRun
* @see #setNumTestsPerEvictionRun
* @see #getTimeBetweenEvictionRunsMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
public static final int DEFAULT_NUM_TESTS_PER_EVICTION_RUN = 3;
/**
* The default value for {@link #getMinEvictableIdleTimeMillis}.
* @see #getMinEvictableIdleTimeMillis
* @see #setMinEvictableIdleTimeMillis
*/
public static final long DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS = 1000L * 60L * 30L;
/**
* The default minimum level of idle objects in the pool.
* @since Pool 1.3
* @see #setMinIdle
* @see #getMinIdle
*/
public static final int DEFAULT_MIN_IDLE = 0;
/**
* The default LIFO status. True means that borrowObject returns the
* most recently used ("last in") idle object in a pool (if there are
* idle instances available). False means that pools behave as FIFO
* queues - objects are taken from idle object pools in the order that
* they are returned.
* @see #setLifo
*/
public static final boolean DEFAULT_LIFO = true;
//--- constructors -----------------------------------------------
/**
* Create a new <code>GenericKeyedObjectPool</code> with no factory.
*
* @see #GenericKeyedObjectPool(KeyedPoolableObjectFactory)
* @see #setFactory(KeyedPoolableObjectFactory)
*/
public GenericKeyedObjectPool() {
this(null,DEFAULT_MAX_ACTIVE,DEFAULT_WHEN_EXHAUSTED_ACTION,DEFAULT_MAX_WAIT,DEFAULT_MAX_IDLE,DEFAULT_TEST_ON_BORROW,DEFAULT_TEST_ON_RETURN,DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS,DEFAULT_NUM_TESTS_PER_EVICTION_RUN,DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS,DEFAULT_TEST_WHILE_IDLE);
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using the specified values.
* @param factory the <code>KeyedPoolableObjectFactory</code> to use to create, validate, and destroy objects if not <code>null</code>
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory factory) {
this(factory,DEFAULT_MAX_ACTIVE,DEFAULT_WHEN_EXHAUSTED_ACTION,DEFAULT_MAX_WAIT,DEFAULT_MAX_IDLE,DEFAULT_TEST_ON_BORROW,DEFAULT_TEST_ON_RETURN,DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS,DEFAULT_NUM_TESTS_PER_EVICTION_RUN,DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS,DEFAULT_TEST_WHILE_IDLE);
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using the specified values.
* @param factory the <code>KeyedPoolableObjectFactory</code> to use to create, validate, and destroy objects if not <code>null</code>
* @param config a non-<code>null</code> {@link GenericKeyedObjectPool.Config} describing the configuration
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory factory, GenericKeyedObjectPool.Config config) {
this(factory,config.maxActive,config.whenExhaustedAction,config.maxWait,config.maxIdle,config.maxTotal, config.minIdle,config.testOnBorrow,config.testOnReturn,config.timeBetweenEvictionRunsMillis,config.numTestsPerEvictionRun,config.minEvictableIdleTimeMillis,config.testWhileIdle,config.lifo);
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using the specified values.
* @param factory the <code>KeyedPoolableObjectFactory</code> to use to create, validate, and destroy objects if not <code>null</code>
* @param maxActive the maximum number of objects that can be borrowed from me at one time (see {@link #setMaxActive})
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory factory, int maxActive) {
this(factory,maxActive,DEFAULT_WHEN_EXHAUSTED_ACTION,DEFAULT_MAX_WAIT,DEFAULT_MAX_IDLE,DEFAULT_TEST_ON_BORROW,DEFAULT_TEST_ON_RETURN,DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS,DEFAULT_NUM_TESTS_PER_EVICTION_RUN,DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS,DEFAULT_TEST_WHILE_IDLE);
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using the specified values.
* @param factory the <code>KeyedPoolableObjectFactory</code> to use to create, validate, and destroy objects if not <code>null</code>
* @param maxActive the maximum number of objects that can be borrowed from me at one time (see {@link #setMaxActive})
* @param whenExhaustedAction the action to take when the pool is exhausted (see {@link #setWhenExhaustedAction})
* @param maxWait the maximum amount of time to wait for an idle object when the pool is exhausted and <code>whenExhaustedAction</code> is {@link #WHEN_EXHAUSTED_BLOCK} (otherwise ignored) (see {@link #setMaxWait})
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory factory, int maxActive, byte whenExhaustedAction, long maxWait) {
this(factory,maxActive,whenExhaustedAction,maxWait,DEFAULT_MAX_IDLE,DEFAULT_TEST_ON_BORROW,DEFAULT_TEST_ON_RETURN,DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS,DEFAULT_NUM_TESTS_PER_EVICTION_RUN,DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS,DEFAULT_TEST_WHILE_IDLE);
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using the specified values.
* @param factory the <code>KeyedPoolableObjectFactory</code> to use to create, validate, and destroy objects if not <code>null</code>
* @param maxActive the maximum number of objects that can be borrowed from me at one time (see {@link #setMaxActive})
* @param maxWait the maximum amount of time to wait for an idle object when the pool is exhausted and <code>whenExhaustedAction</code> is {@link #WHEN_EXHAUSTED_BLOCK} (otherwise ignored) (see {@link #setMaxWait})
* @param whenExhaustedAction the action to take when the pool is exhausted (see {@link #setWhenExhaustedAction})
* @param testOnBorrow whether or not to validate objects before they are returned by the {@link #borrowObject} method (see {@link #setTestOnBorrow})
* @param testOnReturn whether or not to validate objects after they are returned to the {@link #returnObject} method (see {@link #setTestOnReturn})
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory factory, int maxActive, byte whenExhaustedAction, long maxWait, boolean testOnBorrow, boolean testOnReturn) {
this(factory,maxActive,whenExhaustedAction,maxWait,DEFAULT_MAX_IDLE,testOnBorrow,testOnReturn,DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS,DEFAULT_NUM_TESTS_PER_EVICTION_RUN,DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS,DEFAULT_TEST_WHILE_IDLE);
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using the specified values.
* @param factory the <code>KeyedPoolableObjectFactory</code> to use to create, validate, and destroy objects if not <code>null</code>
* @param maxActive the maximum number of objects that can be borrowed from me at one time (see {@link #setMaxActive})
* @param whenExhaustedAction the action to take when the pool is exhausted (see {@link #setWhenExhaustedAction})
* @param maxWait the maximum amount of time to wait for an idle object when the pool is exhausted and <code>whenExhaustedAction</code> is {@link #WHEN_EXHAUSTED_BLOCK} (otherwise ignored) (see {@link #setMaxWait})
* @param maxIdle the maximum number of idle objects in my pool (see {@link #setMaxIdle})
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory factory, int maxActive, byte whenExhaustedAction, long maxWait, int maxIdle) {
this(factory,maxActive,whenExhaustedAction,maxWait,maxIdle,DEFAULT_TEST_ON_BORROW,DEFAULT_TEST_ON_RETURN,DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS,DEFAULT_NUM_TESTS_PER_EVICTION_RUN,DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS,DEFAULT_TEST_WHILE_IDLE);
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using the specified values.
* @param factory the <code>KeyedPoolableObjectFactory</code> to use to create, validate, and destroy objects if not <code>null</code>
* @param maxActive the maximum number of objects that can be borrowed from me at one time (see {@link #setMaxActive})
* @param whenExhaustedAction the action to take when the pool is exhausted (see {@link #setWhenExhaustedAction})
* @param maxWait the maximum amount of time to wait for an idle object when the pool is exhausted and <code>whenExhaustedAction</code> is {@link #WHEN_EXHAUSTED_BLOCK} (otherwise ignored) (see {@link #getMaxWait})
* @param maxIdle the maximum number of idle objects in my pool (see {@link #setMaxIdle})
* @param testOnBorrow whether or not to validate objects before they are returned by the {@link #borrowObject} method (see {@link #setTestOnBorrow})
* @param testOnReturn whether or not to validate objects after they are returned to the {@link #returnObject} method (see {@link #setTestOnReturn})
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory factory, int maxActive, byte whenExhaustedAction, long maxWait, int maxIdle, boolean testOnBorrow, boolean testOnReturn) {
this(factory,maxActive,whenExhaustedAction,maxWait,maxIdle,testOnBorrow,testOnReturn,DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS,DEFAULT_NUM_TESTS_PER_EVICTION_RUN,DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS,DEFAULT_TEST_WHILE_IDLE);
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using the specified values.
* @param factory the <code>KeyedPoolableObjectFactory</code> to use to create, validate, and destroy objects if not <code>null</code>
* @param maxActive the maximum number of objects that can be borrowed from me at one time (see {@link #setMaxActive})
* @param whenExhaustedAction the action to take when the pool is exhausted (see {@link #setWhenExhaustedAction})
* @param maxWait the maximum amount of time to wait for an idle object when the pool is exhausted and <code>whenExhaustedAction</code> is {@link #WHEN_EXHAUSTED_BLOCK} (otherwise ignored) (see {@link #setMaxWait})
* @param maxIdle the maximum number of idle objects in my pool (see {@link #setMaxIdle})
* @param testOnBorrow whether or not to validate objects before they are returned by the {@link #borrowObject} method (see {@link #setTestOnBorrow})
* @param testOnReturn whether or not to validate objects after they are returned to the {@link #returnObject} method (see {@link #setTestOnReturn})
* @param timeBetweenEvictionRunsMillis the amount of time (in milliseconds) to sleep between examining idle objects for eviction (see {@link #setTimeBetweenEvictionRunsMillis})
* @param numTestsPerEvictionRun the number of idle objects to examine per run within the idle object eviction thread (if any) (see {@link #setNumTestsPerEvictionRun})
* @param minEvictableIdleTimeMillis the minimum number of milliseconds an object can sit idle in the pool before it is eligible for eviction (see {@link #setMinEvictableIdleTimeMillis})
* @param testWhileIdle whether or not to validate objects in the idle object eviction thread, if any (see {@link #setTestWhileIdle})
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory factory, int maxActive, byte whenExhaustedAction, long maxWait, int maxIdle, boolean testOnBorrow, boolean testOnReturn, long timeBetweenEvictionRunsMillis, int numTestsPerEvictionRun, long minEvictableIdleTimeMillis, boolean testWhileIdle) {
this(factory, maxActive, whenExhaustedAction, maxWait, maxIdle, GenericKeyedObjectPool.DEFAULT_MAX_TOTAL, testOnBorrow, testOnReturn, timeBetweenEvictionRunsMillis, numTestsPerEvictionRun, minEvictableIdleTimeMillis, testWhileIdle);
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using the specified values.
* @param factory the <code>KeyedPoolableObjectFactory</code> to use to create, validate, and destroy objects if not <code>null</code>
* @param maxActive the maximum number of objects that can be borrowed from me at one time (see {@link #setMaxActive})
* @param whenExhaustedAction the action to take when the pool is exhausted (see {@link #setWhenExhaustedAction})
* @param maxWait the maximum amount of time to wait for an idle object when the pool is exhausted and <code>whenExhaustedAction</code> is {@link #WHEN_EXHAUSTED_BLOCK} (otherwise ignored) (see {@link #setMaxWait})
* @param maxIdle the maximum number of idle objects in my pool (see {@link #setMaxIdle})
     * @param maxTotal the maximum number of objects that can exist at one time (see {@link #setMaxTotal})
* @param testOnBorrow whether or not to validate objects before they are returned by the {@link #borrowObject} method (see {@link #setTestOnBorrow})
* @param testOnReturn whether or not to validate objects after they are returned to the {@link #returnObject} method (see {@link #setTestOnReturn})
* @param timeBetweenEvictionRunsMillis the amount of time (in milliseconds) to sleep between examining idle objects for eviction (see {@link #setTimeBetweenEvictionRunsMillis})
* @param numTestsPerEvictionRun the number of idle objects to examine per run within the idle object eviction thread (if any) (see {@link #setNumTestsPerEvictionRun})
* @param minEvictableIdleTimeMillis the minimum number of milliseconds an object can sit idle in the pool before it is eligible for eviction (see {@link #setMinEvictableIdleTimeMillis})
* @param testWhileIdle whether or not to validate objects in the idle object eviction thread, if any (see {@link #setTestWhileIdle})
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory factory, int maxActive, byte whenExhaustedAction, long maxWait, int maxIdle, int maxTotal, boolean testOnBorrow, boolean testOnReturn, long timeBetweenEvictionRunsMillis, int numTestsPerEvictionRun, long minEvictableIdleTimeMillis, boolean testWhileIdle) {
this(factory, maxActive, whenExhaustedAction, maxWait, maxIdle, maxTotal, GenericKeyedObjectPool.DEFAULT_MIN_IDLE, testOnBorrow, testOnReturn, timeBetweenEvictionRunsMillis, numTestsPerEvictionRun, minEvictableIdleTimeMillis, testWhileIdle);
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using the specified values.
* @param factory the <code>KeyedPoolableObjectFactory</code> to use to create, validate, and destroy objects if not <code>null</code>
* @param maxActive the maximum number of objects that can be borrowed from me at one time (see {@link #setMaxActive})
* @param whenExhaustedAction the action to take when the pool is exhausted (see {@link #setWhenExhaustedAction})
* @param maxWait the maximum amount of time to wait for an idle object when the pool is exhausted and <code>whenExhaustedAction</code> is {@link #WHEN_EXHAUSTED_BLOCK} (otherwise ignored) (see {@link #setMaxWait})
* @param maxIdle the maximum number of idle objects in my pool (see {@link #setMaxIdle})
     * @param maxTotal the maximum number of objects that can exist at one time (see {@link #setMaxTotal})
* @param minIdle the minimum number of idle objects to have in the pool at any one time (see {@link #setMinIdle})
* @param testOnBorrow whether or not to validate objects before they are returned by the {@link #borrowObject} method (see {@link #setTestOnBorrow})
* @param testOnReturn whether or not to validate objects after they are returned to the {@link #returnObject} method (see {@link #setTestOnReturn})
* @param timeBetweenEvictionRunsMillis the amount of time (in milliseconds) to sleep between examining idle objects for eviction (see {@link #setTimeBetweenEvictionRunsMillis})
* @param numTestsPerEvictionRun the number of idle objects to examine per run within the idle object eviction thread (if any) (see {@link #setNumTestsPerEvictionRun})
* @param minEvictableIdleTimeMillis the minimum number of milliseconds an object can sit idle in the pool before it is eligible for eviction (see {@link #setMinEvictableIdleTimeMillis})
* @param testWhileIdle whether or not to validate objects in the idle object eviction thread, if any (see {@link #setTestWhileIdle})
* @since Pool 1.3
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory factory, int maxActive, byte whenExhaustedAction, long maxWait, int maxIdle, int maxTotal, int minIdle, boolean testOnBorrow, boolean testOnReturn, long timeBetweenEvictionRunsMillis, int numTestsPerEvictionRun, long minEvictableIdleTimeMillis, boolean testWhileIdle) {
this(factory, maxActive, whenExhaustedAction, maxWait, maxIdle, maxTotal, minIdle, testOnBorrow, testOnReturn, timeBetweenEvictionRunsMillis, numTestsPerEvictionRun, minEvictableIdleTimeMillis, testWhileIdle, DEFAULT_LIFO);
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using the specified values.
* @param factory the <code>KeyedPoolableObjectFactory</code> to use to create, validate, and destroy objects if not <code>null</code>
* @param maxActive the maximum number of objects that can be borrowed from me at one time (see {@link #setMaxActive})
* @param whenExhaustedAction the action to take when the pool is exhausted (see {@link #setWhenExhaustedAction})
* @param maxWait the maximum amount of time to wait for an idle object when the pool is exhausted and <code>whenExhaustedAction</code> is {@link #WHEN_EXHAUSTED_BLOCK} (otherwise ignored) (see {@link #setMaxWait})
* @param maxIdle the maximum number of idle objects in my pool (see {@link #setMaxIdle})
     * @param maxTotal the maximum number of objects that can exist at one time (see {@link #setMaxTotal})
* @param minIdle the minimum number of idle objects to have in the pool at any one time (see {@link #setMinIdle})
* @param testOnBorrow whether or not to validate objects before they are returned by the {@link #borrowObject} method (see {@link #setTestOnBorrow})
* @param testOnReturn whether or not to validate objects after they are returned to the {@link #returnObject} method (see {@link #setTestOnReturn})
* @param timeBetweenEvictionRunsMillis the amount of time (in milliseconds) to sleep between examining idle objects for eviction (see {@link #setTimeBetweenEvictionRunsMillis})
* @param numTestsPerEvictionRun the number of idle objects to examine per run within the idle object eviction thread (if any) (see {@link #setNumTestsPerEvictionRun})
* @param minEvictableIdleTimeMillis the minimum number of milliseconds an object can sit idle in the pool before it is eligible for eviction (see {@link #setMinEvictableIdleTimeMillis})
* @param testWhileIdle whether or not to validate objects in the idle object eviction thread, if any (see {@link #setTestWhileIdle})
* @param lifo whether or not the pools behave as LIFO (last in first out) queues (see {@link #setLifo})
* @since Pool 1.4
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory factory, int maxActive, byte whenExhaustedAction, long maxWait, int maxIdle, int maxTotal, int minIdle, boolean testOnBorrow, boolean testOnReturn, long timeBetweenEvictionRunsMillis, int numTestsPerEvictionRun, long minEvictableIdleTimeMillis, boolean testWhileIdle, boolean lifo) {
_factory = factory;
_maxActive = maxActive;
_lifo = lifo;
switch(whenExhaustedAction) {
case WHEN_EXHAUSTED_BLOCK:
case WHEN_EXHAUSTED_FAIL:
case WHEN_EXHAUSTED_GROW:
_whenExhaustedAction = whenExhaustedAction;
break;
default:
throw new IllegalArgumentException("whenExhaustedAction " + whenExhaustedAction + " not recognized.");
}
_maxWait = maxWait;
_maxIdle = maxIdle;
_maxTotal = maxTotal;
_minIdle = minIdle;
_testOnBorrow = testOnBorrow;
_testOnReturn = testOnReturn;
_timeBetweenEvictionRunsMillis = timeBetweenEvictionRunsMillis;
_numTestsPerEvictionRun = numTestsPerEvictionRun;
_minEvictableIdleTimeMillis = minEvictableIdleTimeMillis;
_testWhileIdle = testWhileIdle;
_poolMap = new HashMap();
_poolList = new CursorableLinkedList();
startEvictor(_timeBetweenEvictionRunsMillis);
}
//--- public methods ---------------------------------------------
//--- configuration methods --------------------------------------
/**
* Returns the cap on the number of active instances per key.
* A negative value indicates no limit.
* @return the cap on the number of active instances per key.
* @see #setMaxActive
*/
public synchronized int getMaxActive() {
return _maxActive;
}
/**
* Sets the cap on the number of active instances per key.
* @param maxActive The cap on the number of active instances per key.
* Use a negative value for no limit.
* @see #getMaxActive
*/
public synchronized void setMaxActive(int maxActive) {
_maxActive = maxActive;
notifyAll();
}
/**
* Returns the overall maximum number of objects (across pools) that can
* exist at one time. A negative value indicates no limit.
* @return the maximum number of instances in circulation at one time.
* @see #setMaxTotal
*/
public synchronized int getMaxTotal() {
return _maxTotal;
}
/**
* Sets the cap on the total number of instances from all pools combined.
* When <code>maxTotal</code> is set to a
* positive value and {@link #borrowObject borrowObject} is invoked
* when at the limit with no idle instances available, an attempt is made to
* create room by clearing the oldest 15% of the elements from the keyed
* pools.
*
* @param maxTotal The cap on the total number of instances across pools.
* Use a negative value for no limit.
* @see #getMaxTotal
*/
public synchronized void setMaxTotal(int maxTotal) {
_maxTotal = maxTotal;
notifyAll();
}
/**
* Returns the action to take when the {@link #borrowObject} method
* is invoked when the pool is exhausted (the maximum number
* of "active" objects has been reached).
*
* @return one of {@link #WHEN_EXHAUSTED_BLOCK},
* {@link #WHEN_EXHAUSTED_FAIL} or {@link #WHEN_EXHAUSTED_GROW}
* @see #setWhenExhaustedAction
*/
public synchronized byte getWhenExhaustedAction() {
return _whenExhaustedAction;
}
/**
* Sets the action to take when the {@link #borrowObject} method
* is invoked when the pool is exhausted (the maximum number
* of "active" objects has been reached).
*
* @param whenExhaustedAction the action code, which must be one of
* {@link #WHEN_EXHAUSTED_BLOCK}, {@link #WHEN_EXHAUSTED_FAIL},
* or {@link #WHEN_EXHAUSTED_GROW}
* @see #getWhenExhaustedAction
*/
public synchronized void setWhenExhaustedAction(byte whenExhaustedAction) {
switch(whenExhaustedAction) {
case WHEN_EXHAUSTED_BLOCK:
case WHEN_EXHAUSTED_FAIL:
case WHEN_EXHAUSTED_GROW:
_whenExhaustedAction = whenExhaustedAction;
notifyAll();
break;
default:
throw new IllegalArgumentException("whenExhaustedAction " + whenExhaustedAction + " not recognized.");
}
}
/**
* Returns the maximum amount of time (in milliseconds) the
* {@link #borrowObject} method should block before throwing
* an exception when the pool is exhausted and the
* {@link #setWhenExhaustedAction "when exhausted" action} is
* {@link #WHEN_EXHAUSTED_BLOCK}.
*
* When less than or equal to 0, the {@link #borrowObject} method
* may block indefinitely.
*
* @return the maximum number of milliseconds borrowObject will block.
* @see #setMaxWait
* @see #setWhenExhaustedAction
* @see #WHEN_EXHAUSTED_BLOCK
*/
public synchronized long getMaxWait() {
return _maxWait;
}
/**
* Sets the maximum amount of time (in milliseconds) the
* {@link #borrowObject} method should block before throwing
* an exception when the pool is exhausted and the
* {@link #setWhenExhaustedAction "when exhausted" action} is
* {@link #WHEN_EXHAUSTED_BLOCK}.
*
* When less than or equal to 0, the {@link #borrowObject} method
* may block indefinitely.
*
* @param maxWait the maximum number of milliseconds borrowObject will block or negative for indefinitely.
* @see #getMaxWait
* @see #setWhenExhaustedAction
* @see #WHEN_EXHAUSTED_BLOCK
*/
public synchronized void setMaxWait(long maxWait) {
_maxWait = maxWait;
}
/**
* Returns the cap on the number of "idle" instances per key.
* @return the maximum number of "idle" instances that can be held
* in a given keyed pool.
* @see #setMaxIdle
*/
public synchronized int getMaxIdle() {
return _maxIdle;
}
/**
* Sets the cap on the number of "idle" instances in the pool.
* If maxIdle is set too low on heavily loaded systems it is possible you
* will see objects being destroyed and almost immediately new objects
* being created. This is a result of the active threads momentarily
     * returning objects faster than they are requesting them, causing the
     * number of idle objects to rise above maxIdle. The best value for maxIdle
     * for heavily loaded systems will vary but the default is a good starting
* point.
* @param maxIdle the maximum number of "idle" instances that can be held
* in a given keyed pool. Use a negative value for no limit.
* @see #getMaxIdle
* @see #DEFAULT_MAX_IDLE
*/
public synchronized void setMaxIdle(int maxIdle) {
_maxIdle = maxIdle;
notifyAll();
}
/**
* Sets the minimum number of idle objects to maintain in each of the keyed
* pools. This setting has no effect unless
* <code>timeBetweenEvictionRunsMillis > 0</code> and attempts to ensure
* that each pool has the required minimum number of instances are only
* made during idle object eviction runs.
     * @param poolSize - The minimum size of each keyed pool
* @since Pool 1.3
* @see #getMinIdle
* @see #setTimeBetweenEvictionRunsMillis
*/
public synchronized void setMinIdle(int poolSize) {
_minIdle = poolSize;
}
/**
* Returns the minimum number of idle objects to maintain in each of the keyed
* pools. This setting has no effect unless
* <code>timeBetweenEvictionRunsMillis > 0</code> and attempts to ensure
* that each pool has the required minimum number of instances are only
* made during idle object eviction runs.
     * @return minimum size of each keyed pool
* @since Pool 1.3
* @see #setTimeBetweenEvictionRunsMillis
*/
public synchronized int getMinIdle() {
return _minIdle;
}
/**
* When <code>true</code>, objects will be
* {@link org.apache.commons.pool.PoolableObjectFactory#validateObject validated}
* before being returned by the {@link #borrowObject}
* method. If the object fails to validate,
* it will be dropped from the pool, and we will attempt
* to borrow another.
*
* @return <code>true</code> if objects are validated before being borrowed.
* @see #setTestOnBorrow
*/
public boolean getTestOnBorrow() {
return _testOnBorrow;
}
/**
* When <code>true</code>, objects will be
* {@link org.apache.commons.pool.PoolableObjectFactory#validateObject validated}
* before being returned by the {@link #borrowObject}
* method. If the object fails to validate,
* it will be dropped from the pool, and we will attempt
* to borrow another.
*
* @param testOnBorrow whether object should be validated before being returned by borrowObject.
* @see #getTestOnBorrow
*/
public void setTestOnBorrow(boolean testOnBorrow) {
_testOnBorrow = testOnBorrow;
}
/**
* When <code>true</code>, objects will be
* {@link org.apache.commons.pool.PoolableObjectFactory#validateObject validated}
* before being returned to the pool within the
* {@link #returnObject}.
*
* @return <code>true</code> when objects will be validated before being returned.
* @see #setTestOnReturn
*/
public boolean getTestOnReturn() {
return _testOnReturn;
}
/**
* When <code>true</code>, objects will be
* {@link org.apache.commons.pool.PoolableObjectFactory#validateObject validated}
* before being returned to the pool within the
* {@link #returnObject}.
*
* @param testOnReturn <code>true</code> so objects will be validated before being returned.
* @see #getTestOnReturn
*/
public void setTestOnReturn(boolean testOnReturn) {
_testOnReturn = testOnReturn;
}
/**
* Returns the number of milliseconds to sleep between runs of the
* idle object evictor thread.
* When non-positive, no idle object evictor thread will be
* run.
*
* @return milliseconds to sleep between evictor runs.
* @see #setTimeBetweenEvictionRunsMillis
*/
public synchronized long getTimeBetweenEvictionRunsMillis() {
return _timeBetweenEvictionRunsMillis;
}
/**
* Sets the number of milliseconds to sleep between runs of the
* idle object evictor thread.
* When non-positive, no idle object evictor thread will be
* run.
*
* @param timeBetweenEvictionRunsMillis milliseconds to sleep between evictor runs.
* @see #getTimeBetweenEvictionRunsMillis
*/
public synchronized void setTimeBetweenEvictionRunsMillis(long timeBetweenEvictionRunsMillis) {
_timeBetweenEvictionRunsMillis = timeBetweenEvictionRunsMillis;
startEvictor(_timeBetweenEvictionRunsMillis);
}
/**
* Returns the number of objects to examine during each run of the
* idle object evictor thread (if any).
*
* @return number of objects to examine each eviction run.
* @see #setNumTestsPerEvictionRun
* @see #setTimeBetweenEvictionRunsMillis
*/
public synchronized int getNumTestsPerEvictionRun() {
return _numTestsPerEvictionRun;
}
/**
* Sets the number of objects to examine during each run of the
* idle object evictor thread (if any).
* <p>
* When a negative value is supplied, <code>ceil({@link #getNumIdle()})/abs({@link #getNumTestsPerEvictionRun})</code>
* tests will be run. I.e., when the value is <code>-n</code>, roughly one <code>n</code>th of the
* idle objects will be tested per run.
*
* @param numTestsPerEvictionRun number of objects to examine each eviction run.
* @see #getNumTestsPerEvictionRun
* @see #setTimeBetweenEvictionRunsMillis
*/
public synchronized void setNumTestsPerEvictionRun(int numTestsPerEvictionRun) {
_numTestsPerEvictionRun = numTestsPerEvictionRun;
}
/**
* Returns the minimum amount of time an object may sit idle in the pool
* before it is eligible for eviction by the idle object evictor
* (if any).
*
* @return minimum amount of time an object may sit idle in the pool before it is eligible for eviction.
* @see #setMinEvictableIdleTimeMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
public synchronized long getMinEvictableIdleTimeMillis() {
return _minEvictableIdleTimeMillis;
}
/**
* Sets the minimum amount of time an object may sit idle in the pool
* before it is eligible for eviction by the idle object evictor
* (if any).
* When non-positive, no objects will be evicted from the pool
* due to idle time alone.
*
* @param minEvictableIdleTimeMillis minimum amount of time an object may sit idle in the pool before it is eligible for eviction.
* @see #getMinEvictableIdleTimeMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
public synchronized void setMinEvictableIdleTimeMillis(long minEvictableIdleTimeMillis) {
_minEvictableIdleTimeMillis = minEvictableIdleTimeMillis;
}
/**
* When <code>true</code>, objects will be
* {@link org.apache.commons.pool.PoolableObjectFactory#validateObject validated}
* by the idle object evictor (if any). If an object
* fails to validate, it will be dropped from the pool.
*
     * @return <code>true</code> when objects will be validated by the idle object evictor.
* @see #setTestWhileIdle
* @see #setTimeBetweenEvictionRunsMillis
*/
public synchronized boolean getTestWhileIdle() {
return _testWhileIdle;
}
/**
* When <code>true</code>, objects will be
* {@link org.apache.commons.pool.PoolableObjectFactory#validateObject validated}
* by the idle object evictor (if any). If an object
* fails to validate, it will be dropped from the pool.
*
     * @param testWhileIdle <code>true</code> so objects will be validated by the idle object evictor.
* @see #getTestWhileIdle
* @see #setTimeBetweenEvictionRunsMillis
*/
public synchronized void setTestWhileIdle(boolean testWhileIdle) {
_testWhileIdle = testWhileIdle;
}
/**
* Sets the configuration.
* @param conf the new configuration to use.
* @see GenericKeyedObjectPool.Config
*/
public synchronized void setConfig(GenericKeyedObjectPool.Config conf) {
setMaxIdle(conf.maxIdle);
setMaxActive(conf.maxActive);
setMaxTotal(conf.maxTotal);
setMinIdle(conf.minIdle);
setMaxWait(conf.maxWait);
setWhenExhaustedAction(conf.whenExhaustedAction);
setTestOnBorrow(conf.testOnBorrow);
setTestOnReturn(conf.testOnReturn);
setTestWhileIdle(conf.testWhileIdle);
setNumTestsPerEvictionRun(conf.numTestsPerEvictionRun);
setMinEvictableIdleTimeMillis(conf.minEvictableIdleTimeMillis);
setTimeBetweenEvictionRunsMillis(conf.timeBetweenEvictionRunsMillis);
}
/**
* Whether or not the idle object pools act as LIFO queues. True means
* that borrowObject returns the most recently used ("last in") idle object
* in a pool (if there are idle instances available). False means that
* the pools behave as FIFO queues - objects are taken from idle object
* pools in the order that they are returned.
*
* @return <code>true</code> if the pools are configured to act as LIFO queues
* @since 1.4
*/
public synchronized boolean getLifo() {
return _lifo;
}
/**
* Sets the LIFO property of the pools. True means that borrowObject returns
* the most recently used ("last in") idle object in a pool (if there are
* idle instances available). False means that the pools behave as FIFO
* queues - objects are taken from idle object pools in the order that
* they are returned.
*
* @param lifo the new value for the lifo property
* @since 1.4
*/
public synchronized void setLifo(boolean lifo) {
this._lifo = lifo;
}
//-- ObjectPool methods ------------------------------------------
public Object borrowObject(Object key) throws Exception {
long starttime = System.currentTimeMillis();
for(;;) {
ObjectTimestampPair pair = null;
ObjectQueue pool = null;
synchronized (this) {
assertOpen();
pool = (ObjectQueue)(_poolMap.get(key));
if(null == pool) {
pool = new ObjectQueue();
_poolMap.put(key,pool);
_poolList.add(key);
}
// if there are any sleeping, just grab one of those
try {
pair = (ObjectTimestampPair)(pool.queue.removeFirst());
if(null != pair) {
_totalIdle--;
}
} catch(NoSuchElementException e) { /* ignored */
}
// otherwise
if(null == pair) {
// if there is a totalMaxActive and we are at the limit then
// we have to make room
if ((_maxTotal > 0)
&& (_totalActive + _totalIdle + _totalInternalProcessing >= _maxTotal)) {
clearOldest();
}
// check if we can create one
// (note we know that the num sleeping is 0, else we wouldn't be here)
if ((_maxActive < 0 || pool.activeCount + pool.internalProcessingCount < _maxActive) &&
(_maxTotal < 0 || _totalActive + _totalIdle + _totalInternalProcessing < _maxTotal)) {
// allow new object to be created
} else {
// the pool is exhausted
switch(_whenExhaustedAction) {
case WHEN_EXHAUSTED_GROW:
// allow new object to be created
break;
case WHEN_EXHAUSTED_FAIL:
throw new NoSuchElementException("Pool exhausted");
case WHEN_EXHAUSTED_BLOCK:
try {
if(_maxWait <= 0) {
wait();
} else {
// this code may be executed again after a notify then continue cycle
// so, need to calculate the amount of time to wait
final long elapsed = (System.currentTimeMillis() - starttime);
final long waitTime = _maxWait - elapsed;
if (waitTime > 0)
{
wait(waitTime);
}
}
} catch(InterruptedException e) {
Thread.currentThread().interrupt();
throw e;
}
if(_maxWait > 0 && ((System.currentTimeMillis() - starttime) >= _maxWait)) {
throw new NoSuchElementException("Timeout waiting for idle object");
} else {
continue; // keep looping
}
default:
throw new IllegalArgumentException("whenExhaustedAction " + _whenExhaustedAction + " not recognized.");
}
}
}
pool.incrementActiveCount();
}
boolean newlyCreated = false;
if (null == pair) {
try {
Object obj = _factory.makeObject(key);
pair = new ObjectTimestampPair(obj);
newlyCreated = true;
} finally {
if (!newlyCreated) {
// object cannot be created
synchronized (this) {
pool.decrementActiveCount();
notifyAll();
}
}
}
}
// activate & validate the object
try {
_factory.activateObject(key, pair.value);
if (_testOnBorrow && !_factory.validateObject(key, pair.value)) {
throw new Exception("ValidateObject failed");
}
return pair.value;
} catch (Throwable e) {
// object cannot be activated or is invalid
try {
_factory.destroyObject(key,pair.value);
} catch (Throwable e2) {
// cannot destroy broken object
}
synchronized (this) {
pool.decrementActiveCount();
notifyAll();
}
if(newlyCreated) {
throw new NoSuchElementException(
"Could not create a validated object, cause: "
+ e.getMessage());
}
else {
continue; // keep looping
}
}
}
}
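    // Typical borrow/use/return cycle (illustrative sketch; "pool" and "key"
    // are assumed caller-side names):
    //
    //   Object obj = null;
    //   try {
    //       obj = pool.borrowObject(key);
    //       // ... use obj ...
    //   } finally {
    //       if (obj != null) {
    //           pool.returnObject(key, obj);
    //       }
    //   }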
/**
* Clears the pool, removing all pooled instances.
*/
public synchronized void clear() {
for(Iterator entries = _poolMap.entrySet().iterator(); entries.hasNext(); ) {
final Map.Entry entry = (Map.Entry)entries.next();
final Object key = entry.getKey();
final CursorableLinkedList list = ((ObjectQueue)(entry.getValue())).queue;
for(Iterator it = list.iterator(); it.hasNext(); ) {
try {
_factory.destroyObject(key,((ObjectTimestampPair)(it.next())).value);
} catch(Exception e) {
// ignore error, keep destroying the rest
}
it.remove();
}
}
_poolMap.clear();
_poolList.clear();
_totalIdle = 0;
notifyAll();
}
/**
     * Clears the oldest 15% of objects in the pool.  The method sorts the
     * idle objects into a TreeMap, ordered oldest first, and then iterates
     * the first 15% for removal.
* @since Pool 1.3
*/
public synchronized void clearOldest() {
// build sorted map of idle objects
final Map map = new TreeMap();
for (Iterator keyiter = _poolMap.keySet().iterator(); keyiter.hasNext();) {
final Object key = keyiter.next();
final CursorableLinkedList list = ((ObjectQueue)_poolMap.get(key)).queue;
for (Iterator it = list.iterator(); it.hasNext();) {
                // each entry put into the map uses the ObjectTimestampPair
                // as its key, so entries are sorted by the tstamp field;
                // each value in the map is the pool key the pair belongs to.
map.put(it.next(), key);
}
}
        // Now iterate the sorted map and destroy the first 15%, plus one so at least one entry is removed
Set setPairKeys = map.entrySet();
int itemsToRemove = ((int) (map.size() * 0.15)) + 1;
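        // e.g. with 40 idle objects across all pools,
        // (int) (40 * 0.15) + 1 == 7 entries are destroyed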
Iterator iter = setPairKeys.iterator();
while (iter.hasNext() && itemsToRemove > 0) {
Map.Entry entry = (Map.Entry) iter.next();
            // the naming is inverted here: in the map each key is the
            // ObjectTimestampPair, because it carries the timestamp ordering,
            // and each value is the key of the keyed pool it belongs to.
Object key = entry.getValue();
ObjectTimestampPair pairTimeStamp = (ObjectTimestampPair) entry.getKey();
final CursorableLinkedList list =
((ObjectQueue)(_poolMap.get(key))).queue;
list.remove(pairTimeStamp);
try {
_factory.destroyObject(key, pairTimeStamp.value);
} catch (Exception e) {
// ignore error, keep destroying the rest
}
// if that was the last object for that key, drop that pool
if (list.isEmpty()) {
_poolMap.remove(key);
_poolList.remove(key);
}
_totalIdle--;
itemsToRemove--;
}
notifyAll();
}
/**
* Clears the specified pool, removing all pooled instances corresponding to the given <code>key</code>.
*
* @param key the key to clear
*/
public synchronized void clear(Object key) {
final ObjectQueue pool = (ObjectQueue)(_poolMap.remove(key));
if(null == pool) {
return;
} else {
_poolList.remove(key);
for(Iterator it = pool.queue.iterator(); it.hasNext(); ) {
try {
_factory.destroyObject(key,((ObjectTimestampPair)(it.next())).value);
} catch(Exception e) {
// ignore error, keep destroying the rest
}
it.remove();
_totalIdle--;
}
}
notifyAll();
}
/**
     * Returns the total number of instances currently borrowed from this pool but not yet returned.
*
* @return the total number of instances currently borrowed from this pool
*/
public synchronized int getNumActive() {
return _totalActive;
}
/**
* Returns the total number of instances currently idle in this pool.
*
* @return the total number of instances currently idle in this pool
*/
public synchronized int getNumIdle() {
return _totalIdle;
}
/**
* Returns the number of instances currently borrowed from but not yet returned
* to the pool corresponding to the given <code>key</code>.
*
* @param key the key to query
* @return the number of instances corresponding to the given <code>key</code> currently borrowed in this pool
*/
public synchronized int getNumActive(Object key) {
final ObjectQueue pool = (ObjectQueue)(_poolMap.get(key));
return pool != null ? pool.activeCount : 0;
}
/**
* Returns the number of instances corresponding to the given <code>key</code> currently idle in this pool.
*
* @param key the key to query
* @return the number of instances corresponding to the given <code>key</code> currently idle in this pool
*/
public synchronized int getNumIdle(Object key) {
final ObjectQueue pool = (ObjectQueue)(_poolMap.get(key));
return pool != null ? pool.queue.size() : 0;
}
public void returnObject(Object key, Object obj) throws Exception {
try {
addObjectToPool(key, obj, true);
} catch (Exception e) {
if (_factory != null) {
try {
_factory.destroyObject(key, obj);
} catch (Exception e2) {
// swallowed
}
// TODO: Correctness here depends on control in addObjectToPool.
// These two methods should be refactored, removing the
// "behavior flag",decrementNumActive, from addObjectToPool.
ObjectQueue pool = (ObjectQueue) (_poolMap.get(key));
if (pool != null) {
synchronized(this) {
pool.decrementActiveCount();
notifyAll();
}
}
}
}
}
private void addObjectToPool(Object key, Object obj,
boolean decrementNumActive) throws Exception {
// if we need to validate this object, do so
boolean success = true; // whether or not this object passed validation
if(_testOnReturn && !_factory.validateObject(key, obj)) {
success = false;
} else {
_factory.passivateObject(key, obj);
}
boolean shouldDestroy = !success;
ObjectQueue pool;
// Add instance to pool if there is room and it has passed validation
        // (if testOnReturn is set)
synchronized (this) {
// grab the pool (list) of objects associated with the given key
pool = (ObjectQueue) (_poolMap.get(key));
// if it doesn't exist, create it
if(null == pool) {
pool = new ObjectQueue();
_poolMap.put(key, pool);
_poolList.add(key);
}
if (isClosed()) {
shouldDestroy = true;
} else {
// if there's no space in the pool, flag the object for destruction
// else if we passivated successfully, return it to the pool
if(_maxIdle >= 0 && (pool.queue.size() >= _maxIdle)) {
shouldDestroy = true;
} else if(success) {
// borrowObject always takes the first element from the queue,
// so for LIFO, push on top, FIFO add to end
if (_lifo) {
pool.queue.addFirst(new ObjectTimestampPair(obj));
} else {
pool.queue.addLast(new ObjectTimestampPair(obj));
}
_totalIdle++;
}
}
}
// Destroy the instance if necessary
if(shouldDestroy) {
try {
_factory.destroyObject(key, obj);
} catch(Exception e) {
                // ignored; nothing sensible can be done if destroy fails
}
}
// Decrement active count *after* destroy if applicable
if (decrementNumActive) {
synchronized(this) {
pool.decrementActiveCount();
notifyAll();
}
}
}
public void invalidateObject(Object key, Object obj) throws Exception {
try {
_factory.destroyObject(key, obj);
} finally {
synchronized (this) {
ObjectQueue pool = (ObjectQueue) (_poolMap.get(key));
if(null == pool) {
pool = new ObjectQueue();
_poolMap.put(key, pool);
_poolList.add(key);
}
pool.decrementActiveCount();
notifyAll(); // _totalActive has changed
}
}
}
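    // Invalidation sketch (illustrative; "pool", "key", "conn" and
    // "useConnection" are assumed caller-side names). Use this instead of
    // returnObject when a borrowed instance turns out to be broken:
    //
    //   try {
    //       useConnection(conn);
    //   } catch (Exception e) {
    //       pool.invalidateObject(key, conn); // destroyed, active count decremented
    //       conn = null;                      // never return an invalidated object
    //   }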
/**
* Create an object using the {@link KeyedPoolableObjectFactory#makeObject factory},
* passivate it, and then place it in the idle object pool.
* <code>addObject</code> is useful for "pre-loading" a pool with idle objects.
*
* @param key the key a new instance should be added to
* @throws Exception when {@link KeyedPoolableObjectFactory#makeObject} fails.
* @throws IllegalStateException when no {@link #setFactory factory} has been set or after {@link #close} has been called on this pool.
*/
public void addObject(Object key) throws Exception {
assertOpen();
if (_factory == null) {
throw new IllegalStateException("Cannot add objects without a factory.");
}
Object obj = _factory.makeObject(key);
try {
assertOpen();
addObjectToPool(key, obj, false);
} catch (IllegalStateException ex) { // Pool closed
try {
_factory.destroyObject(key, obj);
} catch (Exception ex2) {
// swallow
}
throw ex;
}
}
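    // Pre-loading sketch (illustrative; "pool" and "key" are assumed
    // caller-side names):
    //
    //   for (int i = 0; i < 5; i++) {
    //       pool.addObject(key); // creates, passivates and parks one idle instance
    //   }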
/**
* Registers a key for pool control.
*
* If <code>populateImmediately</code> is <code>true</code> and
     * <code>minIdle > 0</code>, the pool under the given key will be
* populated immediately with <code>minIdle</code> idle instances.
*
* @param key - The key to register for pool control.
* @param populateImmediately - If this is <code>true</code>, the pool
* will be populated immediately.
* @since Pool 1.3
*/
public synchronized void preparePool(Object key, boolean populateImmediately) {
ObjectQueue pool = (ObjectQueue)(_poolMap.get(key));
if (null == pool) {
pool = new ObjectQueue();
_poolMap.put(key,pool);
_poolList.add(key);
}
if (populateImmediately) {
try {
// Create the pooled objects
ensureMinIdle(key);
}
catch (Exception e) {
//Do nothing
}
}
}
public void close() throws Exception {
super.close();
synchronized (this) {
clear();
if(null != _evictionCursor) {
_evictionCursor.close();
_evictionCursor = null;
}
if(null != _evictionKeyCursor) {
_evictionKeyCursor.close();
_evictionKeyCursor = null;
}
startEvictor(-1L);
}
}
public synchronized void setFactory(KeyedPoolableObjectFactory factory) throws IllegalStateException {
assertOpen();
if(0 < getNumActive()) {
throw new IllegalStateException("Objects are already active");
} else {
clear();
_factory = factory;
}
}
/**
* <p>Perform <code>numTests</code> idle object eviction tests, evicting
* examined objects that meet the criteria for eviction. If
* <code>testWhileIdle</code> is true, examined objects are validated
* when visited (and removed if invalid); otherwise only objects that
     * have been idle for more than <code>minEvictableIdleTimeMillis</code>
* are removed.</p>
*
* <p>Successive activations of this method examine objects in keyed pools
* in sequence, cycling through the keys and examining objects in
* oldest-to-youngest order within the keyed pools.</p>
*
* @throws Exception when there is a problem evicting idle objects.
*/
public synchronized void evict() throws Exception {
// Initialize key to last key value
Object key = null;
if (_evictionKeyCursor != null &&
_evictionKeyCursor._lastReturned != null) {
key = _evictionKeyCursor._lastReturned.value();
}
for (int i=0,m=getNumTests(); i<m; i++) {
// make sure pool map is not empty; otherwise do nothing
if (_poolMap == null || _poolMap.size() == 0) {
continue;
}
// if we don't have a key cursor, then create one
if (null == _evictionKeyCursor) {
resetEvictionKeyCursor();
key = null;
}
// if we don't have an object cursor, create one
if (null == _evictionCursor) {
// if the _evictionKeyCursor has a next value, use this key
if (_evictionKeyCursor.hasNext()) {
key = _evictionKeyCursor.next();
resetEvictionObjectCursor(key);
} else {
// Reset the key cursor and try again
resetEvictionKeyCursor();
if (_evictionKeyCursor != null) {
if (_evictionKeyCursor.hasNext()) {
key = _evictionKeyCursor.next();
resetEvictionObjectCursor(key);
}
}
}
}
if (_evictionCursor == null) {
continue; // should never happen; do nothing
}
// If eviction cursor is exhausted, try to move
// to the next key and reset
if((_lifo && !_evictionCursor.hasPrevious()) ||
(!_lifo && !_evictionCursor.hasNext())) {
if (_evictionKeyCursor != null) {
if (_evictionKeyCursor.hasNext()) {
key = _evictionKeyCursor.next();
resetEvictionObjectCursor(key);
} else { // Need to reset Key cursor
resetEvictionKeyCursor();
if (_evictionKeyCursor != null) {
if (_evictionKeyCursor.hasNext()) {
key = _evictionKeyCursor.next();
resetEvictionObjectCursor(key);
}
}
}
}
}
if((_lifo && !_evictionCursor.hasPrevious()) ||
(!_lifo && !_evictionCursor.hasNext())) {
continue; // reset failed, do nothing
}
// if LIFO and the _evictionCursor has a previous object,
// or FIFO and _evictionCursor has a next object, test it
ObjectTimestampPair pair = _lifo ?
(ObjectTimestampPair) _evictionCursor.previous() :
(ObjectTimestampPair) _evictionCursor.next();
boolean removeObject=false;
if((_minEvictableIdleTimeMillis > 0) &&
(System.currentTimeMillis() - pair.tstamp >
_minEvictableIdleTimeMillis)) {
removeObject=true;
}
            if(_testWhileIdle && !removeObject) {
boolean active = false;
try {
_factory.activateObject(key,pair.value);
active = true;
} catch(Exception e) {
removeObject=true;
}
if(active) {
if(!_factory.validateObject(key,pair.value)) {
removeObject=true;
} else {
try {
_factory.passivateObject(key,pair.value);
} catch(Exception e) {
removeObject=true;
}
}
}
}
if(removeObject) {
try {
_evictionCursor.remove();
_totalIdle--;
_factory.destroyObject(key, pair.value);
// Do not remove the key from the _poolList or _poolmap,
// even if the list stored in the _poolMap for this key is
// empty when minIdle > 0.
//
// Otherwise if it was the last object for that key,
// drop that pool
if (_minIdle == 0) {
ObjectQueue objectQueue =
(ObjectQueue)_poolMap.get(key);
if (objectQueue != null &&
objectQueue.queue.isEmpty()) {
_poolMap.remove(key);
_poolList.remove(key);
}
}
} catch(Exception e) {
// ignored
}
}
}
}
/**
* Resets the eviction key cursor and closes any
* associated eviction object cursor
*/
private void resetEvictionKeyCursor() {
if (_evictionKeyCursor != null) {
_evictionKeyCursor.close();
}
_evictionKeyCursor = _poolList.cursor();
if (null != _evictionCursor) {
_evictionCursor.close();
_evictionCursor = null;
}
}
/**
* Resets the eviction object cursor for the given key
*
* @param key eviction key
*/
private void resetEvictionObjectCursor(Object key) {
if (_evictionCursor != null) {
_evictionCursor.close();
}
if (_poolMap == null) {
return;
}
ObjectQueue pool = (ObjectQueue) (_poolMap.get(key));
if (pool != null) {
CursorableLinkedList queue = pool.queue;
_evictionCursor = queue.cursor(_lifo ? queue.size() : 0);
}
}
/**
* Iterates through all the known keys and creates any necessary objects to maintain
* the minimum level of pooled objects.
* @see #getMinIdle
* @see #setMinIdle
* @throws Exception If there was an error whilst creating the pooled objects.
*/
private void ensureMinIdle() throws Exception {
        // Check if we should sustain the pool
if (_minIdle > 0) {
Object[] keysCopy;
synchronized(this) {
// Get the current set of keys
keysCopy = _poolMap.keySet().toArray();
}
// Loop through all elements in _poolList
// Find out the total number of max active and max idle for that class
// If the number is less than the minIdle, do creation loop to boost numbers
for (int i=0; i < keysCopy.length; i++) {
//Get the next key to process
ensureMinIdle(keysCopy[i]);
}
}
}
/**
* Re-creates any needed objects to maintain the minimum levels of
* pooled objects for the specified key.
*
     * This method uses {@link #calculateDeficit} to calculate the number
     * of objects to be created.
* @param key The key to process
* @throws Exception If there was an error whilst creating the pooled objects
*/
private void ensureMinIdle(Object key) throws Exception {
// Calculate current pool objects
ObjectQueue pool;
synchronized(this) {
pool = (ObjectQueue)(_poolMap.get(key));
}
if (pool == null) {
return;
}
// this method isn't synchronized so the
// calculateDeficit is done at the beginning
// as a loop limit and a second time inside the loop
// to stop when another thread already returned the
// needed objects
        int objectDeficit = calculateDeficit(pool, false);
        for (int i = 0; i < objectDeficit && calculateDeficit(pool, true) > 0; i++) {
try {
addObject(key);
} finally {
synchronized (this) {
pool.decrementInternalProcessingCount();
notifyAll();
}
}
}
}
//--- non-public methods ----------------------------------------
/**
* Start the eviction thread or service, or when
* <code>delay</code> is non-positive, stop it
* if it is already running.
*
* @param delay milliseconds between evictor runs.
*/
protected synchronized void startEvictor(long delay) {
if(null != _evictor) {
EvictionTimer.cancel(_evictor);
_evictor = null;
}
if(delay > 0) {
_evictor = new Evictor();
EvictionTimer.schedule(_evictor, delay, delay);
}
}
synchronized String debugInfo() {
StringBuffer buf = new StringBuffer();
buf.append("Active: ").append(getNumActive()).append("\n");
buf.append("Idle: ").append(getNumIdle()).append("\n");
Iterator it = _poolMap.keySet().iterator();
while(it.hasNext()) {
buf.append("\t").append(_poolMap.get(it.next())).append("\n");
}
return buf.toString();
}
private int getNumTests() {
if(_numTestsPerEvictionRun >= 0) {
return _numTestsPerEvictionRun;
} else {
return(int)(Math.ceil(_totalIdle/Math.abs((double)_numTestsPerEvictionRun)));
}
}
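    // Worked example (illustrative): _numTestsPerEvictionRun == -2 with
    // 9 idle objects yields ceil(9 / 2.0) == 5 tests per eviction run.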
/**
     * Returns the number of objects to create during the pool
     * sustain cycle. This ensures that the minimum number of idle
     * instances is maintained without exceeding the maxActive or
     * maxTotal limits.
     *
     * @param pool the keyed pool whose object deficit is calculated
     * @param incrementInternal whether the count of objects currently under
     *                          some form of internal processing should be
     *                          incremented when a positive deficit is found
     * @return The number of objects to be created
*/
    private synchronized int calculateDeficit(ObjectQueue pool,
            boolean incrementInternal) {
        int objectDeficit = 0;
        // Calculate the number of objects needed to reach minIdle,
        // capped so the keyed pool does not grow past maxActive
        objectDeficit = getMinIdle() - pool.queue.size();
        if (getMaxActive() > 0) {
            int growLimit = Math.max(0, getMaxActive() - pool.activeCount - pool.queue.size() - pool.internalProcessingCount);
            objectDeficit = Math.min(objectDeficit, growLimit);
        }
        // Take the maxTotal limit into account
        if (getMaxTotal() > 0) {
            int growLimit = Math.max(0, getMaxTotal() - getNumActive() - getNumIdle() - _totalInternalProcessing);
            objectDeficit = Math.min(objectDeficit, growLimit);
        }
        if (incrementInternal && objectDeficit > 0) {
            pool.incrementInternalProcessingCount();
        }
        return objectDeficit;
    }
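    // Worked example (illustrative): with minIdle == 5, 2 idle instances
    // queued, maxActive == 8, 5 active and 0 in internal processing:
    //   deficit   = 5 - 2 = 3
    //   growLimit = max(0, 8 - 5 - 2 - 0) = 1
    // so min(3, 1) == 1 object is created for the key.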
//--- inner classes ----------------------------------------------
/**
* A "struct" that keeps additional information about the actual queue of pooled objects.
*/
private class ObjectQueue {
private int activeCount = 0;
private final CursorableLinkedList queue = new CursorableLinkedList();
private int internalProcessingCount = 0;
void incrementActiveCount() {
_totalActive++;
activeCount++;
}
void decrementActiveCount() {
_totalActive--;
if (activeCount > 0) {
activeCount--;
}
}
void incrementInternalProcessingCount() {
_totalInternalProcessing++;
internalProcessingCount++;
}
void decrementInternalProcessingCount() {
_totalInternalProcessing--;
internalProcessingCount--;
}
}
/**
* A simple "struct" encapsulating an object instance and a timestamp.
*
     * Implements Comparable; objects are sorted from old to new.
*
* This is also used by {@link GenericObjectPool}.
*/
static class ObjectTimestampPair implements Comparable {
Object value;
long tstamp;
ObjectTimestampPair(Object val) {
this(val, System.currentTimeMillis());
}
ObjectTimestampPair(Object val, long time) {
value = val;
tstamp = time;
}
public String toString() {
return value + ";" + tstamp;
}
public int compareTo(Object obj) {
return compareTo((ObjectTimestampPair) obj);
}
public int compareTo(ObjectTimestampPair other) {
final long tstampdiff = this.tstamp - other.tstamp;
if (tstampdiff == 0) {
// make sure the natural ordering is consistent with equals
// see java.lang.Comparable Javadocs
return System.identityHashCode(this) - System.identityHashCode(other);
} else {
// handle int overflow
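                // (tstampdiff is a long: roughly 25 days in milliseconds
                // already exceeds Integer.MAX_VALUE, so clamp before the
                // narrowing cast)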
return (int)Math.min(Math.max(tstampdiff, Integer.MIN_VALUE), Integer.MAX_VALUE);
}
}
}
/**
* The idle object evictor {@link TimerTask}.
* @see GenericKeyedObjectPool#setTimeBetweenEvictionRunsMillis
*/
private class Evictor extends TimerTask {
public void run() {
//Evict from the pool
try {
evict();
} catch(Exception e) {
// ignored
} catch(OutOfMemoryError oome) {
// Log problem but give evictor thread a chance to continue in
// case error is recoverable
oome.printStackTrace(System.err);
}
            // Re-create any needed idle instances.
try {
ensureMinIdle();
} catch (Exception e) {
// ignored
}
}
}
/**
* A simple "struct" encapsulating the
* configuration information for a <code>GenericKeyedObjectPool</code>.
* @see GenericKeyedObjectPool#GenericKeyedObjectPool(KeyedPoolableObjectFactory,GenericKeyedObjectPool.Config)
* @see GenericKeyedObjectPool#setConfig
*/
public static class Config {
/**
* @see GenericKeyedObjectPool#setMaxIdle
*/
public int maxIdle = GenericKeyedObjectPool.DEFAULT_MAX_IDLE;
/**
* @see GenericKeyedObjectPool#setMaxActive
*/
public int maxActive = GenericKeyedObjectPool.DEFAULT_MAX_ACTIVE;
/**
* @see GenericKeyedObjectPool#setMaxTotal
*/
public int maxTotal = GenericKeyedObjectPool.DEFAULT_MAX_TOTAL;
/**
* @see GenericKeyedObjectPool#setMinIdle
*/
public int minIdle = GenericKeyedObjectPool.DEFAULT_MIN_IDLE;
/**
* @see GenericKeyedObjectPool#setMaxWait
*/
public long maxWait = GenericKeyedObjectPool.DEFAULT_MAX_WAIT;
/**
* @see GenericKeyedObjectPool#setWhenExhaustedAction
*/
public byte whenExhaustedAction = GenericKeyedObjectPool.DEFAULT_WHEN_EXHAUSTED_ACTION;
/**
* @see GenericKeyedObjectPool#setTestOnBorrow
*/
public boolean testOnBorrow = GenericKeyedObjectPool.DEFAULT_TEST_ON_BORROW;
/**
* @see GenericKeyedObjectPool#setTestOnReturn
*/
public boolean testOnReturn = GenericKeyedObjectPool.DEFAULT_TEST_ON_RETURN;
/**
* @see GenericKeyedObjectPool#setTestWhileIdle
*/
public boolean testWhileIdle = GenericKeyedObjectPool.DEFAULT_TEST_WHILE_IDLE;
/**
* @see GenericKeyedObjectPool#setTimeBetweenEvictionRunsMillis
*/
public long timeBetweenEvictionRunsMillis = GenericKeyedObjectPool.DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS;
/**
* @see GenericKeyedObjectPool#setNumTestsPerEvictionRun
*/
public int numTestsPerEvictionRun = GenericKeyedObjectPool.DEFAULT_NUM_TESTS_PER_EVICTION_RUN;
/**
* @see GenericKeyedObjectPool#setMinEvictableIdleTimeMillis
*/
public long minEvictableIdleTimeMillis = GenericKeyedObjectPool.DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS;
/**
* @see GenericKeyedObjectPool#setLifo
*/
public boolean lifo = GenericKeyedObjectPool.DEFAULT_LIFO;
}
    //--- private attributes ---------------------------------------
/**
* The cap on the number of idle instances in the pool.
* @see #setMaxIdle
* @see #getMaxIdle
*/
private int _maxIdle = DEFAULT_MAX_IDLE;
/**
     * The minimum number of idle objects to keep in the pool.
* @see #setMinIdle
* @see #getMinIdle
*/
private int _minIdle = DEFAULT_MIN_IDLE;
/**
* The cap on the number of active instances from the pool.
* @see #setMaxActive
* @see #getMaxActive
*/
private int _maxActive = DEFAULT_MAX_ACTIVE;
/**
     * The cap on the total number of instances from all pools combined. A non-positive value means no limit.
* @see #setMaxTotal
* @see #getMaxTotal
*/
private int _maxTotal = DEFAULT_MAX_TOTAL;
/**
* The maximum amount of time (in millis) the
* {@link #borrowObject} method should block before throwing
* an exception when the pool is exhausted and the
* {@link #getWhenExhaustedAction "when exhausted" action} is
* {@link #WHEN_EXHAUSTED_BLOCK}.
*
* When less than or equal to 0, the {@link #borrowObject} method
* may block indefinitely.
*
* @see #setMaxWait
* @see #getMaxWait
* @see #WHEN_EXHAUSTED_BLOCK
* @see #setWhenExhaustedAction
* @see #getWhenExhaustedAction
*/
private long _maxWait = DEFAULT_MAX_WAIT;
/**
* The action to take when the {@link #borrowObject} method
* is invoked when the pool is exhausted (the maximum number
* of "active" objects has been reached).
*
* @see #WHEN_EXHAUSTED_BLOCK
* @see #WHEN_EXHAUSTED_FAIL
* @see #WHEN_EXHAUSTED_GROW
* @see #DEFAULT_WHEN_EXHAUSTED_ACTION
* @see #setWhenExhaustedAction
* @see #getWhenExhaustedAction
*/
private byte _whenExhaustedAction = DEFAULT_WHEN_EXHAUSTED_ACTION;
/**
* When <code>true</code>, objects will be
* {@link org.apache.commons.pool.PoolableObjectFactory#validateObject validated}
* before being returned by the {@link #borrowObject}
* method. If the object fails to validate,
* it will be dropped from the pool, and we will attempt
* to borrow another.
*
* @see #setTestOnBorrow
* @see #getTestOnBorrow
*/
private volatile boolean _testOnBorrow = DEFAULT_TEST_ON_BORROW;
/**
* When <code>true</code>, objects will be
* {@link org.apache.commons.pool.PoolableObjectFactory#validateObject validated}
* before being returned to the pool within the
* {@link #returnObject}.
*
* @see #getTestOnReturn
* @see #setTestOnReturn
*/
private volatile boolean _testOnReturn = DEFAULT_TEST_ON_RETURN;
/**
* When <code>true</code>, objects will be
* {@link org.apache.commons.pool.PoolableObjectFactory#validateObject validated}
* by the idle object evictor (if any). If an object
* fails to validate, it will be dropped from the pool.
*
* @see #setTestWhileIdle
* @see #getTestWhileIdle
* @see #getTimeBetweenEvictionRunsMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
private boolean _testWhileIdle = DEFAULT_TEST_WHILE_IDLE;
/**
* The number of milliseconds to sleep between runs of the
* idle object evictor thread.
* When non-positive, no idle object evictor thread will be
* run.
*
* @see #setTimeBetweenEvictionRunsMillis
* @see #getTimeBetweenEvictionRunsMillis
*/
private long _timeBetweenEvictionRunsMillis = DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS;
/**
* The number of objects to examine during each run of the
* idle object evictor thread (if any).
* <p>
* When a negative value is supplied, <code>ceil({@link #getNumIdle})/abs({@link #getNumTestsPerEvictionRun})</code>
* tests will be run. I.e., when the value is <code>-n</code>, roughly one <code>n</code>th of the
* idle objects will be tested per run.
*
* @see #setNumTestsPerEvictionRun
* @see #getNumTestsPerEvictionRun
* @see #getTimeBetweenEvictionRunsMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
private int _numTestsPerEvictionRun = DEFAULT_NUM_TESTS_PER_EVICTION_RUN;
/**
* The minimum amount of time an object may sit idle in the pool
* before it is eligible for eviction by the idle object evictor
* (if any).
* When non-positive, no objects will be evicted from the pool
* due to idle time alone.
*
* @see #setMinEvictableIdleTimeMillis
* @see #getMinEvictableIdleTimeMillis
* @see #getTimeBetweenEvictionRunsMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
private long _minEvictableIdleTimeMillis = DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS;
/** My hash of pools (ObjectQueue). */
private Map _poolMap = null;
/** The total number of active instances. */
private int _totalActive = 0;
/** The total number of idle instances. */
private int _totalIdle = 0;
/**
* The number of objects subject to some form of internal processing
* (usually creation or destruction) that should be included in the total
* number of objects but are neither active nor idle.
*/
private int _totalInternalProcessing = 0;
/** My {@link KeyedPoolableObjectFactory}. */
private KeyedPoolableObjectFactory _factory = null;
/**
* My idle object eviction {@link TimerTask}, if any.
*/
private Evictor _evictor = null;
/**
* A cursorable list of my pools.
* @see GenericKeyedObjectPool.Evictor#run
*/
private CursorableLinkedList _poolList = null;
private CursorableLinkedList.Cursor _evictionCursor = null;
private CursorableLinkedList.Cursor _evictionKeyCursor = null;
/** Whether or not the pools behave as LIFO queues (last in first out) */
private boolean _lifo = DEFAULT_LIFO;
}
| src/java/org/apache/commons/pool/impl/GenericKeyedObjectPool.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.pool.impl;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.TreeMap;
import java.util.TimerTask;
import org.apache.commons.pool.BaseKeyedObjectPool;
import org.apache.commons.pool.KeyedObjectPool;
import org.apache.commons.pool.KeyedPoolableObjectFactory;
/**
* A configurable <code>KeyedObjectPool</code> implementation.
* <p>
* When coupled with the appropriate {@link KeyedPoolableObjectFactory},
* <code>GenericKeyedObjectPool</code> provides robust pooling functionality for
* keyed objects. A <code>GenericKeyedObjectPool</code> can be viewed as a map
* of pools, keyed on the (unique) key values provided to the
* {@link #preparePool preparePool}, {@link #addObject addObject} or
* {@link #borrowObject borrowObject} methods. Each time a new key value is
* provided to one of these methods, a new pool is created under the given key
* to be managed by the containing <code>GenericKeyedObjectPool.</code>
* </p>
* <p>A <code>GenericKeyedObjectPool</code> provides a number of configurable
* parameters:</p>
* <ul>
* <li>
* {@link #setMaxActive maxActive} controls the maximum number of objects
* (per key) that can be borrowed from the pool at one time. When
* non-positive, there is no limit to the number of objects per key.
* When {@link #setMaxActive maxActive} is exceeded, the keyed pool is said
* to be exhausted. The default setting for this parameter is 8.
* </li>
* <li>
* {@link #setMaxTotal maxTotal} sets a global limit on the number of objects
* that can be in circulation (active or idle) within the combined set of
* pools. When non-positive, there is no limit to the total number of
* objects in circulation. When {@link #setMaxTotal maxTotal} is exceeded,
* all keyed pools are exhausted. When <code>maxTotal</code> is set to a
* positive value and {@link #borrowObject borrowObject} is invoked
* when at the limit with no idle instances available, an attempt is made to
* create room by clearing the oldest 15% of the elements from the keyed
* pools. The default setting for this parameter is -1 (no limit).
* </li>
* <li>
* {@link #setMaxIdle maxIdle} controls the maximum number of objects that can
* sit idle in the pool (per key) at any time. When negative, there
* is no limit to the number of objects that may be idle per key. The
* default setting for this parameter is 8.
* </li>
* <li>
* {@link #setWhenExhaustedAction whenExhaustedAction} specifies the
* behavior of the {@link #borrowObject borrowObject} method when a keyed
* pool is exhausted:
* <ul>
* <li>
* When {@link #setWhenExhaustedAction whenExhaustedAction} is
* {@link #WHEN_EXHAUSTED_FAIL}, {@link #borrowObject borrowObject} will throw
* a {@link NoSuchElementException}
* </li>
* <li>
* When {@link #setWhenExhaustedAction whenExhaustedAction} is
* {@link #WHEN_EXHAUSTED_GROW}, {@link #borrowObject borrowObject} will create a new
* object and return it (essentially making {@link #setMaxActive maxActive}
* meaningless.)
* </li>
* <li>
* When {@link #setWhenExhaustedAction whenExhaustedAction}
* is {@link #WHEN_EXHAUSTED_BLOCK}, {@link #borrowObject borrowObject} will block
 * (invoke {@link Object#wait() wait}) until a new or idle object is available.
* If a positive {@link #setMaxWait maxWait}
* value is supplied, the {@link #borrowObject borrowObject} will block for at
* most that many milliseconds, after which a {@link NoSuchElementException}
* will be thrown. If {@link #setMaxWait maxWait} is non-positive,
* the {@link #borrowObject borrowObject} method will block indefinitely.
* </li>
* </ul>
* The default <code>whenExhaustedAction</code> setting is
* {@link #WHEN_EXHAUSTED_BLOCK}.
* </li>
* <li>
* When {@link #setTestOnBorrow testOnBorrow} is set, the pool will
* attempt to validate each object before it is returned from the
* {@link #borrowObject borrowObject} method. (Using the provided factory's
* {@link KeyedPoolableObjectFactory#validateObject validateObject} method.)
* Objects that fail to validate will be dropped from the pool, and a
* different object will be borrowed. The default setting for this parameter
* is <code>false.</code>
* </li>
* <li>
* When {@link #setTestOnReturn testOnReturn} is set, the pool will
* attempt to validate each object before it is returned to the pool in the
* {@link #returnObject returnObject} method. (Using the provided factory's
* {@link KeyedPoolableObjectFactory#validateObject validateObject}
* method.) Objects that fail to validate will be dropped from the pool.
* The default setting for this parameter is <code>false.</code>
* </li>
* </ul>
* <p>
* Optionally, one may configure the pool to examine and possibly evict objects
* as they sit idle in the pool and to ensure that a minimum number of idle
* objects is maintained for each key. This is performed by an
* "idle object eviction" thread, which runs asynchronously. Caution should be
* used when configuring this optional feature. Eviction runs require an
* exclusive synchronization lock on the pool, so if they run too frequently
* and / or incur excessive latency when creating, destroying or validating
* object instances, performance issues may result. The idle object eviction
* thread may be configured using the following attributes:
* <ul>
* <li>
* {@link #setTimeBetweenEvictionRunsMillis timeBetweenEvictionRunsMillis}
* indicates how long the eviction thread should sleep before "runs" of examining
* idle objects. When non-positive, no eviction thread will be launched. The
* default setting for this parameter is -1 (i.e., by default, idle object
* eviction is disabled).
* </li>
* <li>
* {@link #setMinEvictableIdleTimeMillis minEvictableIdleTimeMillis}
* specifies the minimum amount of time that an object may sit idle in the
* pool before it is eligible for eviction due to idle time. When
* non-positive, no object will be dropped from the pool due to idle time
* alone. This setting has no effect unless
* <code>timeBetweenEvictionRunsMillis > 0.</code> The default setting
* for this parameter is 30 minutes.
* </li>
* <li>
* {@link #setTestWhileIdle testWhileIdle} indicates whether or not idle
* objects should be validated using the factory's
* {@link KeyedPoolableObjectFactory#validateObject validateObject} method
* during idle object eviction runs. Objects that fail to validate will be
* dropped from the pool. This setting has no effect unless
* <code>timeBetweenEvictionRunsMillis > 0.</code> The default setting
* for this parameter is <code>false.</code>
* </li>
* <li>
* {@link #setMinIdle minIdle} sets a target value for the minimum number of
* idle objects (per key) that should always be available. If this parameter
* is set to a positive number and
* <code>timeBetweenEvictionRunsMillis > 0,</code> each time the idle object
* eviction thread runs, it will try to create enough idle instances so that
* there will be <code>minIdle</code> idle instances available under each
* key. This parameter is also used by {@link #preparePool preparePool}
* if <code>true</code> is provided as that method's
* <code>populateImmediately</code> parameter. The default setting for this
* parameter is 0.
* </li>
* </ul>
* <p>
* The pools can be configured to behave as LIFO queues with respect to idle
* objects - always returning the most recently used object from the pool,
* or as FIFO queues, where borrowObject always returns the oldest object
* in the idle object pool.
* <ul>
* <li>
* {@link #setLifo <i>Lifo</i>}
* determines whether or not the pools return idle objects in
* last-in-first-out order. The default setting for this parameter is
* <code>true.</code>
* </li>
* </ul>
* <p>
* GenericKeyedObjectPool is not usable without a {@link KeyedPoolableObjectFactory}. A
* non-<code>null</code> factory must be provided either as a constructor argument
* or via a call to {@link #setFactory setFactory} before the pool is used.
* </p>
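 * <p>
 * A minimal usage sketch (illustrative only; <code>MyKeyedFactory</code> is a
 * hypothetical {@link KeyedPoolableObjectFactory} implementation):
 * <pre>
 * GenericKeyedObjectPool pool = new GenericKeyedObjectPool(new MyKeyedFactory());
 * pool.setMaxActive(4);                      // at most 4 active objects per key
 * Object obj = pool.borrowObject("myKey");   // reuses an idle instance if available
 * try {
 *     // ... use obj ...
 * } finally {
 *     pool.returnObject("myKey", obj);       // always return what was borrowed
 * }
 * </pre>
 * </p>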
* @see GenericObjectPool
* @author Rodney Waldhoff
* @author Dirk Verbeeck
* @author Sandy McArthur
* @version $Revision$ $Date$
* @since Pool 1.0
*/
public class GenericKeyedObjectPool extends BaseKeyedObjectPool implements KeyedObjectPool {
//--- public constants -------------------------------------------
/**
* A "when exhausted action" type indicating that when the pool is
* exhausted (i.e., the maximum number of active objects has
* been reached), the {@link #borrowObject}
* method should fail, throwing a {@link NoSuchElementException}.
* @see #WHEN_EXHAUSTED_BLOCK
* @see #WHEN_EXHAUSTED_GROW
* @see #setWhenExhaustedAction
*/
public static final byte WHEN_EXHAUSTED_FAIL = 0;
/**
* A "when exhausted action" type indicating that when the pool
* is exhausted (i.e., the maximum number
* of active objects has been reached), the {@link #borrowObject}
* method should block until a new object is available, or the
* {@link #getMaxWait maximum wait time} has been reached.
* @see #WHEN_EXHAUSTED_FAIL
* @see #WHEN_EXHAUSTED_GROW
* @see #setMaxWait
* @see #getMaxWait
* @see #setWhenExhaustedAction
*/
public static final byte WHEN_EXHAUSTED_BLOCK = 1;
/**
* A "when exhausted action" type indicating that when the pool is
* exhausted (i.e., the maximum number
* of active objects has been reached), the {@link #borrowObject}
* method should simply create a new object anyway.
* @see #WHEN_EXHAUSTED_FAIL
* @see #WHEN_EXHAUSTED_GROW
* @see #setWhenExhaustedAction
*/
public static final byte WHEN_EXHAUSTED_GROW = 2;
/**
* The default cap on the number of idle instances (per key) in the pool.
* @see #getMaxIdle
* @see #setMaxIdle
*/
public static final int DEFAULT_MAX_IDLE = 8;
/**
* The default cap on the total number of active instances (per key)
* from the pool.
* @see #getMaxActive
* @see #setMaxActive
*/
public static final int DEFAULT_MAX_ACTIVE = 8;
/**
     * The default cap on the overall maximum number of objects that can
* exist at one time.
* @see #getMaxTotal
* @see #setMaxTotal
*/
public static final int DEFAULT_MAX_TOTAL = -1;
/**
* The default "when exhausted action" for the pool.
* @see #WHEN_EXHAUSTED_BLOCK
* @see #WHEN_EXHAUSTED_FAIL
* @see #WHEN_EXHAUSTED_GROW
* @see #setWhenExhaustedAction
*/
public static final byte DEFAULT_WHEN_EXHAUSTED_ACTION = WHEN_EXHAUSTED_BLOCK;
/**
* The default maximum amount of time (in milliseconds) the
* {@link #borrowObject} method should block before throwing
* an exception when the pool is exhausted and the
* {@link #getWhenExhaustedAction "when exhausted" action} is
* {@link #WHEN_EXHAUSTED_BLOCK}.
* @see #getMaxWait
* @see #setMaxWait
*/
public static final long DEFAULT_MAX_WAIT = -1L;
/**
* The default "test on borrow" value.
* @see #getTestOnBorrow
* @see #setTestOnBorrow
*/
public static final boolean DEFAULT_TEST_ON_BORROW = false;
/**
* The default "test on return" value.
* @see #getTestOnReturn
* @see #setTestOnReturn
*/
public static final boolean DEFAULT_TEST_ON_RETURN = false;
/**
* The default "test while idle" value.
* @see #getTestWhileIdle
* @see #setTestWhileIdle
* @see #getTimeBetweenEvictionRunsMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
public static final boolean DEFAULT_TEST_WHILE_IDLE = false;
/**
* The default "time between eviction runs" value.
* @see #getTimeBetweenEvictionRunsMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
public static final long DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS = -1L;
/**
* The default number of objects to examine per run in the
* idle object evictor.
* @see #getNumTestsPerEvictionRun
* @see #setNumTestsPerEvictionRun
* @see #getTimeBetweenEvictionRunsMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
public static final int DEFAULT_NUM_TESTS_PER_EVICTION_RUN = 3;
/**
* The default value for {@link #getMinEvictableIdleTimeMillis}.
* @see #getMinEvictableIdleTimeMillis
* @see #setMinEvictableIdleTimeMillis
*/
public static final long DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS = 1000L * 60L * 30L;
/**
* The default minimum level of idle objects in the pool.
* @since Pool 1.3
* @see #setMinIdle
* @see #getMinIdle
*/
public static final int DEFAULT_MIN_IDLE = 0;
/**
* The default LIFO status. True means that borrowObject returns the
* most recently used ("last in") idle object in a pool (if there are
* idle instances available). False means that pools behave as FIFO
* queues - objects are taken from idle object pools in the order that
* they are returned.
* @see #setLifo
*/
public static final boolean DEFAULT_LIFO = true;
//--- constructors -----------------------------------------------
/**
* Create a new <code>GenericKeyedObjectPool</code> with no factory.
*
* @see #GenericKeyedObjectPool(KeyedPoolableObjectFactory)
* @see #setFactory(KeyedPoolableObjectFactory)
*/
public GenericKeyedObjectPool() {
this(null,DEFAULT_MAX_ACTIVE,DEFAULT_WHEN_EXHAUSTED_ACTION,DEFAULT_MAX_WAIT,DEFAULT_MAX_IDLE,DEFAULT_TEST_ON_BORROW,DEFAULT_TEST_ON_RETURN,DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS,DEFAULT_NUM_TESTS_PER_EVICTION_RUN,DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS,DEFAULT_TEST_WHILE_IDLE);
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using the specified values.
* @param factory the <code>KeyedPoolableObjectFactory</code> to use to create, validate, and destroy objects if not <code>null</code>
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory factory) {
this(factory,DEFAULT_MAX_ACTIVE,DEFAULT_WHEN_EXHAUSTED_ACTION,DEFAULT_MAX_WAIT,DEFAULT_MAX_IDLE,DEFAULT_TEST_ON_BORROW,DEFAULT_TEST_ON_RETURN,DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS,DEFAULT_NUM_TESTS_PER_EVICTION_RUN,DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS,DEFAULT_TEST_WHILE_IDLE);
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using the specified values.
* @param factory the <code>KeyedPoolableObjectFactory</code> to use to create, validate, and destroy objects if not <code>null</code>
* @param config a non-<code>null</code> {@link GenericKeyedObjectPool.Config} describing the configuration
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory factory, GenericKeyedObjectPool.Config config) {
this(factory,config.maxActive,config.whenExhaustedAction,config.maxWait,config.maxIdle,config.maxTotal, config.minIdle,config.testOnBorrow,config.testOnReturn,config.timeBetweenEvictionRunsMillis,config.numTestsPerEvictionRun,config.minEvictableIdleTimeMillis,config.testWhileIdle,config.lifo);
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using the specified values.
* @param factory the <code>KeyedPoolableObjectFactory</code> to use to create, validate, and destroy objects if not <code>null</code>
* @param maxActive the maximum number of objects that can be borrowed from me at one time (see {@link #setMaxActive})
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory factory, int maxActive) {
this(factory,maxActive,DEFAULT_WHEN_EXHAUSTED_ACTION,DEFAULT_MAX_WAIT,DEFAULT_MAX_IDLE,DEFAULT_TEST_ON_BORROW,DEFAULT_TEST_ON_RETURN,DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS,DEFAULT_NUM_TESTS_PER_EVICTION_RUN,DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS,DEFAULT_TEST_WHILE_IDLE);
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using the specified values.
* @param factory the <code>KeyedPoolableObjectFactory</code> to use to create, validate, and destroy objects if not <code>null</code>
* @param maxActive the maximum number of objects that can be borrowed from me at one time (see {@link #setMaxActive})
* @param whenExhaustedAction the action to take when the pool is exhausted (see {@link #setWhenExhaustedAction})
* @param maxWait the maximum amount of time to wait for an idle object when the pool is exhausted and <code>whenExhaustedAction</code> is {@link #WHEN_EXHAUSTED_BLOCK} (otherwise ignored) (see {@link #setMaxWait})
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory factory, int maxActive, byte whenExhaustedAction, long maxWait) {
this(factory,maxActive,whenExhaustedAction,maxWait,DEFAULT_MAX_IDLE,DEFAULT_TEST_ON_BORROW,DEFAULT_TEST_ON_RETURN,DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS,DEFAULT_NUM_TESTS_PER_EVICTION_RUN,DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS,DEFAULT_TEST_WHILE_IDLE);
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using the specified values.
* @param factory the <code>KeyedPoolableObjectFactory</code> to use to create, validate, and destroy objects if not <code>null</code>
* @param maxActive the maximum number of objects that can be borrowed from me at one time (see {@link #setMaxActive})
     * @param whenExhaustedAction the action to take when the pool is exhausted (see {@link #setWhenExhaustedAction})
     * @param maxWait the maximum amount of time to wait for an idle object when the pool is exhausted and <code>whenExhaustedAction</code> is {@link #WHEN_EXHAUSTED_BLOCK} (otherwise ignored) (see {@link #setMaxWait})
* @param testOnBorrow whether or not to validate objects before they are returned by the {@link #borrowObject} method (see {@link #setTestOnBorrow})
* @param testOnReturn whether or not to validate objects after they are returned to the {@link #returnObject} method (see {@link #setTestOnReturn})
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory factory, int maxActive, byte whenExhaustedAction, long maxWait, boolean testOnBorrow, boolean testOnReturn) {
this(factory,maxActive,whenExhaustedAction,maxWait,DEFAULT_MAX_IDLE,testOnBorrow,testOnReturn,DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS,DEFAULT_NUM_TESTS_PER_EVICTION_RUN,DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS,DEFAULT_TEST_WHILE_IDLE);
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using the specified values.
* @param factory the <code>KeyedPoolableObjectFactory</code> to use to create, validate, and destroy objects if not <code>null</code>
* @param maxActive the maximum number of objects that can be borrowed from me at one time (see {@link #setMaxActive})
* @param whenExhaustedAction the action to take when the pool is exhausted (see {@link #setWhenExhaustedAction})
* @param maxWait the maximum amount of time to wait for an idle object when the pool is exhausted and <code>whenExhaustedAction</code> is {@link #WHEN_EXHAUSTED_BLOCK} (otherwise ignored) (see {@link #setMaxWait})
* @param maxIdle the maximum number of idle objects in my pool (see {@link #setMaxIdle})
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory factory, int maxActive, byte whenExhaustedAction, long maxWait, int maxIdle) {
this(factory,maxActive,whenExhaustedAction,maxWait,maxIdle,DEFAULT_TEST_ON_BORROW,DEFAULT_TEST_ON_RETURN,DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS,DEFAULT_NUM_TESTS_PER_EVICTION_RUN,DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS,DEFAULT_TEST_WHILE_IDLE);
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using the specified values.
* @param factory the <code>KeyedPoolableObjectFactory</code> to use to create, validate, and destroy objects if not <code>null</code>
* @param maxActive the maximum number of objects that can be borrowed from me at one time (see {@link #setMaxActive})
* @param whenExhaustedAction the action to take when the pool is exhausted (see {@link #setWhenExhaustedAction})
* @param maxWait the maximum amount of time to wait for an idle object when the pool is exhausted and <code>whenExhaustedAction</code> is {@link #WHEN_EXHAUSTED_BLOCK} (otherwise ignored) (see {@link #getMaxWait})
* @param maxIdle the maximum number of idle objects in my pool (see {@link #setMaxIdle})
* @param testOnBorrow whether or not to validate objects before they are returned by the {@link #borrowObject} method (see {@link #setTestOnBorrow})
* @param testOnReturn whether or not to validate objects after they are returned to the {@link #returnObject} method (see {@link #setTestOnReturn})
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory factory, int maxActive, byte whenExhaustedAction, long maxWait, int maxIdle, boolean testOnBorrow, boolean testOnReturn) {
this(factory,maxActive,whenExhaustedAction,maxWait,maxIdle,testOnBorrow,testOnReturn,DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS,DEFAULT_NUM_TESTS_PER_EVICTION_RUN,DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS,DEFAULT_TEST_WHILE_IDLE);
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using the specified values.
* @param factory the <code>KeyedPoolableObjectFactory</code> to use to create, validate, and destroy objects if not <code>null</code>
* @param maxActive the maximum number of objects that can be borrowed from me at one time (see {@link #setMaxActive})
* @param whenExhaustedAction the action to take when the pool is exhausted (see {@link #setWhenExhaustedAction})
* @param maxWait the maximum amount of time to wait for an idle object when the pool is exhausted and <code>whenExhaustedAction</code> is {@link #WHEN_EXHAUSTED_BLOCK} (otherwise ignored) (see {@link #setMaxWait})
* @param maxIdle the maximum number of idle objects in my pool (see {@link #setMaxIdle})
* @param testOnBorrow whether or not to validate objects before they are returned by the {@link #borrowObject} method (see {@link #setTestOnBorrow})
* @param testOnReturn whether or not to validate objects after they are returned to the {@link #returnObject} method (see {@link #setTestOnReturn})
* @param timeBetweenEvictionRunsMillis the amount of time (in milliseconds) to sleep between examining idle objects for eviction (see {@link #setTimeBetweenEvictionRunsMillis})
* @param numTestsPerEvictionRun the number of idle objects to examine per run within the idle object eviction thread (if any) (see {@link #setNumTestsPerEvictionRun})
* @param minEvictableIdleTimeMillis the minimum number of milliseconds an object can sit idle in the pool before it is eligible for eviction (see {@link #setMinEvictableIdleTimeMillis})
* @param testWhileIdle whether or not to validate objects in the idle object eviction thread, if any (see {@link #setTestWhileIdle})
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory factory, int maxActive, byte whenExhaustedAction, long maxWait, int maxIdle, boolean testOnBorrow, boolean testOnReturn, long timeBetweenEvictionRunsMillis, int numTestsPerEvictionRun, long minEvictableIdleTimeMillis, boolean testWhileIdle) {
this(factory, maxActive, whenExhaustedAction, maxWait, maxIdle, GenericKeyedObjectPool.DEFAULT_MAX_TOTAL, testOnBorrow, testOnReturn, timeBetweenEvictionRunsMillis, numTestsPerEvictionRun, minEvictableIdleTimeMillis, testWhileIdle);
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using the specified values.
* @param factory the <code>KeyedPoolableObjectFactory</code> to use to create, validate, and destroy objects if not <code>null</code>
* @param maxActive the maximum number of objects that can be borrowed from me at one time (see {@link #setMaxActive})
* @param whenExhaustedAction the action to take when the pool is exhausted (see {@link #setWhenExhaustedAction})
* @param maxWait the maximum amount of time to wait for an idle object when the pool is exhausted and <code>whenExhaustedAction</code> is {@link #WHEN_EXHAUSTED_BLOCK} (otherwise ignored) (see {@link #setMaxWait})
* @param maxIdle the maximum number of idle objects in my pool (see {@link #setMaxIdle})
     * @param maxTotal the maximum number of objects that can exist at one time (see {@link #setMaxTotal})
* @param testOnBorrow whether or not to validate objects before they are returned by the {@link #borrowObject} method (see {@link #setTestOnBorrow})
* @param testOnReturn whether or not to validate objects after they are returned to the {@link #returnObject} method (see {@link #setTestOnReturn})
* @param timeBetweenEvictionRunsMillis the amount of time (in milliseconds) to sleep between examining idle objects for eviction (see {@link #setTimeBetweenEvictionRunsMillis})
* @param numTestsPerEvictionRun the number of idle objects to examine per run within the idle object eviction thread (if any) (see {@link #setNumTestsPerEvictionRun})
* @param minEvictableIdleTimeMillis the minimum number of milliseconds an object can sit idle in the pool before it is eligible for eviction (see {@link #setMinEvictableIdleTimeMillis})
* @param testWhileIdle whether or not to validate objects in the idle object eviction thread, if any (see {@link #setTestWhileIdle})
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory factory, int maxActive, byte whenExhaustedAction, long maxWait, int maxIdle, int maxTotal, boolean testOnBorrow, boolean testOnReturn, long timeBetweenEvictionRunsMillis, int numTestsPerEvictionRun, long minEvictableIdleTimeMillis, boolean testWhileIdle) {
this(factory, maxActive, whenExhaustedAction, maxWait, maxIdle, maxTotal, GenericKeyedObjectPool.DEFAULT_MIN_IDLE, testOnBorrow, testOnReturn, timeBetweenEvictionRunsMillis, numTestsPerEvictionRun, minEvictableIdleTimeMillis, testWhileIdle);
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using the specified values.
* @param factory the <code>KeyedPoolableObjectFactory</code> to use to create, validate, and destroy objects if not <code>null</code>
* @param maxActive the maximum number of objects that can be borrowed from me at one time (see {@link #setMaxActive})
* @param whenExhaustedAction the action to take when the pool is exhausted (see {@link #setWhenExhaustedAction})
* @param maxWait the maximum amount of time to wait for an idle object when the pool is exhausted and <code>whenExhaustedAction</code> is {@link #WHEN_EXHAUSTED_BLOCK} (otherwise ignored) (see {@link #setMaxWait})
* @param maxIdle the maximum number of idle objects in my pool (see {@link #setMaxIdle})
     * @param maxTotal the maximum number of objects that can exist at one time (see {@link #setMaxTotal})
* @param minIdle the minimum number of idle objects to have in the pool at any one time (see {@link #setMinIdle})
* @param testOnBorrow whether or not to validate objects before they are returned by the {@link #borrowObject} method (see {@link #setTestOnBorrow})
* @param testOnReturn whether or not to validate objects after they are returned to the {@link #returnObject} method (see {@link #setTestOnReturn})
* @param timeBetweenEvictionRunsMillis the amount of time (in milliseconds) to sleep between examining idle objects for eviction (see {@link #setTimeBetweenEvictionRunsMillis})
* @param numTestsPerEvictionRun the number of idle objects to examine per run within the idle object eviction thread (if any) (see {@link #setNumTestsPerEvictionRun})
* @param minEvictableIdleTimeMillis the minimum number of milliseconds an object can sit idle in the pool before it is eligible for eviction (see {@link #setMinEvictableIdleTimeMillis})
* @param testWhileIdle whether or not to validate objects in the idle object eviction thread, if any (see {@link #setTestWhileIdle})
* @since Pool 1.3
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory factory, int maxActive, byte whenExhaustedAction, long maxWait, int maxIdle, int maxTotal, int minIdle, boolean testOnBorrow, boolean testOnReturn, long timeBetweenEvictionRunsMillis, int numTestsPerEvictionRun, long minEvictableIdleTimeMillis, boolean testWhileIdle) {
this(factory, maxActive, whenExhaustedAction, maxWait, maxIdle, maxTotal, minIdle, testOnBorrow, testOnReturn, timeBetweenEvictionRunsMillis, numTestsPerEvictionRun, minEvictableIdleTimeMillis, testWhileIdle, DEFAULT_LIFO);
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using the specified values.
* @param factory the <code>KeyedPoolableObjectFactory</code> to use to create, validate, and destroy objects if not <code>null</code>
* @param maxActive the maximum number of objects that can be borrowed from me at one time (see {@link #setMaxActive})
* @param whenExhaustedAction the action to take when the pool is exhausted (see {@link #setWhenExhaustedAction})
* @param maxWait the maximum amount of time to wait for an idle object when the pool is exhausted and <code>whenExhaustedAction</code> is {@link #WHEN_EXHAUSTED_BLOCK} (otherwise ignored) (see {@link #setMaxWait})
* @param maxIdle the maximum number of idle objects in my pool (see {@link #setMaxIdle})
     * @param maxTotal the maximum number of objects that can exist at one time (see {@link #setMaxTotal})
* @param minIdle the minimum number of idle objects to have in the pool at any one time (see {@link #setMinIdle})
* @param testOnBorrow whether or not to validate objects before they are returned by the {@link #borrowObject} method (see {@link #setTestOnBorrow})
* @param testOnReturn whether or not to validate objects after they are returned to the {@link #returnObject} method (see {@link #setTestOnReturn})
* @param timeBetweenEvictionRunsMillis the amount of time (in milliseconds) to sleep between examining idle objects for eviction (see {@link #setTimeBetweenEvictionRunsMillis})
* @param numTestsPerEvictionRun the number of idle objects to examine per run within the idle object eviction thread (if any) (see {@link #setNumTestsPerEvictionRun})
* @param minEvictableIdleTimeMillis the minimum number of milliseconds an object can sit idle in the pool before it is eligible for eviction (see {@link #setMinEvictableIdleTimeMillis})
* @param testWhileIdle whether or not to validate objects in the idle object eviction thread, if any (see {@link #setTestWhileIdle})
* @param lifo whether or not the pools behave as LIFO (last in first out) queues (see {@link #setLifo})
* @since Pool 1.4
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory factory, int maxActive, byte whenExhaustedAction, long maxWait, int maxIdle, int maxTotal, int minIdle, boolean testOnBorrow, boolean testOnReturn, long timeBetweenEvictionRunsMillis, int numTestsPerEvictionRun, long minEvictableIdleTimeMillis, boolean testWhileIdle, boolean lifo) {
_factory = factory;
_maxActive = maxActive;
_lifo = lifo;
switch(whenExhaustedAction) {
case WHEN_EXHAUSTED_BLOCK:
case WHEN_EXHAUSTED_FAIL:
case WHEN_EXHAUSTED_GROW:
_whenExhaustedAction = whenExhaustedAction;
break;
default:
throw new IllegalArgumentException("whenExhaustedAction " + whenExhaustedAction + " not recognized.");
}
_maxWait = maxWait;
_maxIdle = maxIdle;
_maxTotal = maxTotal;
_minIdle = minIdle;
_testOnBorrow = testOnBorrow;
_testOnReturn = testOnReturn;
_timeBetweenEvictionRunsMillis = timeBetweenEvictionRunsMillis;
_numTestsPerEvictionRun = numTestsPerEvictionRun;
_minEvictableIdleTimeMillis = minEvictableIdleTimeMillis;
_testWhileIdle = testWhileIdle;
_poolMap = new HashMap();
_poolList = new CursorableLinkedList();
startEvictor(_timeBetweenEvictionRunsMillis);
}
//--- public methods ---------------------------------------------
//--- configuration methods --------------------------------------
/**
* Returns the cap on the number of active instances per key.
* A negative value indicates no limit.
* @return the cap on the number of active instances per key.
* @see #setMaxActive
*/
public synchronized int getMaxActive() {
return _maxActive;
}
/**
* Sets the cap on the number of active instances per key.
* @param maxActive The cap on the number of active instances per key.
* Use a negative value for no limit.
* @see #getMaxActive
*/
public synchronized void setMaxActive(int maxActive) {
_maxActive = maxActive;
notifyAll();
}
/**
* Returns the overall maximum number of objects (across pools) that can
* exist at one time. A negative value indicates no limit.
* @return the maximum number of instances in circulation at one time.
* @see #setMaxTotal
*/
public synchronized int getMaxTotal() {
return _maxTotal;
}
/**
* Sets the cap on the total number of instances from all pools combined.
* When <code>maxTotal</code> is set to a
* positive value and {@link #borrowObject borrowObject} is invoked
* when at the limit with no idle instances available, an attempt is made to
* create room by clearing the oldest 15% of the elements from the keyed
* pools.
*
* @param maxTotal The cap on the total number of instances across pools.
* Use a negative value for no limit.
* @see #getMaxTotal
*/
public synchronized void setMaxTotal(int maxTotal) {
_maxTotal = maxTotal;
notifyAll();
}
/**
* Returns the action to take when the {@link #borrowObject} method
* is invoked when the pool is exhausted (the maximum number
* of "active" objects has been reached).
*
* @return one of {@link #WHEN_EXHAUSTED_BLOCK},
* {@link #WHEN_EXHAUSTED_FAIL} or {@link #WHEN_EXHAUSTED_GROW}
* @see #setWhenExhaustedAction
*/
public synchronized byte getWhenExhaustedAction() {
return _whenExhaustedAction;
}
/**
* Sets the action to take when the {@link #borrowObject} method
* is invoked when the pool is exhausted (the maximum number
* of "active" objects has been reached).
*
* @param whenExhaustedAction the action code, which must be one of
* {@link #WHEN_EXHAUSTED_BLOCK}, {@link #WHEN_EXHAUSTED_FAIL},
* or {@link #WHEN_EXHAUSTED_GROW}
* @see #getWhenExhaustedAction
*/
public synchronized void setWhenExhaustedAction(byte whenExhaustedAction) {
switch(whenExhaustedAction) {
case WHEN_EXHAUSTED_BLOCK:
case WHEN_EXHAUSTED_FAIL:
case WHEN_EXHAUSTED_GROW:
_whenExhaustedAction = whenExhaustedAction;
notifyAll();
break;
default:
throw new IllegalArgumentException("whenExhaustedAction " + whenExhaustedAction + " not recognized.");
}
}
/**
* Returns the maximum amount of time (in milliseconds) the
* {@link #borrowObject} method should block before throwing
* an exception when the pool is exhausted and the
* {@link #setWhenExhaustedAction "when exhausted" action} is
* {@link #WHEN_EXHAUSTED_BLOCK}.
*
* When less than or equal to 0, the {@link #borrowObject} method
* may block indefinitely.
*
* @return the maximum number of milliseconds borrowObject will block.
* @see #setMaxWait
* @see #setWhenExhaustedAction
* @see #WHEN_EXHAUSTED_BLOCK
*/
public synchronized long getMaxWait() {
return _maxWait;
}
/**
* Sets the maximum amount of time (in milliseconds) the
* {@link #borrowObject} method should block before throwing
* an exception when the pool is exhausted and the
* {@link #setWhenExhaustedAction "when exhausted" action} is
* {@link #WHEN_EXHAUSTED_BLOCK}.
*
* When less than or equal to 0, the {@link #borrowObject} method
* may block indefinitely.
*
     * @param maxWait the maximum number of milliseconds borrowObject will block, or a negative value to block indefinitely.
* @see #getMaxWait
* @see #setWhenExhaustedAction
* @see #WHEN_EXHAUSTED_BLOCK
*/
public synchronized void setMaxWait(long maxWait) {
_maxWait = maxWait;
}
/**
* Returns the cap on the number of "idle" instances per key.
* @return the maximum number of "idle" instances that can be held
* in a given keyed pool.
* @see #setMaxIdle
*/
public synchronized int getMaxIdle() {
return _maxIdle;
}
/**
* Sets the cap on the number of "idle" instances in the pool.
* If maxIdle is set too low on heavily loaded systems it is possible you
* will see objects being destroyed and almost immediately new objects
* being created. This is a result of the active threads momentarily
     * returning objects faster than they are requesting them, causing the
     * number of idle objects to rise above maxIdle. The best value for maxIdle
     * on heavily loaded systems will vary, but the default is a good starting
     * point.
* @param maxIdle the maximum number of "idle" instances that can be held
* in a given keyed pool. Use a negative value for no limit.
* @see #getMaxIdle
* @see #DEFAULT_MAX_IDLE
*/
public synchronized void setMaxIdle(int maxIdle) {
_maxIdle = maxIdle;
notifyAll();
}
/**
* Sets the minimum number of idle objects to maintain in each of the keyed
* pools. This setting has no effect unless
* <code>timeBetweenEvictionRunsMillis > 0</code> and attempts to ensure
* that each pool has the required minimum number of instances are only
* made during idle object eviction runs.
     * @param poolSize - The minimum size of each keyed pool
* @since Pool 1.3
* @see #getMinIdle
* @see #setTimeBetweenEvictionRunsMillis
*/
public synchronized void setMinIdle(int poolSize) {
_minIdle = poolSize;
}
/**
* Returns the minimum number of idle objects to maintain in each of the keyed
* pools. This setting has no effect unless
* <code>timeBetweenEvictionRunsMillis > 0</code> and attempts to ensure
* that each pool has the required minimum number of instances are only
* made during idle object eviction runs.
     * @return minimum size of each keyed pool
* @since Pool 1.3
* @see #setTimeBetweenEvictionRunsMillis
*/
public synchronized int getMinIdle() {
return _minIdle;
}
/**
* When <code>true</code>, objects will be
* {@link org.apache.commons.pool.PoolableObjectFactory#validateObject validated}
* before being returned by the {@link #borrowObject}
* method. If the object fails to validate,
* it will be dropped from the pool, and we will attempt
* to borrow another.
*
* @return <code>true</code> if objects are validated before being borrowed.
* @see #setTestOnBorrow
*/
public boolean getTestOnBorrow() {
return _testOnBorrow;
}
/**
* When <code>true</code>, objects will be
* {@link org.apache.commons.pool.PoolableObjectFactory#validateObject validated}
* before being returned by the {@link #borrowObject}
* method. If the object fails to validate,
* it will be dropped from the pool, and we will attempt
* to borrow another.
*
     * @param testOnBorrow whether objects should be validated before being returned by borrowObject.
* @see #getTestOnBorrow
*/
public void setTestOnBorrow(boolean testOnBorrow) {
_testOnBorrow = testOnBorrow;
}
/**
* When <code>true</code>, objects will be
* {@link org.apache.commons.pool.PoolableObjectFactory#validateObject validated}
* before being returned to the pool within the
     * {@link #returnObject} method.
*
* @return <code>true</code> when objects will be validated before being returned.
* @see #setTestOnReturn
*/
public boolean getTestOnReturn() {
return _testOnReturn;
}
/**
* When <code>true</code>, objects will be
* {@link org.apache.commons.pool.PoolableObjectFactory#validateObject validated}
* before being returned to the pool within the
     * {@link #returnObject} method.
*
* @param testOnReturn <code>true</code> so objects will be validated before being returned.
* @see #getTestOnReturn
*/
public void setTestOnReturn(boolean testOnReturn) {
_testOnReturn = testOnReturn;
}
/**
* Returns the number of milliseconds to sleep between runs of the
* idle object evictor thread.
* When non-positive, no idle object evictor thread will be
* run.
*
* @return milliseconds to sleep between evictor runs.
* @see #setTimeBetweenEvictionRunsMillis
*/
public synchronized long getTimeBetweenEvictionRunsMillis() {
return _timeBetweenEvictionRunsMillis;
}
/**
* Sets the number of milliseconds to sleep between runs of the
* idle object evictor thread.
* When non-positive, no idle object evictor thread will be
* run.
*
* @param timeBetweenEvictionRunsMillis milliseconds to sleep between evictor runs.
* @see #getTimeBetweenEvictionRunsMillis
*/
public synchronized void setTimeBetweenEvictionRunsMillis(long timeBetweenEvictionRunsMillis) {
_timeBetweenEvictionRunsMillis = timeBetweenEvictionRunsMillis;
startEvictor(_timeBetweenEvictionRunsMillis);
}
/**
* Returns the number of objects to examine during each run of the
* idle object evictor thread (if any).
*
* @return number of objects to examine each eviction run.
* @see #setNumTestsPerEvictionRun
* @see #setTimeBetweenEvictionRunsMillis
*/
public synchronized int getNumTestsPerEvictionRun() {
return _numTestsPerEvictionRun;
}
/**
* Sets the number of objects to examine during each run of the
* idle object evictor thread (if any).
* <p>
* When a negative value is supplied, <code>ceil({@link #getNumIdle()})/abs({@link #getNumTestsPerEvictionRun})</code>
* tests will be run. I.e., when the value is <code>-n</code>, roughly one <code>n</code>th of the
* idle objects will be tested per run.
*
* @param numTestsPerEvictionRun number of objects to examine each eviction run.
* @see #getNumTestsPerEvictionRun
* @see #setTimeBetweenEvictionRunsMillis
*/
public synchronized void setNumTestsPerEvictionRun(int numTestsPerEvictionRun) {
_numTestsPerEvictionRun = numTestsPerEvictionRun;
}
/**
* Returns the minimum amount of time an object may sit idle in the pool
* before it is eligible for eviction by the idle object evictor
* (if any).
*
* @return minimum amount of time an object may sit idle in the pool before it is eligible for eviction.
* @see #setMinEvictableIdleTimeMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
public synchronized long getMinEvictableIdleTimeMillis() {
return _minEvictableIdleTimeMillis;
}
/**
* Sets the minimum amount of time an object may sit idle in the pool
* before it is eligible for eviction by the idle object evictor
* (if any).
* When non-positive, no objects will be evicted from the pool
* due to idle time alone.
*
* @param minEvictableIdleTimeMillis minimum amount of time an object may sit idle in the pool before it is eligible for eviction.
* @see #getMinEvictableIdleTimeMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
public synchronized void setMinEvictableIdleTimeMillis(long minEvictableIdleTimeMillis) {
_minEvictableIdleTimeMillis = minEvictableIdleTimeMillis;
}
/**
* When <code>true</code>, objects will be
* {@link org.apache.commons.pool.PoolableObjectFactory#validateObject validated}
* by the idle object evictor (if any). If an object
* fails to validate, it will be dropped from the pool.
*
     * @return <code>true</code> when objects will be validated by the idle object evictor.
* @see #setTestWhileIdle
* @see #setTimeBetweenEvictionRunsMillis
*/
public synchronized boolean getTestWhileIdle() {
return _testWhileIdle;
}
/**
* When <code>true</code>, objects will be
* {@link org.apache.commons.pool.PoolableObjectFactory#validateObject validated}
* by the idle object evictor (if any). If an object
* fails to validate, it will be dropped from the pool.
*
     * @param testWhileIdle <code>true</code> so objects will be validated by the idle object evictor.
* @see #getTestWhileIdle
* @see #setTimeBetweenEvictionRunsMillis
*/
public synchronized void setTestWhileIdle(boolean testWhileIdle) {
_testWhileIdle = testWhileIdle;
}
/**
* Sets the configuration.
* @param conf the new configuration to use.
* @see GenericKeyedObjectPool.Config
*/
public synchronized void setConfig(GenericKeyedObjectPool.Config conf) {
setMaxIdle(conf.maxIdle);
setMaxActive(conf.maxActive);
setMaxTotal(conf.maxTotal);
setMinIdle(conf.minIdle);
setMaxWait(conf.maxWait);
setWhenExhaustedAction(conf.whenExhaustedAction);
setTestOnBorrow(conf.testOnBorrow);
setTestOnReturn(conf.testOnReturn);
setTestWhileIdle(conf.testWhileIdle);
setNumTestsPerEvictionRun(conf.numTestsPerEvictionRun);
setMinEvictableIdleTimeMillis(conf.minEvictableIdleTimeMillis);
setTimeBetweenEvictionRunsMillis(conf.timeBetweenEvictionRunsMillis);
}
/**
* Whether or not the idle object pools act as LIFO queues. True means
* that borrowObject returns the most recently used ("last in") idle object
* in a pool (if there are idle instances available). False means that
* the pools behave as FIFO queues - objects are taken from idle object
* pools in the order that they are returned.
*
* @return <code>true</code> if the pools are configured to act as LIFO queues
* @since 1.4
*/
public synchronized boolean getLifo() {
return _lifo;
}
/**
* Sets the LIFO property of the pools. True means that borrowObject returns
* the most recently used ("last in") idle object in a pool (if there are
* idle instances available). False means that the pools behave as FIFO
* queues - objects are taken from idle object pools in the order that
* they are returned.
*
* @param lifo the new value for the lifo property
* @since 1.4
*/
public synchronized void setLifo(boolean lifo) {
this._lifo = lifo;
}
//-- ObjectPool methods ------------------------------------------
public Object borrowObject(Object key) throws Exception {
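        // Loop until we can return an activated, validated instance, or until
        // the configured whenExhaustedAction causes an exception to be thrown.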
long starttime = System.currentTimeMillis();
boolean newlyCreated = false;
for(;;) {
ObjectTimestampPair pair = null;
ObjectQueue pool = null;
synchronized (this) {
assertOpen();
pool = (ObjectQueue)(_poolMap.get(key));
if(null == pool) {
pool = new ObjectQueue();
_poolMap.put(key,pool);
_poolList.add(key);
}
// if there are any sleeping, just grab one of those
try {
pair = (ObjectTimestampPair)(pool.queue.removeFirst());
if(null != pair) {
_totalIdle--;
}
} catch(NoSuchElementException e) { /* ignored */
}
// otherwise
if(null == pair) {
// if there is a totalMaxActive and we are at the limit then
// we have to make room
if ((_maxTotal > 0)
&& (_totalActive + _totalIdle + _totalInternalProcessing >= _maxTotal)) {
clearOldest();
}
// check if we can create one
// (note we know that the num sleeping is 0, else we wouldn't be here)
if ((_maxActive < 0 || pool.activeCount + pool.internalProcessingCount < _maxActive) &&
(_maxTotal < 0 || _totalActive + _totalIdle + _totalInternalProcessing < _maxTotal)) {
Object obj = _factory.makeObject(key);
pair = new ObjectTimestampPair(obj);
newlyCreated = true;
} else {
// the pool is exhausted
switch(_whenExhaustedAction) {
case WHEN_EXHAUSTED_GROW:
Object obj = _factory.makeObject(key);
pair = new ObjectTimestampPair(obj);
break;
case WHEN_EXHAUSTED_FAIL:
throw new NoSuchElementException();
case WHEN_EXHAUSTED_BLOCK:
try {
if(_maxWait <= 0) {
wait();
} else {
// this code may be executed again after a notify then continue cycle
// so, need to calculate the amount of time to wait
final long elapsed = (System.currentTimeMillis() - starttime);
final long waitTime = _maxWait - elapsed;
if (waitTime > 0)
{
wait(waitTime);
}
}
} catch(InterruptedException e) {
// ignored
}
if(_maxWait > 0 && ((System.currentTimeMillis() - starttime) >= _maxWait)) {
throw new NoSuchElementException("Timeout waiting for idle object");
} else {
continue; // keep looping
}
default:
throw new IllegalArgumentException("whenExhaustedAction " + _whenExhaustedAction + " not recognized.");
}
}
}
pool.incrementActiveCount();
}
// Activate. If activate fails, decrement active count and destroy.
// If instance failing activation is new, throw NoSuchElementException;
// otherwise keep looping
try {
_factory.activateObject(key, pair.value);
} catch (Exception e) {
try {
_factory.destroyObject(key,pair.value);
} catch (Exception e2) {
// swallowed
} finally {
synchronized (this) {
pool.decrementActiveCount();
}
}
if(newlyCreated) {
throw new NoSuchElementException(
"Could not create a validated object, cause: "
+ e.getMessage());
}
else {
continue; // keep looping
}
}
// Validate. If validation fails, decrement active count and
// destroy. If instance failing validation is new, throw
// NoSuchElementException; otherwise keep looping
boolean invalid = true;
try {
invalid = _testOnBorrow && !_factory.validateObject(key, pair.value);
} catch (Exception e) {
// swallowed
}
if (invalid) {
try {
_factory.destroyObject(key,pair.value);
} catch (Exception e) {
// swallowed
} finally {
synchronized (this) {
pool.decrementActiveCount();
}
}
if(newlyCreated) {
throw new NoSuchElementException("Could not create a validated object");
} // else keep looping
} else {
return pair.value;
}
}
}
/**
* Clears the pool, removing all pooled instances.
*/
public synchronized void clear() {
for(Iterator entries = _poolMap.entrySet().iterator(); entries.hasNext(); ) {
final Map.Entry entry = (Map.Entry)entries.next();
final Object key = entry.getKey();
final CursorableLinkedList list = ((ObjectQueue)(entry.getValue())).queue;
for(Iterator it = list.iterator(); it.hasNext(); ) {
try {
_factory.destroyObject(key,((ObjectTimestampPair)(it.next())).value);
} catch(Exception e) {
// ignore error, keep destroying the rest
}
it.remove();
}
}
_poolMap.clear();
_poolList.clear();
_totalIdle = 0;
notifyAll();
}
/**
     * Clears the oldest 15% of objects in the pool. The method sorts the
     * idle objects into a TreeMap by timestamp and then destroys the first 15%.
* @since Pool 1.3
*/
public synchronized void clearOldest() {
// build sorted map of idle objects
final Map map = new TreeMap();
for (Iterator keyiter = _poolMap.keySet().iterator(); keyiter.hasNext();) {
final Object key = keyiter.next();
final CursorableLinkedList list = ((ObjectQueue)_poolMap.get(key)).queue;
for (Iterator it = list.iterator(); it.hasNext();) {
                // Each entry in the map uses the ObjectTimestampPair object
                // as its key, so entries are sorted by the timestamp field.
                // Each value in the map is the pool key the pair belongs to.
map.put(it.next(), key);
}
}
        // Now iterate over the sorted map and destroy the first 15%, plus one
        // so at least one object is removed even when 15% rounds down to zero
Set setPairKeys = map.entrySet();
int itemsToRemove = ((int) (map.size() * 0.15)) + 1;
Iterator iter = setPairKeys.iterator();
while (iter.hasNext() && itemsToRemove > 0) {
Map.Entry entry = (Map.Entry) iter.next();
            // The naming is somewhat backwards: in the map, each key is the
            // ObjectTimestampPair (because it carries the ordering timestamp),
            // and each value is the pool key of the list the pair belongs to.
Object key = entry.getValue();
ObjectTimestampPair pairTimeStamp = (ObjectTimestampPair) entry.getKey();
final CursorableLinkedList list =
((ObjectQueue)(_poolMap.get(key))).queue;
list.remove(pairTimeStamp);
try {
_factory.destroyObject(key, pairTimeStamp.value);
} catch (Exception e) {
// ignore error, keep destroying the rest
}
// if that was the last object for that key, drop that pool
if (list.isEmpty()) {
_poolMap.remove(key);
_poolList.remove(key);
}
_totalIdle--;
itemsToRemove--;
}
notifyAll();
}
/**
* Clears the specified pool, removing all pooled instances corresponding to the given <code>key</code>.
*
* @param key the key to clear
*/
public synchronized void clear(Object key) {
final ObjectQueue pool = (ObjectQueue)(_poolMap.remove(key));
if(null == pool) {
return;
} else {
_poolList.remove(key);
for(Iterator it = pool.queue.iterator(); it.hasNext(); ) {
try {
_factory.destroyObject(key,((ObjectTimestampPair)(it.next())).value);
} catch(Exception e) {
// ignore error, keep destroying the rest
}
it.remove();
_totalIdle--;
}
}
notifyAll();
}
/**
     * Returns the total number of instances currently borrowed from this pool but not yet returned.
*
* @return the total number of instances currently borrowed from this pool
*/
public synchronized int getNumActive() {
return _totalActive;
}
/**
* Returns the total number of instances currently idle in this pool.
*
* @return the total number of instances currently idle in this pool
*/
public synchronized int getNumIdle() {
return _totalIdle;
}
/**
* Returns the number of instances currently borrowed from but not yet returned
* to the pool corresponding to the given <code>key</code>.
*
* @param key the key to query
* @return the number of instances corresponding to the given <code>key</code> currently borrowed in this pool
*/
public synchronized int getNumActive(Object key) {
final ObjectQueue pool = (ObjectQueue)(_poolMap.get(key));
return pool != null ? pool.activeCount : 0;
}
/**
* Returns the number of instances corresponding to the given <code>key</code> currently idle in this pool.
*
* @param key the key to query
* @return the number of instances corresponding to the given <code>key</code> currently idle in this pool
*/
public synchronized int getNumIdle(Object key) {
final ObjectQueue pool = (ObjectQueue)(_poolMap.get(key));
return pool != null ? pool.queue.size() : 0;
}
public void returnObject(Object key, Object obj) throws Exception {
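        // Delegate to addObjectToPool. If return processing fails, destroy the
        // instance and still decrement the active count so the pool's
        // accounting stays consistent.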
try {
addObjectToPool(key, obj, true);
} catch (Exception e) {
if (_factory != null) {
try {
_factory.destroyObject(key, obj);
} catch (Exception e2) {
// swallowed
}
// TODO: Correctness here depends on control in addObjectToPool.
// These two methods should be refactored, removing the
// "behavior flag",decrementNumActive, from addObjectToPool.
ObjectQueue pool = (ObjectQueue) (_poolMap.get(key));
if (pool != null) {
synchronized(this) {
pool.decrementActiveCount();
notifyAll();
}
}
}
}
}
private void addObjectToPool(Object key, Object obj,
boolean decrementNumActive) throws Exception {
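        // Shared by returnObject (decrementNumActive == true) and addObject
        // (decrementNumActive == false): validate/passivate the instance, then
        // either queue it as idle or destroy it.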
// if we need to validate this object, do so
boolean success = true; // whether or not this object passed validation
if(_testOnReturn && !_factory.validateObject(key, obj)) {
success = false;
} else {
_factory.passivateObject(key, obj);
}
boolean shouldDestroy = !success;
ObjectQueue pool;
// Add instance to pool if there is room and it has passed validation
        // (if testOnReturn is set)
synchronized (this) {
// grab the pool (list) of objects associated with the given key
pool = (ObjectQueue) (_poolMap.get(key));
// if it doesn't exist, create it
if(null == pool) {
pool = new ObjectQueue();
_poolMap.put(key, pool);
_poolList.add(key);
}
if (isClosed()) {
shouldDestroy = true;
} else {
// if there's no space in the pool, flag the object for destruction
// else if we passivated successfully, return it to the pool
if(_maxIdle >= 0 && (pool.queue.size() >= _maxIdle)) {
shouldDestroy = true;
} else if(success) {
// borrowObject always takes the first element from the queue,
// so for LIFO, push on top, FIFO add to end
if (_lifo) {
pool.queue.addFirst(new ObjectTimestampPair(obj));
} else {
pool.queue.addLast(new ObjectTimestampPair(obj));
}
_totalIdle++;
}
}
}
// Destroy the instance if necessary
if(shouldDestroy) {
try {
_factory.destroyObject(key, obj);
} catch(Exception e) {
// ignored?
}
}
// Decrement active count *after* destroy if applicable
if (decrementNumActive) {
synchronized(this) {
pool.decrementActiveCount();
notifyAll();
}
}
}
public void invalidateObject(Object key, Object obj) throws Exception {
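        // Destroy the instance unconditionally; even if destroyObject throws,
        // the finally block still decrements the active count for this key.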
try {
_factory.destroyObject(key, obj);
} finally {
synchronized (this) {
ObjectQueue pool = (ObjectQueue) (_poolMap.get(key));
if(null == pool) {
pool = new ObjectQueue();
_poolMap.put(key, pool);
_poolList.add(key);
}
pool.decrementActiveCount();
notifyAll(); // _totalActive has changed
}
}
}
/**
* Create an object using the {@link KeyedPoolableObjectFactory#makeObject factory},
* passivate it, and then place it in the idle object pool.
* <code>addObject</code> is useful for "pre-loading" a pool with idle objects.
*
* @param key the key a new instance should be added to
* @throws Exception when {@link KeyedPoolableObjectFactory#makeObject} fails.
* @throws IllegalStateException when no {@link #setFactory factory} has been set or after {@link #close} has been called on this pool.
*/
public void addObject(Object key) throws Exception {
assertOpen();
if (_factory == null) {
throw new IllegalStateException("Cannot add objects without a factory.");
}
Object obj = _factory.makeObject(key);
try {
assertOpen();
addObjectToPool(key, obj, false);
} catch (IllegalStateException ex) { // Pool closed
try {
_factory.destroyObject(key, obj);
} catch (Exception ex2) {
// swallow
}
throw ex;
}
}
/**
* Registers a key for pool control.
*
* If <code>populateImmediately</code> is <code>true</code> and
* <code>minIdle > 0,</code> the pool under the given key will be
* populated immediately with <code>minIdle</code> idle instances.
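     * <p>
     * Illustrative example: with {@link #setMinIdle minIdle} set to 2,
     * <code>preparePool("db", true)</code> creates two idle instances under
     * the key <code>"db"</code> before any object is borrowed.
     * </p>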
*
* @param key - The key to register for pool control.
* @param populateImmediately - If this is <code>true</code>, the pool
* will be populated immediately.
* @since Pool 1.3
*/
public synchronized void preparePool(Object key, boolean populateImmediately) {
ObjectQueue pool = (ObjectQueue)(_poolMap.get(key));
if (null == pool) {
pool = new ObjectQueue();
_poolMap.put(key,pool);
_poolList.add(key);
}
if (populateImmediately) {
try {
// Create the pooled objects
ensureMinIdle(key);
}
catch (Exception e) {
//Do nothing
}
}
}
public void close() throws Exception {
super.close();
synchronized (this) {
clear();
if(null != _evictionCursor) {
_evictionCursor.close();
_evictionCursor = null;
}
if(null != _evictionKeyCursor) {
_evictionKeyCursor.close();
_evictionKeyCursor = null;
}
startEvictor(-1L);
}
}
public synchronized void setFactory(KeyedPoolableObjectFactory factory) throws IllegalStateException {
assertOpen();
if(0 < getNumActive()) {
throw new IllegalStateException("Objects are already active");
} else {
clear();
_factory = factory;
}
}
/**
* <p>Perform <code>numTests</code> idle object eviction tests, evicting
* examined objects that meet the criteria for eviction. If
* <code>testWhileIdle</code> is true, examined objects are validated
* when visited (and removed if invalid); otherwise only objects that
     * have been idle for more than <code>minEvictableIdleTimeMillis</code>
* are removed.</p>
*
* <p>Successive activations of this method examine objects in keyed pools
* in sequence, cycling through the keys and examining objects in
* oldest-to-youngest order within the keyed pools.</p>
*
* @throws Exception when there is a problem evicting idle objects.
*/
public synchronized void evict() throws Exception {
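        // Walk the keyed pools using two persistent cursors: one over the list
        // of keys and one over the idle objects of the current key's pool. The
        // cursors are kept between invocations so successive runs continue
        // where the previous run left off.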
// Initialize key to last key value
Object key = null;
if (_evictionKeyCursor != null &&
_evictionKeyCursor._lastReturned != null) {
key = _evictionKeyCursor._lastReturned.value();
}
for (int i=0,m=getNumTests(); i<m; i++) {
// make sure pool map is not empty; otherwise do nothing
if (_poolMap == null || _poolMap.size() == 0) {
continue;
}
// if we don't have a key cursor, then create one
if (null == _evictionKeyCursor) {
resetEvictionKeyCursor();
key = null;
}
// if we don't have an object cursor, create one
if (null == _evictionCursor) {
// if the _evictionKeyCursor has a next value, use this key
if (_evictionKeyCursor.hasNext()) {
key = _evictionKeyCursor.next();
resetEvictionObjectCursor(key);
} else {
// Reset the key cursor and try again
resetEvictionKeyCursor();
if (_evictionKeyCursor != null) {
if (_evictionKeyCursor.hasNext()) {
key = _evictionKeyCursor.next();
resetEvictionObjectCursor(key);
}
}
}
}
if (_evictionCursor == null) {
continue; // should never happen; do nothing
}
// If eviction cursor is exhausted, try to move
// to the next key and reset
if((_lifo && !_evictionCursor.hasPrevious()) ||
(!_lifo && !_evictionCursor.hasNext())) {
if (_evictionKeyCursor != null) {
if (_evictionKeyCursor.hasNext()) {
key = _evictionKeyCursor.next();
resetEvictionObjectCursor(key);
} else { // Need to reset Key cursor
resetEvictionKeyCursor();
if (_evictionKeyCursor != null) {
if (_evictionKeyCursor.hasNext()) {
key = _evictionKeyCursor.next();
resetEvictionObjectCursor(key);
}
}
}
}
}
if((_lifo && !_evictionCursor.hasPrevious()) ||
(!_lifo && !_evictionCursor.hasNext())) {
continue; // reset failed, do nothing
}
// if LIFO and the _evictionCursor has a previous object,
// or FIFO and _evictionCursor has a next object, test it
ObjectTimestampPair pair = _lifo ?
(ObjectTimestampPair) _evictionCursor.previous() :
(ObjectTimestampPair) _evictionCursor.next();
boolean removeObject=false;
if((_minEvictableIdleTimeMillis > 0) &&
(System.currentTimeMillis() - pair.tstamp >
_minEvictableIdleTimeMillis)) {
removeObject=true;
}
            if(_testWhileIdle && !removeObject) {
boolean active = false;
try {
_factory.activateObject(key,pair.value);
active = true;
} catch(Exception e) {
removeObject=true;
}
if(active) {
if(!_factory.validateObject(key,pair.value)) {
removeObject=true;
} else {
try {
_factory.passivateObject(key,pair.value);
} catch(Exception e) {
removeObject=true;
}
}
}
}
if(removeObject) {
try {
_evictionCursor.remove();
_totalIdle--;
_factory.destroyObject(key, pair.value);
// Do not remove the key from the _poolList or _poolmap,
// even if the list stored in the _poolMap for this key is
// empty when minIdle > 0.
//
// Otherwise if it was the last object for that key,
// drop that pool
if (_minIdle == 0) {
ObjectQueue objectQueue =
(ObjectQueue)_poolMap.get(key);
if (objectQueue != null &&
objectQueue.queue.isEmpty()) {
_poolMap.remove(key);
_poolList.remove(key);
}
}
} catch(Exception e) {
// ignored
}
}
}
}
/**
* Resets the eviction key cursor and closes any
* associated eviction object cursor
*/
private void resetEvictionKeyCursor() {
if (_evictionKeyCursor != null) {
_evictionKeyCursor.close();
}
_evictionKeyCursor = _poolList.cursor();
if (null != _evictionCursor) {
_evictionCursor.close();
_evictionCursor = null;
}
}
/**
* Resets the eviction object cursor for the given key
*
* @param key eviction key
*/
private void resetEvictionObjectCursor(Object key) {
if (_evictionCursor != null) {
_evictionCursor.close();
}
if (_poolMap == null) {
return;
}
ObjectQueue pool = (ObjectQueue) (_poolMap.get(key));
if (pool != null) {
CursorableLinkedList queue = pool.queue;
_evictionCursor = queue.cursor(_lifo ? queue.size() : 0);
}
}
/**
* Iterates through all the known keys and creates any necessary objects to maintain
* the minimum level of pooled objects.
* @see #getMinIdle
* @see #setMinIdle
* @throws Exception If there was an error whilst creating the pooled objects.
*/
private void ensureMinIdle() throws Exception {
        // Check if we should sustain the pool
if (_minIdle > 0) {
Object[] keysCopy;
synchronized(this) {
// Get the current set of keys
keysCopy = _poolMap.keySet().toArray();
}
// Loop through all elements in _poolList
// Find out the total number of max active and max idle for that class
// If the number is less than the minIdle, do creation loop to boost numbers
for (int i=0; i < keysCopy.length; i++) {
//Get the next key to process
ensureMinIdle(keysCopy[i]);
}
}
}
/**
* Re-creates any needed objects to maintain the minimum levels of
* pooled objects for the specified key.
*
* This method uses {@link #calculateDefecit} to calculate the number
* of objects to be created. {@link #calculateDefecit} can be overridden to
* provide a different method of calculating the number of objects to be
* created.
* @param key The key to process
* @throws Exception If there was an error whilst creating the pooled objects
*/
private void ensureMinIdle(Object key) throws Exception {
// Calculate current pool objects
ObjectQueue pool;
synchronized(this) {
pool = (ObjectQueue)(_poolMap.get(key));
}
if (pool == null) {
return;
}
// this method isn't synchronized so the
// calculateDeficit is done at the beginning
// as a loop limit and a second time inside the loop
// to stop when another thread already returned the
// needed objects
int objectDeficit = calculateDefecit(pool, false);
for (int i = 0; i < objectDeficit && calculateDefecit(pool, true) > 0; i++) {
try {
addObject(key);
} finally {
synchronized (this) {
pool.decrementInternalProcessingCount();
notifyAll();
}
}
}
}
//--- non-public methods ----------------------------------------
/**
* Start the eviction thread or service, or when
* <code>delay</code> is non-positive, stop it
* if it is already running.
*
* @param delay milliseconds between evictor runs.
*/
protected synchronized void startEvictor(long delay) {
if(null != _evictor) {
EvictionTimer.cancel(_evictor);
_evictor = null;
}
if(delay > 0) {
_evictor = new Evictor();
EvictionTimer.schedule(_evictor, delay, delay);
}
}
synchronized String debugInfo() {
StringBuffer buf = new StringBuffer();
buf.append("Active: ").append(getNumActive()).append("\n");
buf.append("Idle: ").append(getNumIdle()).append("\n");
Iterator it = _poolMap.keySet().iterator();
while(it.hasNext()) {
buf.append("\t").append(_poolMap.get(it.next())).append("\n");
}
return buf.toString();
}
private int getNumTests() {
if(_numTestsPerEvictionRun >= 0) {
return _numTestsPerEvictionRun;
} else {
            return (int) Math.ceil(_totalIdle / Math.abs((double) _numTestsPerEvictionRun));
}
}
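    // Worked example for the negative-value convention above (illustrative,
    // the values are hypothetical): with _numTestsPerEvictionRun = -2 and
    // _totalIdle = 7, getNumTests() returns (int) Math.ceil(7 / 2.0) = 4,
    // i.e. roughly half of the idle objects are examined per eviction run.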
    /**
     * Returns the number of objects to create during the pool
     * sustain cycle, ensuring that the minimum number of idle
     * instances is maintained without exceeding the maxActive and
     * maxTotal limits.
     *
     * @param pool - The pool for which the number of objects to be
     *               re-created is calculated
     * @param incrementInternal - Should the count of objects currently under
     *                            some form of internal processing be
     *                            incremented?
     * @return The number of objects to be created
     */
private synchronized int calculateDefecit(ObjectQueue pool,
boolean incrementInternal) {
int objectDefecit = 0;
//Calculate no of objects needed to be created, in order to have
//the number of pooled objects < maxActive();
objectDefecit = getMinIdle() - pool.queue.size();
if (getMaxActive() > 0) {
int growLimit = Math.max(0, getMaxActive() - pool.activeCount - pool.queue.size() - pool.internalProcessingCount);
objectDefecit = Math.min(objectDefecit, growLimit);
}
// Take the maxTotal limit into account
if (getMaxTotal() > 0) {
int growLimit = Math.max(0, getMaxTotal() - getNumActive() - getNumIdle() - _totalInternalProcessing);
objectDefecit = Math.min(objectDefecit, growLimit);
}
if (incrementInternal && objectDefecit > 0) {
pool.incrementInternalProcessingCount();
}
return objectDefecit;
}
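    // Worked example (illustrative, the values are hypothetical): with
    // getMinIdle() = 5 and pool.queue.size() = 2 the raw deficit is 3; with
    // getMaxActive() = 8, pool.activeCount = 4 and
    // pool.internalProcessingCount = 1 the grow limit is
    // max(0, 8 - 4 - 2 - 1) = 1, so the method returns min(3, 1) = 1.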
//--- inner classes ----------------------------------------------
/**
* A "struct" that keeps additional information about the actual queue of pooled objects.
*/
private class ObjectQueue {
private int activeCount = 0;
private final CursorableLinkedList queue = new CursorableLinkedList();
private int internalProcessingCount = 0;
void incrementActiveCount() {
_totalActive++;
activeCount++;
}
void decrementActiveCount() {
_totalActive--;
if (activeCount > 0) {
activeCount--;
}
}
void incrementInternalProcessingCount() {
_totalInternalProcessing++;
internalProcessingCount++;
}
void decrementInternalProcessingCount() {
_totalInternalProcessing--;
internalProcessingCount--;
}
}
/**
* A simple "struct" encapsulating an object instance and a timestamp.
*
* Implements Comparable, objects are sorted from old to new.
*
* This is also used by {@link GenericObjectPool}.
*/
static class ObjectTimestampPair implements Comparable {
Object value;
long tstamp;
ObjectTimestampPair(Object val) {
this(val, System.currentTimeMillis());
}
ObjectTimestampPair(Object val, long time) {
value = val;
tstamp = time;
}
public String toString() {
return value + ";" + tstamp;
}
public int compareTo(Object obj) {
return compareTo((ObjectTimestampPair) obj);
}
public int compareTo(ObjectTimestampPair other) {
final long tstampdiff = this.tstamp - other.tstamp;
if (tstampdiff == 0) {
// make sure the natural ordering is consistent with equals
// see java.lang.Comparable Javadocs
return System.identityHashCode(this) - System.identityHashCode(other);
} else {
// handle int overflow
return (int)Math.min(Math.max(tstampdiff, Integer.MIN_VALUE), Integer.MAX_VALUE);
}
}
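        // Overflow example (illustrative): for tstampdiff = 3000000000L a
        // plain (int) cast would flip the sign (it yields -1294967296); the
        // clamping above returns Integer.MAX_VALUE instead, preserving the
        // ordering implied by the timestamps.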
}
/**
* The idle object evictor {@link TimerTask}.
* @see GenericKeyedObjectPool#setTimeBetweenEvictionRunsMillis
*/
private class Evictor extends TimerTask {
public void run() {
//Evict from the pool
try {
evict();
} catch(Exception e) {
// ignored
} catch(OutOfMemoryError oome) {
// Log problem but give evictor thread a chance to continue in
// case error is recoverable
oome.printStackTrace(System.err);
}
            // Re-create any needed idle instances.
try {
ensureMinIdle();
} catch (Exception e) {
// ignored
}
}
}
/**
* A simple "struct" encapsulating the
* configuration information for a <code>GenericKeyedObjectPool</code>.
* @see GenericKeyedObjectPool#GenericKeyedObjectPool(KeyedPoolableObjectFactory,GenericKeyedObjectPool.Config)
* @see GenericKeyedObjectPool#setConfig
*/
public static class Config {
/**
* @see GenericKeyedObjectPool#setMaxIdle
*/
public int maxIdle = GenericKeyedObjectPool.DEFAULT_MAX_IDLE;
/**
* @see GenericKeyedObjectPool#setMaxActive
*/
public int maxActive = GenericKeyedObjectPool.DEFAULT_MAX_ACTIVE;
/**
* @see GenericKeyedObjectPool#setMaxTotal
*/
public int maxTotal = GenericKeyedObjectPool.DEFAULT_MAX_TOTAL;
/**
* @see GenericKeyedObjectPool#setMinIdle
*/
public int minIdle = GenericKeyedObjectPool.DEFAULT_MIN_IDLE;
/**
* @see GenericKeyedObjectPool#setMaxWait
*/
public long maxWait = GenericKeyedObjectPool.DEFAULT_MAX_WAIT;
/**
* @see GenericKeyedObjectPool#setWhenExhaustedAction
*/
public byte whenExhaustedAction = GenericKeyedObjectPool.DEFAULT_WHEN_EXHAUSTED_ACTION;
/**
* @see GenericKeyedObjectPool#setTestOnBorrow
*/
public boolean testOnBorrow = GenericKeyedObjectPool.DEFAULT_TEST_ON_BORROW;
/**
* @see GenericKeyedObjectPool#setTestOnReturn
*/
public boolean testOnReturn = GenericKeyedObjectPool.DEFAULT_TEST_ON_RETURN;
/**
* @see GenericKeyedObjectPool#setTestWhileIdle
*/
public boolean testWhileIdle = GenericKeyedObjectPool.DEFAULT_TEST_WHILE_IDLE;
/**
* @see GenericKeyedObjectPool#setTimeBetweenEvictionRunsMillis
*/
public long timeBetweenEvictionRunsMillis = GenericKeyedObjectPool.DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS;
/**
* @see GenericKeyedObjectPool#setNumTestsPerEvictionRun
*/
public int numTestsPerEvictionRun = GenericKeyedObjectPool.DEFAULT_NUM_TESTS_PER_EVICTION_RUN;
/**
* @see GenericKeyedObjectPool#setMinEvictableIdleTimeMillis
*/
public long minEvictableIdleTimeMillis = GenericKeyedObjectPool.DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS;
/**
* @see GenericKeyedObjectPool#setLifo
*/
public boolean lifo = GenericKeyedObjectPool.DEFAULT_LIFO;
}
//--- protected attributes ---------------------------------------
/**
* The cap on the number of idle instances in the pool.
* @see #setMaxIdle
* @see #getMaxIdle
*/
private int _maxIdle = DEFAULT_MAX_IDLE;
/**
     * The minimum number of idle objects to keep in the pool.
* @see #setMinIdle
* @see #getMinIdle
*/
private int _minIdle = DEFAULT_MIN_IDLE;
/**
* The cap on the number of active instances from the pool.
* @see #setMaxActive
* @see #getMaxActive
*/
private int _maxActive = DEFAULT_MAX_ACTIVE;
/**
     * The cap on the total number of instances from the pool; a non-positive value means no limit.
* @see #setMaxTotal
* @see #getMaxTotal
*/
private int _maxTotal = DEFAULT_MAX_TOTAL;
/**
* The maximum amount of time (in millis) the
* {@link #borrowObject} method should block before throwing
* an exception when the pool is exhausted and the
* {@link #getWhenExhaustedAction "when exhausted" action} is
* {@link #WHEN_EXHAUSTED_BLOCK}.
*
* When less than or equal to 0, the {@link #borrowObject} method
* may block indefinitely.
*
* @see #setMaxWait
* @see #getMaxWait
* @see #WHEN_EXHAUSTED_BLOCK
* @see #setWhenExhaustedAction
* @see #getWhenExhaustedAction
*/
private long _maxWait = DEFAULT_MAX_WAIT;
/**
* The action to take when the {@link #borrowObject} method
* is invoked when the pool is exhausted (the maximum number
* of "active" objects has been reached).
*
* @see #WHEN_EXHAUSTED_BLOCK
* @see #WHEN_EXHAUSTED_FAIL
* @see #WHEN_EXHAUSTED_GROW
* @see #DEFAULT_WHEN_EXHAUSTED_ACTION
* @see #setWhenExhaustedAction
* @see #getWhenExhaustedAction
*/
private byte _whenExhaustedAction = DEFAULT_WHEN_EXHAUSTED_ACTION;
/**
* When <code>true</code>, objects will be
* {@link org.apache.commons.pool.PoolableObjectFactory#validateObject validated}
* before being returned by the {@link #borrowObject}
* method. If the object fails to validate,
* it will be dropped from the pool, and we will attempt
* to borrow another.
*
* @see #setTestOnBorrow
* @see #getTestOnBorrow
*/
private volatile boolean _testOnBorrow = DEFAULT_TEST_ON_BORROW;
/**
* When <code>true</code>, objects will be
* {@link org.apache.commons.pool.PoolableObjectFactory#validateObject validated}
* before being returned to the pool within the
* {@link #returnObject}.
*
* @see #getTestOnReturn
* @see #setTestOnReturn
*/
private volatile boolean _testOnReturn = DEFAULT_TEST_ON_RETURN;
/**
* When <code>true</code>, objects will be
* {@link org.apache.commons.pool.PoolableObjectFactory#validateObject validated}
* by the idle object evictor (if any). If an object
* fails to validate, it will be dropped from the pool.
*
* @see #setTestWhileIdle
* @see #getTestWhileIdle
* @see #getTimeBetweenEvictionRunsMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
private boolean _testWhileIdle = DEFAULT_TEST_WHILE_IDLE;
/**
* The number of milliseconds to sleep between runs of the
* idle object evictor thread.
* When non-positive, no idle object evictor thread will be
* run.
*
* @see #setTimeBetweenEvictionRunsMillis
* @see #getTimeBetweenEvictionRunsMillis
*/
private long _timeBetweenEvictionRunsMillis = DEFAULT_TIME_BETWEEN_EVICTION_RUNS_MILLIS;
/**
* The number of objects to examine during each run of the
* idle object evictor thread (if any).
* <p>
* When a negative value is supplied, <code>ceil({@link #getNumIdle})/abs({@link #getNumTestsPerEvictionRun})</code>
* tests will be run. I.e., when the value is <code>-n</code>, roughly one <code>n</code>th of the
* idle objects will be tested per run.
*
* @see #setNumTestsPerEvictionRun
* @see #getNumTestsPerEvictionRun
* @see #getTimeBetweenEvictionRunsMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
private int _numTestsPerEvictionRun = DEFAULT_NUM_TESTS_PER_EVICTION_RUN;
/**
* The minimum amount of time an object may sit idle in the pool
* before it is eligible for eviction by the idle object evictor
* (if any).
* When non-positive, no objects will be evicted from the pool
* due to idle time alone.
*
* @see #setMinEvictableIdleTimeMillis
* @see #getMinEvictableIdleTimeMillis
* @see #getTimeBetweenEvictionRunsMillis
* @see #setTimeBetweenEvictionRunsMillis
*/
private long _minEvictableIdleTimeMillis = DEFAULT_MIN_EVICTABLE_IDLE_TIME_MILLIS;
/** My hash of pools (ObjectQueue). */
private Map _poolMap = null;
/** The total number of active instances. */
private int _totalActive = 0;
/** The total number of idle instances. */
private int _totalIdle = 0;
/**
* The number of objects subject to some form of internal processing
* (usually creation or destruction) that should be included in the total
* number of objects but are neither active nor idle.
*/
private int _totalInternalProcessing = 0;
/** My {@link KeyedPoolableObjectFactory}. */
private KeyedPoolableObjectFactory _factory = null;
/**
* My idle object eviction {@link TimerTask}, if any.
*/
private Evictor _evictor = null;
/**
* A cursorable list of my pools.
* @see GenericKeyedObjectPool.Evictor#run
*/
private CursorableLinkedList _poolList = null;
private CursorableLinkedList.Cursor _evictionCursor = null;
private CursorableLinkedList.Cursor _evictionKeyCursor = null;
/** Whether or not the pools behave as LIFO queues (last in first out) */
private boolean _lifo = DEFAULT_LIFO;
}
| Align the GKOP borrowObject() code with that in GOP. In addition to making the code easier to understand this also:
- partially addresses POOL-125 since it moves calls to factory methods outside of sync blocks
- ports POOL-102 to GKOP
git-svn-id: a66ef3f0e6c00b14098e182847b4bd646263fa09@762476 13f79535-47bb-0310-9956-ffa450edef68
| src/java/org/apache/commons/pool/impl/GenericKeyedObjectPool.java | Align the GKOP borrowObject() code with that in GOP. In addition to making the code easier to understand this also: - partially addresses POOL-125 since it moves calls to factory methods outside of sync blocks - ports POOL-102 to GKOP |
|
Java | apache-2.0 | 1c35fc883b755c644e09e7d1a325ba937c89bd0d | 0 | cinovo/cloudconductor-server,cinovo/cloudconductor-server,cinovo/cloudconductor-server,cinovo/cloudconductor-server,cinovo/cloudconductor-server | package de.cinovo.cloudconductor.server.repo;
/*
* #%L cloudconductor-server %% Copyright (C) 2013 - 2014 Cinovo AG %% Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License. #L%
*/
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.fasterxml.jackson.databind.ObjectMapper;
import de.cinovo.cloudconductor.api.lib.helper.MapperFactory;
import de.cinovo.cloudconductor.api.model.PackageVersion;
import de.cinovo.cloudconductor.server.repo.indexer.IRepoIndexer;
import de.cinovo.cloudconductor.server.repo.provider.IRepoProvider;
import de.cinovo.cloudconductor.server.util.IPackageImport;
/**
* Copyright 2014 Hoegernet<br>
* <br>
*
* @author Thorsten Hoeger
*
*/
@Service("indextask")
public class IndexTask implements Runnable {
private static final Logger logger = LoggerFactory.getLogger(IndexTask.class);
@Autowired
private IRepoProvider repo;
@Autowired
private IRepoIndexer indexer;
@Autowired
private IPackageImport packageImport;
// FIXME initialize OR autowire but not both
@Autowired
private ObjectMapper mapper = MapperFactory.createDefault();
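	// Illustrative note, not part of this commit: the FIXME above would
	// typically be resolved by keeping exactly one initialization strategy,
	// e.g.
	//   private final ObjectMapper mapper = MapperFactory.createDefault();
	// without @Autowired, or an @Autowired field without the initializer.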
@Override
public void run() {
try {
Set<PackageVersion> latestIndex = this.indexer.getRepoIndex(this.repo);
if (latestIndex != null) {
this.packageImport.importVersions(latestIndex);
}
} catch (Exception e) {
IndexTask.logger.error("Error indexing repo", e);
}
}
}
| src/main/java/de/cinovo/cloudconductor/server/repo/IndexTask.java | package de.cinovo.cloudconductor.server.repo;
/*
* #%L cloudconductor-server %% Copyright (C) 2013 - 2014 Cinovo AG %% Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License. #L%
*/
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import com.fasterxml.jackson.databind.ObjectMapper;
import de.cinovo.cloudconductor.api.lib.helper.MapperFactory;
import de.cinovo.cloudconductor.api.model.PackageVersion;
import de.cinovo.cloudconductor.server.repo.indexer.IRepoIndexer;
import de.cinovo.cloudconductor.server.repo.provider.IRepoProvider;
import de.cinovo.cloudconductor.server.util.IPackageImport;
/**
* Copyright 2014 Hoegernet<br>
* <br>
*
* @author Thorsten Hoeger
*
*/
@Service("indextask")
public class IndexTask implements Runnable {
private static final Logger logger = LoggerFactory.getLogger(IndexTask.class);
@Autowired
private IRepoProvider repo;
@Autowired
private IRepoIndexer indexer;
@Autowired
private IPackageImport packageImport;
@Autowired
private ObjectMapper mapper = MapperFactory.createDefault();
@Override
public void run() {
try {
Set<PackageVersion> latestIndex = this.indexer.getRepoIndex(this.repo);
if (latestIndex != null) {
this.packageImport.importVersions(latestIndex);
}
} catch (Exception e) {
IndexTask.logger.error("Error indexing repo", e);
}
}
}
| add FIXME to strange code
| src/main/java/de/cinovo/cloudconductor/server/repo/IndexTask.java | add FIXME to strange code |
|
Java | apache-2.0 | 723cba35104d9650339aa58a48af6352cae7efdd | 0 | Hipparchus-Math/hipparchus,Hipparchus-Math/hipparchus,Hipparchus-Math/hipparchus,Hipparchus-Math/hipparchus | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* This is not the original file distributed by the Apache Software Foundation
* It has been modified by the Hipparchus project
*/
package org.hipparchus.analysis.differentiation;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.hipparchus.CalculusFieldElementAbstractTest;
import org.hipparchus.Field;
import org.hipparchus.UnitTestUtils;
import org.hipparchus.analysis.CalculusFieldMultivariateFunction;
import org.hipparchus.analysis.CalculusFieldMultivariateVectorFunction;
import org.hipparchus.analysis.polynomials.PolynomialFunction;
import org.hipparchus.exception.LocalizedCoreFormats;
import org.hipparchus.exception.MathIllegalArgumentException;
import org.hipparchus.random.RandomGenerator;
import org.hipparchus.random.Well1024a;
import org.hipparchus.random.Well19937a;
import org.hipparchus.util.ArithmeticUtils;
import org.hipparchus.util.CombinatoricsUtils;
import org.hipparchus.util.Decimal64Field;
import org.hipparchus.util.FastMath;
import org.hipparchus.util.FieldSinCos;
import org.hipparchus.util.FieldSinhCosh;
import org.hipparchus.util.Precision;
import org.junit.Assert;
import org.junit.Test;
/**
* Test for class {@link DerivativeStructure}.
*/
public class DerivativeStructureTest extends CalculusFieldElementAbstractTest<DerivativeStructure> {
@Override
protected DerivativeStructure build(final double x) {
return new DSFactory(2, 1).variable(0, x);
}
@Test(expected=MathIllegalArgumentException.class)
public void testWrongVariableIndex() {
new DSFactory(3, 1).variable(3, 1.0);
}
@Test(expected=MathIllegalArgumentException.class)
public void testMissingOrders() {
new DSFactory(3, 1).variable(0, 1.0).getPartialDerivative(0, 1);
}
@Test(expected=MathIllegalArgumentException.class)
public void testTooLargeOrder() {
new DSFactory(3, 1).variable(0, 1.0).getPartialDerivative(1, 1, 2);
}
@Test
public void testVariableWithoutDerivative0() {
DerivativeStructure v = new DSFactory(1, 0).variable(0, 1.0);
Assert.assertEquals(1.0, v.getValue(), 1.0e-15);
}
@Test(expected=MathIllegalArgumentException.class)
public void testVariableWithoutDerivative1() {
DerivativeStructure v = new DSFactory(1, 0).variable(0, 1.0);
Assert.assertEquals(1.0, v.getPartialDerivative(1), 1.0e-15);
}
@Test
public void testVariable() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
checkF0F1(factory.variable(0, 1.0), 1.0, 1.0, 0.0, 0.0);
checkF0F1(factory.variable(1, 2.0), 2.0, 0.0, 1.0, 0.0);
checkF0F1(factory.variable(2, 3.0), 3.0, 0.0, 0.0, 1.0);
}
}
@Test
public void testConstant() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
checkF0F1(factory.constant(FastMath.PI), FastMath.PI, 0.0, 0.0, 0.0);
}
}
@Test
public void testPrimitiveAdd() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
checkF0F1(factory.variable(0, 1.0).add(5), 6.0, 1.0, 0.0, 0.0);
checkF0F1(factory.variable(1, 2.0).add(5), 7.0, 0.0, 1.0, 0.0);
checkF0F1(factory.variable(2, 3.0).add(5), 8.0, 0.0, 0.0, 1.0);
}
}
@Test
public void testAdd() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
DerivativeStructure x = factory.variable(0, 1.0);
DerivativeStructure y = factory.variable(1, 2.0);
DerivativeStructure z = factory.variable(2, 3.0);
DerivativeStructure xyz = x.add(y.add(z));
checkF0F1(xyz, x.getValue() + y.getValue() + z.getValue(), 1.0, 1.0, 1.0);
}
}
@Test
public void testPrimitiveSubtract() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
checkF0F1(factory.variable(0, 1.0).subtract(5), -4.0, 1.0, 0.0, 0.0);
checkF0F1(factory.variable(1, 2.0).subtract(5), -3.0, 0.0, 1.0, 0.0);
checkF0F1(factory.variable(2, 3.0).subtract(5), -2.0, 0.0, 0.0, 1.0);
}
}
@Test
public void testSubtract() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
DerivativeStructure x = factory.variable(0, 1.0);
DerivativeStructure y = factory.variable(1, 2.0);
DerivativeStructure z = factory.variable(2, 3.0);
DerivativeStructure xyz = x.subtract(y.subtract(z));
checkF0F1(xyz, x.getValue() - (y.getValue() - z.getValue()), 1.0, -1.0, 1.0);
}
}
@Test
public void testPrimitiveMultiply() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
checkF0F1(factory.variable(0, 1.0).multiply(5), 5.0, 5.0, 0.0, 0.0);
checkF0F1(factory.variable(1, 2.0).multiply(5), 10.0, 0.0, 5.0, 0.0);
checkF0F1(factory.variable(2, 3.0).multiply(5), 15.0, 0.0, 0.0, 5.0);
}
}
@Test
public void testMultiply() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
DerivativeStructure x = factory.variable(0, 1.0);
DerivativeStructure y = factory.variable(1, 2.0);
DerivativeStructure z = factory.variable(2, 3.0);
DerivativeStructure xyz = x.multiply(y.multiply(z));
for (int i = 0; i <= maxOrder; ++i) {
for (int j = 0; j <= maxOrder; ++j) {
for (int k = 0; k <= maxOrder; ++k) {
if (i + j + k <= maxOrder) {
Assert.assertEquals((i == 0 ? x.getValue() : (i == 1 ? 1.0 : 0.0)) *
(j == 0 ? y.getValue() : (j == 1 ? 1.0 : 0.0)) *
(k == 0 ? z.getValue() : (k == 1 ? 1.0 : 0.0)),
xyz.getPartialDerivative(i, j, k),
1.0e-15);
}
}
}
}
}
}
@Test
public void testNegate() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
checkF0F1(factory.variable(0, 1.0).negate(), -1.0, -1.0, 0.0, 0.0);
checkF0F1(factory.variable(1, 2.0).negate(), -2.0, 0.0, -1.0, 0.0);
checkF0F1(factory.variable(2, 3.0).negate(), -3.0, 0.0, 0.0, -1.0);
}
}
@Test
public void testReciprocal() {
for (double x = 0.1; x < 1.2; x += 0.1) {
DerivativeStructure r = new DSFactory(1, 6).variable(0, x).reciprocal();
Assert.assertEquals(1 / x, r.getValue(), 1.0e-15);
for (int i = 1; i < r.getOrder(); ++i) {
double expected = ArithmeticUtils.pow(-1, i) * CombinatoricsUtils.factorial(i) /
FastMath.pow(x, i + 1);
Assert.assertEquals(expected, r.getPartialDerivative(i), 1.0e-15 * FastMath.abs(expected));
}
}
}
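    // The loop above checks the closed form for the nth derivative of 1/x:
    // d^n/dx^n (1/x) = (-1)^n * n! / x^(n+1); as a worked instance (not an
    // extra assertion from the original test), n = 2 and x = 0.5 give
    // 2 / 0.125 = 16.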
@Test
public void testPow() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
for (int n = 0; n < 10; ++n) {
DerivativeStructure x = factory.variable(0, 1.0);
DerivativeStructure y = factory.variable(1, 2.0);
DerivativeStructure z = factory.variable(2, 3.0);
List<DerivativeStructure> list = Arrays.asList(x, y, z,
x.add(y).add(z),
x.multiply(y).multiply(z));
if (n == 0) {
for (DerivativeStructure ds : list) {
checkEquals(ds.getField().getOne(), FastMath.pow(ds, n), 1.0e-15);
}
} else if (n == 1) {
for (DerivativeStructure ds : list) {
checkEquals(ds, FastMath.pow(ds, n), 1.0e-15);
}
} else {
for (DerivativeStructure ds : list) {
DerivativeStructure p = ds.getField().getOne();
for (int i = 0; i < n; ++i) {
p = p.multiply(ds);
}
checkEquals(p, FastMath.pow(ds, n), 1.0e-15);
}
}
}
}
}
@Test
public void testPowDoubleDS() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
DerivativeStructure x = factory.variable(0, 0.1);
DerivativeStructure y = factory.variable(1, 0.2);
DerivativeStructure z = factory.variable(2, 0.3);
List<DerivativeStructure> list = Arrays.asList(x, y, z,
x.add(y).add(z),
x.multiply(y).multiply(z));
for (DerivativeStructure ds : list) {
// the special case a = 0 is included here
for (double a : new double[] { 0.0, 0.1, 1.0, 2.0, 5.0 }) {
DerivativeStructure reference = (a == 0) ?
x.getField().getZero() :
FastMath.pow(new DSFactory(3, maxOrder).constant(a), ds);
DerivativeStructure result = DerivativeStructure.pow(a, ds);
checkEquals(reference, result, 1.0e-15);
}
}
            // negative base: a^x with a < 0 can only be evaluated for integer x, so the value is sometimes OK, but derivatives are always NaN
DerivativeStructure negEvenInteger = DerivativeStructure.pow(-2.0, factory.variable(0, 2.0));
Assert.assertEquals(4.0, negEvenInteger.getValue(), 1.0e-15);
Assert.assertTrue(Double.isNaN(negEvenInteger.getPartialDerivative(1, 0, 0)));
DerivativeStructure negOddInteger = DerivativeStructure.pow(-2.0, factory.variable(0, 3.0));
Assert.assertEquals(-8.0, negOddInteger.getValue(), 1.0e-15);
Assert.assertTrue(Double.isNaN(negOddInteger.getPartialDerivative(1, 0, 0)));
DerivativeStructure negNonInteger = DerivativeStructure.pow(-2.0, factory.variable(0, 2.001));
Assert.assertTrue(Double.isNaN(negNonInteger.getValue()));
Assert.assertTrue(Double.isNaN(negNonInteger.getPartialDerivative(1, 0, 0)));
DerivativeStructure zeroNeg = DerivativeStructure.pow(0.0, factory.variable(0, -1.0));
Assert.assertTrue(Double.isNaN(zeroNeg.getValue()));
Assert.assertTrue(Double.isNaN(zeroNeg.getPartialDerivative(1, 0, 0)));
DerivativeStructure posNeg = DerivativeStructure.pow(2.0, factory.variable(0, -2.0));
Assert.assertEquals(1.0 / 4.0, posNeg.getValue(), 1.0e-15);
Assert.assertEquals(FastMath.log(2.0) / 4.0, posNeg.getPartialDerivative(1, 0, 0), 1.0e-15);
// very special case: a = 0 and power = 0
DerivativeStructure zeroZero = DerivativeStructure.pow(0.0, factory.variable(0, 0.0));
// this should be OK for simple first derivative with one variable only ...
Assert.assertEquals(1.0, zeroZero.getValue(), 1.0e-15);
Assert.assertEquals(Double.NEGATIVE_INFINITY, zeroZero.getPartialDerivative(1, 0, 0), 1.0e-15);
// the following checks show a LIMITATION of the current implementation
// we have no way to tell x is a pure linear variable x = 0
// we only say: "x is a structure with value = 0.0,
// first derivative with respect to x = 1.0, and all other derivatives
// (first order with respect to y and z and higher derivatives) all 0.0.
// We have function f(x) = a^x and x = 0 so we compute:
// f(0) = 1, f'(0) = ln(a), f''(0) = ln(a)^2. The limit of these values
// when a converges to 0 implies all derivatives keep switching between
// +infinity and -infinity.
//
// Function composition rule for first derivatives is:
// d[f(g(x,y,z))]/dy = f'(g(x,y,z)) * dg(x,y,z)/dy
// so given that in our case x represents g and does not depend
// on y or z, we have dg(x,y,z)/dy = 0
// applying the composition rules gives:
// d[f(g(x,y,z))]/dy = f'(g(x,y,z)) * dg(x,y,z)/dy
// = -infinity * 0
// = NaN
// if we knew x is really the x variable and not the identity
// function applied to x, we would not have computed f'(g(x,y,z)) * dg(x,y,z)/dy
// and we would have found that the result was 0 and not NaN
Assert.assertTrue(Double.isNaN(zeroZero.getPartialDerivative(0, 1, 0)));
Assert.assertTrue(Double.isNaN(zeroZero.getPartialDerivative(0, 0, 1)));
// Function composition rule for second derivatives is:
// d2[f(g(x))]/dx2 = f''(g(x)) * [g'(x)]^2 + f'(g(x)) * g''(x)
// when function f is the a^x root and x = 0 we have:
// f(0) = 1, f'(0) = ln(a), f''(0) = ln(a)^2 which for a = 0 implies
// all derivatives keep switching between +infinity and -infinity
// so given that in our case x represents g, we have g(x) = 0,
// g'(x) = 1 and g''(x) = 0
// applying the composition rules gives:
// d2[f(g(x))]/dx2 = f''(g(x)) * [g'(x)]^2 + f'(g(x)) * g''(x)
// = +infinity * 1^2 + -infinity * 0
// = +infinity + NaN
// = NaN
// if we knew x is really the x variable and not the identity
// function applied to x, we would not have computed f'(g(x)) * g''(x)
// and we would have found that the result was +infinity and not NaN
if (maxOrder > 1) {
Assert.assertTrue(Double.isNaN(zeroZero.getPartialDerivative(2, 0, 0)));
Assert.assertTrue(Double.isNaN(zeroZero.getPartialDerivative(0, 2, 0)));
Assert.assertTrue(Double.isNaN(zeroZero.getPartialDerivative(0, 0, 2)));
Assert.assertTrue(Double.isNaN(zeroZero.getPartialDerivative(1, 1, 0)));
Assert.assertTrue(Double.isNaN(zeroZero.getPartialDerivative(0, 1, 1)));
                Assert.assertTrue(Double.isNaN(zeroZero.getPartialDerivative(1, 0, 1)));
}
// very special case: 0^0 where the power is a primitive
DerivativeStructure zeroDsZeroDouble = factory.variable(0, 0.0).pow(0.0);
boolean first = true;
for (final double d : zeroDsZeroDouble.getAllDerivatives()) {
if (first) {
Assert.assertEquals(1.0, d, Precision.EPSILON);
first = false;
} else {
Assert.assertEquals(0.0, d, Precision.SAFE_MIN);
}
}
DerivativeStructure zeroDsZeroInt = factory.variable(0, 0.0).pow(0);
first = true;
for (final double d : zeroDsZeroInt.getAllDerivatives()) {
if (first) {
Assert.assertEquals(1.0, d, Precision.EPSILON);
first = false;
} else {
Assert.assertEquals(0.0, d, Precision.SAFE_MIN);
}
}
// 0^p with p smaller than 1.0
DerivativeStructure u = factory.variable(1, -0.0).pow(0.25);
for (int i0 = 0; i0 <= maxOrder; ++i0) {
for (int i1 = 0; i1 <= maxOrder; ++i1) {
for (int i2 = 0; i2 <= maxOrder; ++i2) {
if (i0 + i1 + i2 <= maxOrder) {
Assert.assertEquals(0.0, u.getPartialDerivative(i0, i1, i2), 1.0e-10);
}
}
}
}
}
}
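    // Note (illustrative, mirroring the technique used below in
    // testRootNSingularity): the NaN limitation discussed above stems from
    // composing f with a variable whose higher-order derivatives are all
    // zero. When the true derivatives of the inner function are known they
    // can be supplied directly, e.g. for one variable at order 2:
    //
    //   DerivativeStructure g = new DSFactory(1, 2).build(0.0, 1.0, -1.0);
    //
    // so that the composition rules work on real derivative values instead
    // of producing 0 * infinity = NaN products.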
@Test
public void testScalb() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
DerivativeStructure x = factory.variable(0, 1.0);
DerivativeStructure y = factory.variable(1, 2.0);
DerivativeStructure z = factory.variable(2, 3.0);
DerivativeStructure xyz = x.multiply(y.multiply(z));
double s = 0.125;
for (int n = -3; n <= 3; ++n) {
DerivativeStructure scaled = xyz.scalb(n);
for (int i = 0; i <= maxOrder; ++i) {
for (int j = 0; j <= maxOrder; ++j) {
for (int k = 0; k <= maxOrder; ++k) {
if (i + j + k <= maxOrder) {
Assert.assertEquals((i == 0 ? x.getValue() : (i == 1 ? 1.0 : 0.0)) *
(j == 0 ? y.getValue() : (j == 1 ? 1.0 : 0.0)) *
(k == 0 ? z.getValue() : (k == 1 ? 1.0 : 0.0)) *
s,
scaled.getPartialDerivative(i, j, k),
1.0e-15);
}
}
}
}
s *= 2;
}
}
}
@Test
public void testUlp() {
        final RandomGenerator random = new Well19937a(0x85d201920b5be954L);
for (int k = 0; k < 10000; ++k) {
int maxOrder = 1 + random.nextInt(5);
DSFactory factory = new DSFactory(3, maxOrder);
DerivativeStructure x = factory.variable(0, FastMath.scalb(2 * random.nextDouble() - 1, random.nextInt(600) - 300));
DerivativeStructure y = factory.variable(1, FastMath.scalb(2 * random.nextDouble() - 1, random.nextInt(600) - 300));
DerivativeStructure z = factory.variable(2, FastMath.scalb(2 * random.nextDouble() - 1, random.nextInt(600) - 300));
DerivativeStructure xyz = x.multiply(y.multiply(z));
DerivativeStructure ulp = xyz.ulp();
boolean first = true;
for (double d : ulp.getAllDerivatives()) {
Assert.assertEquals(first ? FastMath.ulp(xyz.getValue()) : 0.0, d, 1.0e-15 * FastMath.ulp(xyz.getValue()));
first = false;
}
}
}
@Test
public void testExpression() {
DSFactory factory = new DSFactory(3, 5);
double epsilon = 2.5e-13;
for (double x = 0; x < 2; x += 0.2) {
DerivativeStructure dsX = factory.variable(0, x);
for (double y = 0; y < 2; y += 0.2) {
DerivativeStructure dsY = factory.variable(1, y);
                for (double z = 0; z > -2; z -= 0.2) {
DerivativeStructure dsZ = factory.variable(2, z);
// f(x, y, z) = x + 5 x y - 2 z + (8 z x - y)^3
DerivativeStructure ds =
dsX.linearCombination(1, dsX,
5, dsX.multiply(dsY),
-2, dsZ,
1, dsX.linearCombination(8, dsZ.multiply(dsX),
-1, dsY).pow(3));
DerivativeStructure dsOther =
dsX.linearCombination(1, dsX,
5, dsX.multiply(dsY),
-2, dsZ).add(dsX.linearCombination(8, dsZ.multiply(dsX),
-1, dsY).pow(3));
double f = x + 5 * x * y - 2 * z + FastMath.pow(8 * z * x - y, 3);
Assert.assertEquals(f, ds.getValue(),
FastMath.abs(epsilon * f));
Assert.assertEquals(f, dsOther.getValue(),
FastMath.abs(epsilon * f));
// df/dx = 1 + 5 y + 24 (8 z x - y)^2 z
double dfdx = 1 + 5 * y + 24 * z * FastMath.pow(8 * z * x - y, 2);
Assert.assertEquals(dfdx, ds.getPartialDerivative(1, 0, 0),
FastMath.abs(epsilon * dfdx));
Assert.assertEquals(dfdx, dsOther.getPartialDerivative(1, 0, 0),
FastMath.abs(epsilon * dfdx));
// df/dxdy = 5 + 48 z*(y - 8 z x)
double dfdxdy = 5 + 48 * z * (y - 8 * z * x);
Assert.assertEquals(dfdxdy, ds.getPartialDerivative(1, 1, 0),
FastMath.abs(epsilon * dfdxdy));
Assert.assertEquals(dfdxdy, dsOther.getPartialDerivative(1, 1, 0),
FastMath.abs(epsilon * dfdxdy));
// df/dxdydz = 48 (y - 16 z x)
double dfdxdydz = 48 * (y - 16 * z * x);
Assert.assertEquals(dfdxdydz, ds.getPartialDerivative(1, 1, 1),
FastMath.abs(epsilon * dfdxdydz));
Assert.assertEquals(dfdxdydz, dsOther.getPartialDerivative(1, 1, 1),
FastMath.abs(epsilon * dfdxdydz));
}
}
}
}
@Test
public void testCompositionOneVariableX() {
double epsilon = 1.0e-13;
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.1) {
DerivativeStructure dsX = factory.variable(0, x);
for (double y = 0.1; y < 1.2; y += 0.1) {
DerivativeStructure dsY = factory.constant(y);
DerivativeStructure f = dsX.divide(dsY).sqrt();
double f0 = FastMath.sqrt(x / y);
Assert.assertEquals(f0, f.getValue(), FastMath.abs(epsilon * f0));
if (f.getOrder() > 0) {
double f1 = 1 / (2 * FastMath.sqrt(x * y));
Assert.assertEquals(f1, f.getPartialDerivative(1), FastMath.abs(epsilon * f1));
if (f.getOrder() > 1) {
double f2 = -f1 / (2 * x);
Assert.assertEquals(f2, f.getPartialDerivative(2), FastMath.abs(epsilon * f2));
if (f.getOrder() > 2) {
double f3 = (f0 + x / (2 * y * f0)) / (4 * x * x * x);
Assert.assertEquals(f3, f.getPartialDerivative(3), FastMath.abs(epsilon * f3));
}
}
}
}
}
}
}
@Test
public void testTrigo() {
double epsilon = 2.0e-12;
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.1) {
DerivativeStructure dsX = factory.variable(0, x);
for (double y = 0.1; y < 1.2; y += 0.1) {
DerivativeStructure dsY = factory.variable(1, y);
for (double z = 0.1; z < 1.2; z += 0.1) {
DerivativeStructure dsZ = factory.variable(2, z);
DerivativeStructure f = FastMath.sin(dsX.divide(FastMath.cos(dsY).add(FastMath.tan(dsZ))));
double a = FastMath.cos(y) + FastMath.tan(z);
double f0 = FastMath.sin(x / a);
Assert.assertEquals(f0, f.getValue(), FastMath.abs(epsilon * f0));
if (f.getOrder() > 0) {
double dfdx = FastMath.cos(x / a) / a;
Assert.assertEquals(dfdx, f.getPartialDerivative(1, 0, 0), FastMath.abs(epsilon * dfdx));
double dfdy = x * FastMath.sin(y) * dfdx / a;
Assert.assertEquals(dfdy, f.getPartialDerivative(0, 1, 0), FastMath.abs(epsilon * dfdy));
double cz = FastMath.cos(z);
double cz2 = cz * cz;
double dfdz = -x * dfdx / (a * cz2);
Assert.assertEquals(dfdz, f.getPartialDerivative(0, 0, 1), FastMath.abs(epsilon * dfdz));
if (f.getOrder() > 1) {
double df2dx2 = -(f0 / (a * a));
Assert.assertEquals(df2dx2, f.getPartialDerivative(2, 0, 0), FastMath.abs(epsilon * df2dx2));
double df2dy2 = x * FastMath.cos(y) * dfdx / a -
x * x * FastMath.sin(y) * FastMath.sin(y) * f0 / (a * a * a * a) +
2 * FastMath.sin(y) * dfdy / a;
Assert.assertEquals(df2dy2, f.getPartialDerivative(0, 2, 0), FastMath.abs(epsilon * df2dy2));
double c4 = cz2 * cz2;
double df2dz2 = x * (2 * a * (1 - a * cz * FastMath.sin(z)) * dfdx - x * f0 / a ) / (a * a * a * c4);
Assert.assertEquals(df2dz2, f.getPartialDerivative(0, 0, 2), FastMath.abs(epsilon * df2dz2));
double df2dxdy = dfdy / x - x * FastMath.sin(y) * f0 / (a * a * a);
Assert.assertEquals(df2dxdy, f.getPartialDerivative(1, 1, 0), FastMath.abs(epsilon * df2dxdy));
}
}
}
}
}
}
}
@Test
public void testSqrtDefinition() {
double[] epsilon = new double[] { 5.0e-16, 5.0e-16, 2.0e-15, 5.0e-14, 2.0e-12 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure sqrt1 = dsX.pow(0.5);
DerivativeStructure sqrt2 = FastMath.sqrt(dsX);
DerivativeStructure zero = sqrt1.subtract(sqrt2);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testRootNSingularity() {
for (int n = 2; n < 10; ++n) {
for (int maxOrder = 0; maxOrder < 12; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
DerivativeStructure dsZero = factory.variable(0, 0.0);
DerivativeStructure rootN = dsZero.rootN(n);
Assert.assertEquals(0.0, rootN.getValue(), 1.0e-20);
if (maxOrder > 0) {
Assert.assertTrue(Double.isInfinite(rootN.getPartialDerivative(1)));
Assert.assertTrue(rootN.getPartialDerivative(1) > 0);
for (int order = 2; order <= maxOrder; ++order) {
                        // the following checks show a LIMITATION of the current implementation
// we have no way to tell dsZero is a pure linear variable x = 0
// we only say: "dsZero is a structure with value = 0.0,
// first derivative = 1.0, second and higher derivatives = 0.0".
// Function composition rule for second derivatives is:
// d2[f(g(x))]/dx2 = f''(g(x)) * [g'(x)]^2 + f'(g(x)) * g''(x)
// when function f is the nth root and x = 0 we have:
// f(0) = 0, f'(0) = +infinity, f''(0) = -infinity (and higher
// derivatives keep switching between +infinity and -infinity)
// so given that in our case dsZero represents g, we have g(x) = 0,
// g'(x) = 1 and g''(x) = 0
// applying the composition rules gives:
// d2[f(g(x))]/dx2 = f''(g(x)) * [g'(x)]^2 + f'(g(x)) * g''(x)
// = -infinity * 1^2 + +infinity * 0
// = -infinity + NaN
// = NaN
// if we knew dsZero is really the x variable and not the identity
// function applied to x, we would not have computed f'(g(x)) * g''(x)
// and we would have found that the result was -infinity and not NaN
Assert.assertTrue(Double.isNaN(rootN.getPartialDerivative(order)));
}
}
// the following shows that the limitation explained above is NOT a bug...
// if we set up the higher order derivatives for g appropriately, we do
// compute the higher order derivatives of the composition correctly
double[] gDerivatives = new double[ 1 + maxOrder];
gDerivatives[0] = 0.0;
for (int k = 1; k <= maxOrder; ++k) {
gDerivatives[k] = FastMath.pow(-1.0, k + 1);
}
DerivativeStructure correctRoot = factory.build(gDerivatives).rootN(n);
Assert.assertEquals(0.0, correctRoot.getValue(), 1.0e-20);
if (maxOrder > 0) {
Assert.assertTrue(Double.isInfinite(correctRoot.getPartialDerivative(1)));
Assert.assertTrue(correctRoot.getPartialDerivative(1) > 0);
for (int order = 2; order <= maxOrder; ++order) {
Assert.assertTrue(Double.isInfinite(correctRoot.getPartialDerivative(order)));
if ((order % 2) == 0) {
Assert.assertTrue(correctRoot.getPartialDerivative(order) < 0);
} else {
Assert.assertTrue(correctRoot.getPartialDerivative(order) > 0);
}
}
}
}
}
}
@Test
public void testSqrtPow2() {
double[] epsilon = new double[] { 1.0e-16, 3.0e-16, 2.0e-15, 6.0e-14, 6.0e-12 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = dsX.multiply(dsX).sqrt();
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testCbrtDefinition() {
double[] epsilon = new double[] { 4.0e-16, 9.0e-16, 6.0e-15, 2.0e-13, 4.0e-12 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure cbrt1 = dsX.pow(1.0 / 3.0);
DerivativeStructure cbrt2 = FastMath.cbrt(dsX);
DerivativeStructure zero = cbrt1.subtract(cbrt2);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testCbrtPow3() {
double[] epsilon = new double[] { 1.0e-16, 5.0e-16, 8.0e-15, 3.0e-13, 4.0e-11 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = dsX.multiply(dsX.multiply(dsX)).cbrt();
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testPowReciprocalPow() {
double[] epsilon = new double[] { 2.0e-15, 2.0e-14, 3.0e-13, 8.0e-12, 3.0e-10 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(2, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.01) {
DerivativeStructure dsX = factory.variable(0, x);
for (double y = 0.1; y < 1.2; y += 0.01) {
DerivativeStructure dsY = factory.variable(1, y);
DerivativeStructure rebuiltX = dsX.pow(dsY).pow(dsY.reciprocal());
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
for (int m = 0; m <= maxOrder; ++m) {
if (n + m <= maxOrder) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n, m), epsilon[n + m]);
}
}
}
}
}
}
}
@Test
public void testHypotDefinition() {
double epsilon = 1.0e-20;
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(2, maxOrder);
for (double x = -1.7; x < 2; x += 0.2) {
DerivativeStructure dsX = factory.variable(0, x);
for (double y = -1.7; y < 2; y += 0.2) {
DerivativeStructure dsY = factory.variable(1, y);
DerivativeStructure hypot = FastMath.hypot(dsY, dsX);
DerivativeStructure ref = dsX.multiply(dsX).add(dsY.multiply(dsY)).sqrt();
DerivativeStructure zero = hypot.subtract(ref);
for (int n = 0; n <= maxOrder; ++n) {
for (int m = 0; m <= maxOrder; ++m) {
if (n + m <= maxOrder) {
Assert.assertEquals(0, zero.getPartialDerivative(n, m), epsilon);
}
}
}
}
}
}
}
@Test
public void testHypotNoOverflow() {
DSFactory factory = new DSFactory(2, 5);
DerivativeStructure dsX = factory.variable(0, +3.0e250);
DerivativeStructure dsY = factory.variable(1, -4.0e250);
DerivativeStructure hypot = FastMath.hypot(dsX, dsY);
Assert.assertEquals(5.0e250, hypot.getValue(), 1.0e235);
Assert.assertEquals(dsX.getValue() / hypot.getValue(), hypot.getPartialDerivative(1, 0), 1.0e-10);
Assert.assertEquals(dsY.getValue() / hypot.getValue(), hypot.getPartialDerivative(0, 1), 1.0e-10);
DerivativeStructure sqrt = dsX.multiply(dsX).add(dsY.multiply(dsY)).sqrt();
Assert.assertTrue(Double.isInfinite(sqrt.getValue()));
}
@Test
    public void testHypotNegligible() {
DSFactory factory = new DSFactory(2, 5);
DerivativeStructure dsSmall = factory.variable(0, +3.0e-10);
DerivativeStructure dsLarge = factory.variable(1, -4.0e25);
Assert.assertEquals(dsLarge.abs().getValue(),
DerivativeStructure.hypot(dsSmall, dsLarge).getValue(),
1.0e-10);
Assert.assertEquals(0,
DerivativeStructure.hypot(dsSmall, dsLarge).getPartialDerivative(1, 0),
1.0e-10);
Assert.assertEquals(-1,
DerivativeStructure.hypot(dsSmall, dsLarge).getPartialDerivative(0, 1),
1.0e-10);
Assert.assertEquals(dsLarge.abs().getValue(),
DerivativeStructure.hypot(dsLarge, dsSmall).getValue(),
1.0e-10);
Assert.assertEquals(0,
DerivativeStructure.hypot(dsLarge, dsSmall).getPartialDerivative(1, 0),
1.0e-10);
Assert.assertEquals(-1,
DerivativeStructure.hypot(dsLarge, dsSmall).getPartialDerivative(0, 1),
1.0e-10);
}
@Test
public void testHypotSpecial() {
DSFactory factory = new DSFactory(2, 5);
Assert.assertTrue(Double.isNaN(DerivativeStructure.hypot(factory.variable(0, Double.NaN),
factory.variable(0, +3.0e250)).getValue()));
Assert.assertTrue(Double.isNaN(DerivativeStructure.hypot(factory.variable(0, +3.0e250),
factory.variable(0, Double.NaN)).getValue()));
Assert.assertTrue(Double.isInfinite(DerivativeStructure.hypot(factory.variable(0, Double.POSITIVE_INFINITY),
factory.variable(0, +3.0e250)).getValue()));
Assert.assertTrue(Double.isInfinite(DerivativeStructure.hypot(factory.variable(0, +3.0e250),
factory.variable(0, Double.POSITIVE_INFINITY)).getValue()));
}
@Test
public void testPrimitiveRemainder() {
double epsilon = 1.0e-15;
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(2, maxOrder);
for (double x = -1.7; x < 2; x += 0.2) {
DerivativeStructure dsX = factory.variable(0, x);
for (double y = -1.7; y < 2; y += 0.2) {
DerivativeStructure remainder = FastMath.IEEEremainder(dsX, y);
DerivativeStructure ref = dsX.subtract(x - FastMath.IEEEremainder(x, y));
DerivativeStructure zero = remainder.subtract(ref);
for (int n = 0; n <= maxOrder; ++n) {
for (int m = 0; m <= maxOrder; ++m) {
if (n + m <= maxOrder) {
Assert.assertEquals(0, zero.getPartialDerivative(n, m), epsilon);
}
}
}
}
}
}
}
@Test
public void testRemainder() {
double epsilon = 2.0e-15;
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(2, maxOrder);
for (double x = -1.7; x < 2; x += 0.2) {
DerivativeStructure dsX = factory.variable(0, x);
for (double y = -1.7; y < 2; y += 0.2) {
DerivativeStructure dsY = factory.variable(1, y);
DerivativeStructure remainder = FastMath.IEEEremainder(dsX, dsY);
DerivativeStructure ref = dsX.subtract(dsY.multiply((x - FastMath.IEEEremainder(x, y)) / y));
DerivativeStructure zero = remainder.subtract(ref);
for (int n = 0; n <= maxOrder; ++n) {
for (int m = 0; m <= maxOrder; ++m) {
if (n + m <= maxOrder) {
Assert.assertEquals(0, zero.getPartialDerivative(n, m), epsilon);
}
}
}
}
}
}
}
@Override
@Test
public void testExp() {
double[] epsilon = new double[] { 1.0e-16, 1.0e-16, 1.0e-16, 1.0e-16, 1.0e-16 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
double refExp = FastMath.exp(x);
DerivativeStructure exp = FastMath.exp(factory.variable(0, x));
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(refExp, exp.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testExpm1Definition() {
double epsilon = 3.0e-16;
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure expm11 = FastMath.expm1(dsX);
DerivativeStructure expm12 = dsX.exp().subtract(dsX.getField().getOne());
DerivativeStructure zero = expm11.subtract(expm12);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0, zero.getPartialDerivative(n), epsilon);
}
}
}
}
@Override
@Test
public void testLog() {
double[] epsilon = new double[] { 1.0e-16, 1.0e-16, 3.0e-14, 7.0e-13, 3.0e-11 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure log = FastMath.log(factory.variable(0, x));
Assert.assertEquals(FastMath.log(x), log.getValue(), epsilon[0]);
for (int n = 1; n <= maxOrder; ++n) {
double refDer = -CombinatoricsUtils.factorial(n - 1) / FastMath.pow(-x, n);
Assert.assertEquals(refDer, log.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testLog1pDefinition() {
double epsilon = 3.0e-16;
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
for (double x = 0.1; x < 1.2; x += 0.001) {
DSFactory factory = new DSFactory(1, maxOrder);
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure log1p1 = FastMath.log1p(dsX);
DerivativeStructure log1p2 = FastMath.log(dsX.add(dsX.getField().getOne()));
DerivativeStructure zero = log1p1.subtract(log1p2);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0, zero.getPartialDerivative(n), epsilon);
}
}
}
}
@Test
public void testLog10Definition() {
double[] epsilon = new double[] { 3.0e-16, 9.0e-16, 8.0e-15, 3.0e-13, 8.0e-12 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure log101 = FastMath.log10(dsX);
DerivativeStructure log102 = dsX.log().divide(FastMath.log(10.0));
DerivativeStructure zero = log101.subtract(log102);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testLogExp() {
double[] epsilon = new double[] { 2.0e-16, 2.0e-16, 3.0e-16, 2.0e-15, 6.0e-15 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = dsX.exp().log();
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testLog1pExpm1() {
double[] epsilon = new double[] { 6.0e-17, 3.0e-16, 5.0e-16, 9.0e-16, 6.0e-15 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = dsX.expm1().log1p();
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testLog10Power() {
double[] epsilon = new double[] { 3.0e-16, 3.0e-16, 9.0e-16, 6.0e-15, 6.0e-14 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = factory.constant(10.0).pow(dsX).log10();
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testSinCosSeparated() {
double epsilon = 5.0e-16;
for (int maxOrder = 0; maxOrder < 6; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure sin = FastMath.sin(dsX);
DerivativeStructure cos = FastMath.cos(dsX);
double s = FastMath.sin(x);
double c = FastMath.cos(x);
for (int n = 0; n <= maxOrder; ++n) {
switch (n % 4) {
case 0 :
Assert.assertEquals( s, sin.getPartialDerivative(n), epsilon);
Assert.assertEquals( c, cos.getPartialDerivative(n), epsilon);
break;
case 1 :
Assert.assertEquals( c, sin.getPartialDerivative(n), epsilon);
Assert.assertEquals(-s, cos.getPartialDerivative(n), epsilon);
break;
case 2 :
Assert.assertEquals(-s, sin.getPartialDerivative(n), epsilon);
Assert.assertEquals(-c, cos.getPartialDerivative(n), epsilon);
break;
default :
Assert.assertEquals(-c, sin.getPartialDerivative(n), epsilon);
Assert.assertEquals( s, cos.getPartialDerivative(n), epsilon);
break;
}
}
}
}
}
@Test
public void testSinCosCombined() {
double epsilon = 5.0e-16;
for (int maxOrder = 0; maxOrder < 6; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
FieldSinCos<DerivativeStructure> sinCos = FastMath.sinCos(dsX);
double s = FastMath.sin(x);
double c = FastMath.cos(x);
for (int n = 0; n <= maxOrder; ++n) {
switch (n % 4) {
case 0 :
Assert.assertEquals( s, sinCos.sin().getPartialDerivative(n), epsilon);
Assert.assertEquals( c, sinCos.cos().getPartialDerivative(n), epsilon);
break;
case 1 :
Assert.assertEquals( c, sinCos.sin().getPartialDerivative(n), epsilon);
Assert.assertEquals(-s, sinCos.cos().getPartialDerivative(n), epsilon);
break;
case 2 :
Assert.assertEquals(-s, sinCos.sin().getPartialDerivative(n), epsilon);
Assert.assertEquals(-c, sinCos.cos().getPartialDerivative(n), epsilon);
break;
default :
Assert.assertEquals(-c, sinCos.sin().getPartialDerivative(n), epsilon);
Assert.assertEquals( s, sinCos.cos().getPartialDerivative(n), epsilon);
break;
}
}
}
}
}
@Test
public void testSinAsin() {
double[] epsilon = new double[] { 3.0e-16, 5.0e-16, 3.0e-15, 2.0e-14, 4.0e-13 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = FastMath.asin(FastMath.sin(dsX));
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testCosAcos() {
double[] epsilon = new double[] { 6.0e-16, 6.0e-15, 2.0e-13, 4.0e-12, 2.0e-10 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = FastMath.acos(FastMath.cos(dsX));
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testTanAtan() {
double[] epsilon = new double[] { 6.0e-17, 2.0e-16, 2.0e-15, 4.0e-14, 2.0e-12 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = FastMath.atan(FastMath.tan(dsX));
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testTangentDefinition() {
double[] epsilon = new double[] { 5.0e-16, 2.0e-15, 3.0e-14, 5.0e-13, 2.0e-11 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure tan1 = dsX.sin().divide(dsX.cos());
DerivativeStructure tan2 = dsX.tan();
DerivativeStructure zero = tan1.subtract(tan2);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Override
@Test
public void testAtan2() {
double[] epsilon = new double[] { 5.0e-16, 3.0e-15, 2.2e-14, 1.0e-12, 8.0e-11 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(2, maxOrder);
for (double x = -1.7; x < 2; x += 0.2) {
DerivativeStructure dsX = factory.variable(0, x);
for (double y = -1.7; y < 2; y += 0.2) {
DerivativeStructure dsY = factory.variable(1, y);
DerivativeStructure atan2 = FastMath.atan2(dsY, dsX);
DerivativeStructure ref = dsY.divide(dsX).atan();
if (x < 0) {
ref = (y < 0) ? ref.subtract(FastMath.PI) : ref.add(FastMath.PI);
}
DerivativeStructure zero = atan2.subtract(ref);
for (int n = 0; n <= maxOrder; ++n) {
for (int m = 0; m <= maxOrder; ++m) {
if (n + m <= maxOrder) {
Assert.assertEquals(0, zero.getPartialDerivative(n, m), epsilon[n + m]);
}
}
}
}
}
}
}
@Test
public void testAtan2SpecialCasesDerivative() {
DSFactory factory = new DSFactory(2, 2);
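// check the four signed-zero combinations of atan2 at the origin (IEEE 754 sign conventions)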
DerivativeStructure pp =
DerivativeStructure.atan2(factory.variable(1, +0.0),
factory.variable(1, +0.0));
Assert.assertEquals(0, pp.getValue(), 1.0e-15);
Assert.assertEquals(+1, FastMath.copySign(1, pp.getValue()), 1.0e-15);
DerivativeStructure pn =
DerivativeStructure.atan2(factory.variable(1, +0.0),
factory.variable(1, -0.0));
Assert.assertEquals(FastMath.PI, pn.getValue(), 1.0e-15);
DerivativeStructure np =
DerivativeStructure.atan2(factory.variable(1, -0.0),
factory.variable(1, +0.0));
Assert.assertEquals(0, np.getValue(), 1.0e-15);
Assert.assertEquals(-1, FastMath.copySign(1, np.getValue()), 1.0e-15);
DerivativeStructure nn =
DerivativeStructure.atan2(factory.variable(1, -0.0),
factory.variable(1, -0.0));
Assert.assertEquals(-FastMath.PI, nn.getValue(), 1.0e-15);
}
@Test
public void testSinhCoshCombined() {
double epsilon = 5.0e-16;
for (int maxOrder = 0; maxOrder < 6; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
FieldSinhCosh<DerivativeStructure> sinhCosh = FastMath.sinhCosh(dsX);
double sh = FastMath.sinh(x);
double ch = FastMath.cosh(x);
for (int n = 0; n <= maxOrder; ++n) {
if (n % 2 == 0) {
Assert.assertEquals(sh, sinhCosh.sinh().getPartialDerivative(n), epsilon);
Assert.assertEquals(ch, sinhCosh.cosh().getPartialDerivative(n), epsilon);
} else {
Assert.assertEquals(ch, sinhCosh.sinh().getPartialDerivative(n), epsilon);
Assert.assertEquals(sh, sinhCosh.cosh().getPartialDerivative(n), epsilon);
}
}
}
}
}
@Test
public void testSinhDefinition() {
double[] epsilon = new double[] { 3.0e-16, 3.0e-16, 5.0e-16, 2.0e-15, 6.0e-15 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure sinh1 = dsX.exp().subtract(dsX.exp().reciprocal()).multiply(0.5);
DerivativeStructure sinh2 = FastMath.sinh(dsX);
DerivativeStructure zero = sinh1.subtract(sinh2);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testCoshDefinition() {
double[] epsilon = new double[] { 3.0e-16, 3.0e-16, 5.0e-16, 2.0e-15, 6.0e-15 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure cosh1 = dsX.exp().add(dsX.exp().reciprocal()).multiply(0.5);
DerivativeStructure cosh2 = FastMath.cosh(dsX);
DerivativeStructure zero = cosh1.subtract(cosh2);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testTanhDefinition() {
double[] epsilon = new double[] { 3.0e-16, 5.0e-16, 7.0e-16, 3.0e-15, 2.0e-14 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure tanh1 = dsX.exp().subtract(dsX.exp().reciprocal()).divide(dsX.exp().add(dsX.exp().reciprocal()));
DerivativeStructure tanh2 = FastMath.tanh(dsX);
DerivativeStructure zero = tanh1.subtract(tanh2);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testSinhAsinh() {
double[] epsilon = new double[] { 3.0e-16, 3.0e-16, 4.0e-16, 7.0e-16, 3.0e-15, 8.0e-15 };
for (int maxOrder = 0; maxOrder < 6; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = FastMath.asinh(dsX.sinh());
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testCoshAcosh() {
double[] epsilon = new double[] { 2.0e-15, 1.0e-14, 2.0e-13, 6.0e-12, 3.0e-10, 2.0e-8 };
for (int maxOrder = 0; maxOrder < 6; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = FastMath.acosh(dsX.cosh());
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testTanhAtanh() {
double[] epsilon = new double[] { 3.0e-16, 2.0e-16, 7.0e-16, 4.0e-15, 3.0e-14, 4.0e-13 };
for (int maxOrder = 0; maxOrder < 6; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = FastMath.atanh(dsX.tanh());
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testCompositionOneVariableY() {
double epsilon = 1.0e-13;
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.1) {
DerivativeStructure dsX = factory.constant(x);
for (double y = 0.1; y < 1.2; y += 0.1) {
DerivativeStructure dsY = factory.variable(0, y);
DerivativeStructure f = dsX.divide(dsY).sqrt();
double f0 = FastMath.sqrt(x / y);
Assert.assertEquals(f0, f.getValue(), FastMath.abs(epsilon * f0));
if (f.getOrder() > 0) {
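// analytical derivatives of f(y) = sqrt(x / y) with respect to y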
double f1 = -x / (2 * y * y * f0);
Assert.assertEquals(f1, f.getPartialDerivative(1), FastMath.abs(epsilon * f1));
if (f.getOrder() > 1) {
double f2 = (f0 - x / (4 * y * f0)) / (y * y);
Assert.assertEquals(f2, f.getPartialDerivative(2), FastMath.abs(epsilon * f2));
if (f.getOrder() > 2) {
double f3 = (x / (8 * y * f0) - 2 * f0) / (y * y * y);
Assert.assertEquals(f3, f.getPartialDerivative(3), FastMath.abs(epsilon * f3));
}
}
}
}
}
}
}
@Test
public void testTaylorPolynomial() {
DSFactory factory = new DSFactory(3, 4);
for (double x = 0; x < 1.2; x += 0.1) {
DerivativeStructure dsX = factory.variable(0, x);
for (double y = 0; y < 1.2; y += 0.2) {
DerivativeStructure dsY = factory.variable(1, y);
for (double z = 0; z < 1.2; z += 0.2) {
DerivativeStructure dsZ = factory.variable(2, z);
DerivativeStructure f = dsX.multiply(dsY).add(dsZ).multiply(dsX).multiply(dsY);
for (double dx = -0.2; dx < 0.2; dx += 0.2) {
for (double dy = -0.2; dy < 0.2; dy += 0.1) {
for (double dz = -0.2; dz < 0.2; dz += 0.1) {
double ref = (x + dx) * (y + dy) * ((x + dx) * (y + dy) + (z + dz));
Assert.assertEquals(ref, f.taylor(dx, dy, dz), 2.0e-15);
}
}
}
}
}
}
}
@Test
public void testTaylorAtan2() {
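// expected[k] is the maximum Taylor approximation error observed at order k over the sampled (dx, dy) neighborhood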
double[] expected = new double[] { 0.214, 0.0241, 0.00422, 6.48e-4, 8.04e-5 };
double x0 = 0.1;
double y0 = -0.3;
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(2, maxOrder);
DerivativeStructure dsX = factory.variable(0, x0);
DerivativeStructure dsY = factory.variable(1, y0);
DerivativeStructure atan2 = DerivativeStructure.atan2(dsY, dsX);
double maxError = 0;
for (double dx = -0.05; dx < 0.05; dx += 0.001) {
for (double dy = -0.05; dy < 0.05; dy += 0.001) {
double ref = FastMath.atan2(y0 + dy, x0 + dx);
maxError = FastMath.max(maxError, FastMath.abs(ref - atan2.taylor(dx, dy)));
}
}
Assert.assertEquals(0.0, expected[maxOrder] - maxError, 0.01 * expected[maxOrder]);
}
}
@Test
public void testAbs() {
DSFactory factory = new DSFactory(1, 1);
DerivativeStructure minusOne = factory.variable(0, -1.0);
Assert.assertEquals(+1.0, FastMath.abs(minusOne).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.abs(minusOne).getPartialDerivative(1), 1.0e-15);
DerivativeStructure plusOne = factory.variable(0, +1.0);
Assert.assertEquals(+1.0, FastMath.abs(plusOne).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(+1.0, FastMath.abs(plusOne).getPartialDerivative(1), 1.0e-15);
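// at a signed zero, the derivative of abs follows the sign bit of the zero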
DerivativeStructure minusZero = factory.variable(0, -0.0);
Assert.assertEquals(+0.0, FastMath.abs(minusZero).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.abs(minusZero).getPartialDerivative(1), 1.0e-15);
DerivativeStructure plusZero = factory.variable(0, +0.0);
Assert.assertEquals(+0.0, FastMath.abs(plusZero).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(+1.0, FastMath.abs(plusZero).getPartialDerivative(1), 1.0e-15);
}
@Override
@Test
public void testSign() {
DSFactory factory = new DSFactory(1, 1);
DerivativeStructure minusOne = factory.variable(0, -1.0);
Assert.assertEquals(-1.0, FastMath.sign(minusOne).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals( 0.0, FastMath.sign(minusOne).getPartialDerivative(1), 1.0e-15);
DerivativeStructure plusOne = factory.variable(0, +1.0);
Assert.assertEquals(+1.0, FastMath.sign(plusOne).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals( 0.0, FastMath.sign(plusOne).getPartialDerivative(1), 1.0e-15);
DerivativeStructure minusZero = factory.variable(0, -0.0);
Assert.assertEquals(-0.0, FastMath.sign(minusZero).getPartialDerivative(0), 1.0e-15);
Assert.assertTrue(Double.doubleToLongBits(FastMath.sign(minusZero).getValue()) < 0);
Assert.assertEquals( 0.0, FastMath.sign(minusZero).getPartialDerivative(1), 1.0e-15);
DerivativeStructure plusZero = factory.variable(0, +0.0);
Assert.assertEquals(+0.0, FastMath.sign(plusZero).getPartialDerivative(0), 1.0e-15);
Assert.assertTrue(Double.doubleToLongBits(FastMath.sign(plusZero).getValue()) == 0);
Assert.assertEquals( 0.0, FastMath.sign(plusZero).getPartialDerivative(1), 1.0e-15);
}
@Test
public void testCeilFloorRintLong() {
DSFactory factory = new DSFactory(1, 1);
DerivativeStructure x = factory.variable(0, -1.5);
Assert.assertEquals(-1.5, x.getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(+1.0, x.getPartialDerivative(1), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.ceil(x).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(+0.0, FastMath.ceil(x).getPartialDerivative(1), 1.0e-15);
Assert.assertEquals(-2.0, FastMath.floor(x).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(+0.0, FastMath.floor(x).getPartialDerivative(1), 1.0e-15);
Assert.assertEquals(-2.0, FastMath.rint(x).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(+0.0, FastMath.rint(x).getPartialDerivative(1), 1.0e-15);
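// rint rounds half to even: rint(-2.5) = -2.0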
Assert.assertEquals(-2.0, x.subtract(x.getField().getOne()).rint().getPartialDerivative(0), 1.0e-15);
}
@Test
public void testCopySign() {
DSFactory factory = new DSFactory(1, 1);
DerivativeStructure minusOne = factory.variable(0, -1.0);
Assert.assertEquals(+1.0, FastMath.copySign(minusOne, +1.0).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.copySign(minusOne, +1.0).getPartialDerivative(1), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.copySign(minusOne, -1.0).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(+1.0, FastMath.copySign(minusOne, -1.0).getPartialDerivative(1), 1.0e-15);
Assert.assertEquals(+1.0, FastMath.copySign(minusOne, +0.0).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.copySign(minusOne, +0.0).getPartialDerivative(1), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.copySign(minusOne, -0.0).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(+1.0, FastMath.copySign(minusOne, -0.0).getPartialDerivative(1), 1.0e-15);
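// a NaN sign argument is treated as positive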
Assert.assertEquals(+1.0, FastMath.copySign(minusOne, Double.NaN).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.copySign(minusOne, Double.NaN).getPartialDerivative(1), 1.0e-15);
DerivativeStructure plusOne = factory.variable(0, +1.0);
Assert.assertEquals(+1.0, FastMath.copySign(plusOne, factory.constant(+1.0)).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(+1.0, FastMath.copySign(plusOne, factory.constant(+1.0)).getPartialDerivative(1), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.copySign(plusOne, factory.constant(-1.0)).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.copySign(plusOne, factory.constant(-1.0)).getPartialDerivative(1), 1.0e-15);
Assert.assertEquals(+1.0, FastMath.copySign(plusOne, factory.constant(+0.0)).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(+1.0, FastMath.copySign(plusOne, factory.constant(+0.0)).getPartialDerivative(1), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.copySign(plusOne, factory.constant(-0.0)).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.copySign(plusOne, factory.constant(-0.0)).getPartialDerivative(1), 1.0e-15);
Assert.assertEquals(+1.0, FastMath.copySign(plusOne, factory.constant(Double.NaN)).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(+1.0, FastMath.copySign(plusOne, factory.constant(Double.NaN)).getPartialDerivative(1), 1.0e-15);
}
@Test
public void testToDegreesDefinition() {
double epsilon = 3.0e-16;
for (int maxOrder = 0; maxOrder < 6; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
Assert.assertEquals(FastMath.toDegrees(x), dsX.toDegrees().getValue(), epsilon);
for (int n = 1; n <= maxOrder; ++n) {
if (n == 1) {
Assert.assertEquals(180 / FastMath.PI, dsX.toDegrees().getPartialDerivative(1), epsilon);
} else {
Assert.assertEquals(0.0, dsX.toDegrees().getPartialDerivative(n), epsilon);
}
}
}
}
}
@Test
public void testToRadiansDefinition() {
double epsilon = 3.0e-16;
for (int maxOrder = 0; maxOrder < 6; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
Assert.assertEquals(FastMath.toRadians(x), dsX.toRadians().getValue(), epsilon);
for (int n = 1; n <= maxOrder; ++n) {
if (n == 1) {
Assert.assertEquals(FastMath.PI / 180, dsX.toRadians().getPartialDerivative(1), epsilon);
} else {
Assert.assertEquals(0.0, dsX.toRadians().getPartialDerivative(n), epsilon);
}
}
}
}
}
@Test
public void testDegRad() {
double epsilon = 3.0e-16;
for (int maxOrder = 0; maxOrder < 6; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = dsX.toDegrees().toRadians();
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon);
}
}
}
}
@Test(expected=MathIllegalArgumentException.class)
public void testComposeMismatchedDimensions() {
new DSFactory(1, 3).variable(0, 1.2).compose(new double[3]);
}
@Test
public void testCompose() {
double[] epsilon = new double[] { 1.0e-20, 5.0e-14, 2.0e-13, 3.0e-13, 2.0e-13, 1.0e-20 };
PolynomialFunction poly =
new PolynomialFunction(new double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0 });
for (int maxOrder = 0; maxOrder < 6; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
PolynomialFunction[] p = new PolynomialFunction[maxOrder + 1];
p[0] = poly;
for (int i = 1; i <= maxOrder; ++i) {
p[i] = p[i - 1].polynomialDerivative();
}
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure dsY1 = dsX.getField().getZero();
for (int i = poly.degree(); i >= 0; --i) {
dsY1 = dsY1.multiply(dsX).add(poly.getCoefficients()[i]);
}
double[] f = new double[maxOrder + 1];
for (int i = 0; i < f.length; ++i) {
f[i] = p[i].value(x);
}
DerivativeStructure dsY2 = dsX.compose(f);
DerivativeStructure zero = dsY1.subtract(dsY2);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testIntegration() {
// check that first-order integration on two variables does not depend on sequence of operations
final RandomGenerator random = new Well19937a(0x87bb96d6e11557bdL);
final DSFactory factory = new DSFactory(3, 7);
final int size = factory.getCompiler().getSize();
for (int count = 0; count < 100; ++count) {
final double[] data = new double[size];
for (int i = 0; i < size; i++) {
data[i] = random.nextDouble();
}
final DerivativeStructure f = factory.build(data);
final DerivativeStructure i2fIxIy = f.integrate(0, 1).integrate(1, 1);
final DerivativeStructure i2fIyIx = f.integrate(1, 1).integrate(0, 1);
checkEquals(i2fIxIy, i2fIyIx, 0.);
}
}
@Test
public void testIntegrationGreaterThanOrder() {
// check that integration to too high an order yields zero,
// since integration constants are set to zero
final RandomGenerator random = new Well19937a(0x4744a847b11e4c6fL);
final DSFactory factory = new DSFactory(3, 7);
final int size = factory.getCompiler().getSize();
for (int count = 0; count < 100; ++count) {
final double[] data = new double[size];
for (int i = 0; i < size; i++) {
data[i] = random.nextDouble();
}
final DerivativeStructure f = factory.build(data);
for (int index = 0; index < factory.getCompiler().getFreeParameters(); ++index) {
final DerivativeStructure integ = f.integrate(index, factory.getCompiler().getOrder() + 1);
checkEquals(factory.constant(0), integ, 0.);
}
}
}
@Test
public void testIntegrationNoOp() {
// check that integration of order 0 is no-op
final RandomGenerator random = new Well19937a(0x75a35152f30f644bL);
final DSFactory factory = new DSFactory(3, 7);
final int size = factory.getCompiler().getSize();
for (int count = 0; count < 100; ++count) {
final double[] data = new double[size];
for (int i = 0; i < size; i++) {
data[i] = random.nextDouble();
}
final DerivativeStructure f = factory.build(data);
for (int index = 0; index < factory.getCompiler().getFreeParameters(); ++index) {
final DerivativeStructure integ = f.integrate(index, 0);
checkEquals(f, integ, 0.);
}
}
}
@Test
public void testDifferentiationNoOp() {
// check that differentiation of order 0 is no-op
final RandomGenerator random = new Well19937a(0x3b6ae4c2f1282949L);
final DSFactory factory = new DSFactory(3, 7);
final int size = factory.getCompiler().getSize();
for (int count = 0; count < 100; ++count) {
final double[] data = new double[size];
for (int i = 0; i < size; i++) {
data[i] = random.nextDouble();
}
final DerivativeStructure f = factory.build(data);
for (int index = 0; index < factory.getCompiler().getFreeParameters(); ++index) {
final DerivativeStructure integ = f.differentiate(index, 0);
checkEquals(f, integ, 0.);
}
}
}
@Test
public void testIntegrationDifferentiation() {
// check that integration and differentiation for univariate functions are inverses of each other,
// except for the constant term and the highest order one
final RandomGenerator random = new Well19937a(0x67fe66c05e5ee222L);
final DSFactory factory = new DSFactory(1, 25);
final int size = factory.getCompiler().getSize();
for (int count = 0; count < 100; ++count) {
final double[] data = new double[size];
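// leave the constant term and the highest order term at zero so that the round trips below are exact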
for (int i = 1; i < size - 1; i++) {
data[i] = random.nextDouble();
}
final int indexVar = 0;
final DerivativeStructure f = factory.build(data);
final DerivativeStructure f2 = f.integrate(indexVar, 1).differentiate(indexVar, 1);
final DerivativeStructure f3 = f.differentiate(indexVar, 1).integrate(indexVar, 1);
checkEquals(f2, f, 0.);
checkEquals(f2, f3, 0.);
// check special case when non-positive integration order actually returns differentiation
final DerivativeStructure df = f.integrate(indexVar, -1);
final DerivativeStructure df2 = f.differentiate(indexVar, 1);
checkEquals(df, df2, 0.);
// check special case when non-positive differentiation order actually returns integration
final DerivativeStructure fi = f.differentiate(indexVar, -1);
final DerivativeStructure fi2 = f.integrate(indexVar, 1);
checkEquals(fi, fi2, 0.);
}
}
@Test
public void testDifferentiation1() {
// check differentiation operator with result obtained manually
final int freeParam = 3;
final int order = 5;
final DSFactory factory = new DSFactory(freeParam, order);
final DerivativeStructure f = factory.variable(0, 1.0);
final int[] orders = new int[freeParam];
orders[0] = 2;
orders[1] = 1;
orders[2] = 1;
final double value = 10.;
f.setDerivativeComponent(factory.getCompiler().getPartialDerivativeIndex(orders), value);
final DerivativeStructure dfDx = f.differentiate(0, 1);
orders[0] -= 1;
Assert.assertEquals(1., dfDx.getPartialDerivative(new int[freeParam]), 0.);
Assert.assertEquals(value, dfDx.getPartialDerivative(orders), 0.);
checkEquals(factory.constant(0), f.differentiate(0, order + 1), 0.);
}
@Test
public void testDifferentiation2() {
// check that first-order differentiation twice is same as second-order differentiation
final RandomGenerator random = new Well19937a(0xec293aaee352de94L);
final DSFactory factory = new DSFactory(5, 4);
final int size = factory.getCompiler().getSize();
for (int count = 0; count < 100; ++count) {
final double[] data = new double[size];
for (int i = 0; i < size; i++) {
data[i] = random.nextDouble();
}
final DerivativeStructure f = factory.build(data);
final DerivativeStructure d2fDx2 = f.differentiate(0, 1).differentiate(0, 1);
final DerivativeStructure d2fDx2Bis = f.differentiate(0, 2);
checkEquals(d2fDx2, d2fDx2Bis, 0.);
}
}
@Test
public void testDifferentiation3() {
// check that first-order differentiation on two variables does not depend on sequence of operations
final RandomGenerator random = new Well19937a(0x35409ecc1348e46cL);
final DSFactory factory = new DSFactory(3, 7);
final int size = factory.getCompiler().getSize();
for (int count = 0; count < 100; ++count) {
final double[] data = new double[size];
for (int i = 0; i < size; i++) {
data[i] = random.nextDouble();
}
final DerivativeStructure f = factory.build(data);
final DerivativeStructure d2fDxDy = f.differentiate(0, 1).differentiate(1, 1);
final DerivativeStructure d2fDyDx = f.differentiate(1, 1).differentiate(0, 1);
checkEquals(d2fDxDy, d2fDyDx, 0.);
}
}
@Test
public void testField() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
DerivativeStructure x = factory.variable(0, 1.0);
checkF0F1(x.getField().getZero(), 0.0, 0.0, 0.0, 0.0);
checkF0F1(x.getField().getOne(), 1.0, 0.0, 0.0, 0.0);
Assert.assertEquals(maxOrder, x.getField().getZero().getOrder());
Assert.assertEquals(3, x.getField().getZero().getFreeParameters());
Assert.assertEquals(DerivativeStructure.class, x.getField().getRuntimeClass());
}
}
@Test
public void testOneParameterConstructor() {
double x = 1.2;
double cos = FastMath.cos(x);
double sin = FastMath.sin(x);
DSFactory factory = new DSFactory(1, 4);
DerivativeStructure yRef = factory.variable(0, x).cos();
try {
new DSFactory(1, 4).build(0.0, 0.0);
Assert.fail("an exception should have been thrown");
} catch (MathIllegalArgumentException dme) {
// expected
} catch (Exception e) {
Assert.fail("wrong exceptionc caught " + e.getClass().getName());
}
double[] derivatives = new double[] { cos, -sin, -cos, sin, cos };
DerivativeStructure y = factory.build(derivatives);
checkEquals(yRef, y, 1.0e-15);
UnitTestUtils.assertEquals(derivatives, y.getAllDerivatives(), 1.0e-15);
}
@Test
public void testOneOrderConstructor() {
DSFactory factory = new DSFactory(3, 1);
double x = 1.2;
double y = 2.4;
double z = 12.5;
DerivativeStructure xRef = factory.variable(0, x);
DerivativeStructure yRef = factory.variable(1, y);
DerivativeStructure zRef = factory.variable(2, z);
try {
new DSFactory(3, 1).build(x + y - z, 1.0, 1.0);
Assert.fail("an exception should have been thrown");
} catch (MathIllegalArgumentException dme) {
// expected
} catch (Exception e) {
Assert.fail("wrong exceptionc caught " + e.getClass().getName());
}
double[] derivatives = new double[] { x + y - z, 1.0, 1.0, -1.0 };
DerivativeStructure t = factory.build(derivatives);
checkEquals(xRef.add(yRef.subtract(zRef)), t, 1.0e-15);
UnitTestUtils.assertEquals(derivatives, xRef.add(yRef.subtract(zRef)).getAllDerivatives(), 1.0e-15);
}
@Test
public void testLinearCombination1DSDS() {
DSFactory factory = new DSFactory(6, 1);
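// extreme values deliberately chosen so that a naive dot product would lose all accuracy to cancellation
// (see the comment in testLinearCombination2DSDS below)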
final DerivativeStructure[] a = new DerivativeStructure[] {
factory.variable(0, -1321008684645961.0 / 268435456.0),
factory.variable(1, -5774608829631843.0 / 268435456.0),
factory.variable(2, -7645843051051357.0 / 8589934592.0)
};
final DerivativeStructure[] b = new DerivativeStructure[] {
factory.variable(3, -5712344449280879.0 / 2097152.0),
factory.variable(4, -4550117129121957.0 / 2097152.0),
factory.variable(5, 8846951984510141.0 / 131072.0)
};
final DerivativeStructure abSumInline = a[0].linearCombination(a[0], b[0], a[1], b[1], a[2], b[2]);
final DerivativeStructure abSumArray = a[0].linearCombination(a, b);
Assert.assertEquals(abSumInline.getValue(), abSumArray.getValue(), 0);
Assert.assertEquals(-1.8551294182586248737720779899, abSumInline.getValue(), 1.0e-15);
Assert.assertEquals(b[0].getValue(), abSumInline.getPartialDerivative(1, 0, 0, 0, 0, 0), 1.0e-15);
Assert.assertEquals(b[1].getValue(), abSumInline.getPartialDerivative(0, 1, 0, 0, 0, 0), 1.0e-15);
Assert.assertEquals(b[2].getValue(), abSumInline.getPartialDerivative(0, 0, 1, 0, 0, 0), 1.0e-15);
Assert.assertEquals(a[0].getValue(), abSumInline.getPartialDerivative(0, 0, 0, 1, 0, 0), 1.0e-15);
Assert.assertEquals(a[1].getValue(), abSumInline.getPartialDerivative(0, 0, 0, 0, 1, 0), 1.0e-15);
Assert.assertEquals(a[2].getValue(), abSumInline.getPartialDerivative(0, 0, 0, 0, 0, 1), 1.0e-15);
}
@Test
public void testLinearCombination1DoubleDS() {
DSFactory factory = new DSFactory(3, 1);
final double[] a = new double[] {
-1321008684645961.0 / 268435456.0,
-5774608829631843.0 / 268435456.0,
-7645843051051357.0 / 8589934592.0
};
final DerivativeStructure[] b = new DerivativeStructure[] {
factory.variable(0, -5712344449280879.0 / 2097152.0),
factory.variable(1, -4550117129121957.0 / 2097152.0),
factory.variable(2, 8846951984510141.0 / 131072.0)
};
final DerivativeStructure abSumInline = b[0].linearCombination(a[0], b[0],
a[1], b[1],
a[2], b[2]);
final DerivativeStructure abSumArray = b[0].linearCombination(a, b);
Assert.assertEquals(abSumInline.getValue(), abSumArray.getValue(), 0);
Assert.assertEquals(-1.8551294182586248737720779899, abSumInline.getValue(), 1.0e-15);
Assert.assertEquals(a[0], abSumInline.getPartialDerivative(1, 0, 0), 1.0e-15);
Assert.assertEquals(a[1], abSumInline.getPartialDerivative(0, 1, 0), 1.0e-15);
Assert.assertEquals(a[2], abSumInline.getPartialDerivative(0, 0, 1), 1.0e-15);
}
@Test
public void testLinearCombination2DSDS() {
// we compare accurate versus naive dot product implementations
// on regular vectors (i.e. not extreme cases like in the previous test)
Well1024a random = new Well1024a(0xc6af886975069f11L);
DSFactory factory = new DSFactory(4, 1);
for (int i = 0; i < 10000; ++i) {
final DerivativeStructure[] u = new DerivativeStructure[factory.getCompiler().getFreeParameters()];
final DerivativeStructure[] v = new DerivativeStructure[factory.getCompiler().getFreeParameters()];
for (int j = 0; j < u.length; ++j) {
u[j] = factory.variable(j, 1e17 * random.nextDouble());
v[j] = factory.constant(1e17 * random.nextDouble());
}
DerivativeStructure lin = u[0].linearCombination(u[0], v[0], u[1], v[1]);
double ref = u[0].getValue() * v[0].getValue() +
u[1].getValue() * v[1].getValue();
Assert.assertEquals(ref, lin.getValue(), 1.0e-15 * FastMath.abs(ref));
Assert.assertEquals(v[0].getValue(), lin.getPartialDerivative(1, 0, 0, 0), 1.0e-15 * FastMath.abs(v[0].getValue()));
Assert.assertEquals(v[1].getValue(), lin.getPartialDerivative(0, 1, 0, 0), 1.0e-15 * FastMath.abs(v[1].getValue()));
lin = u[0].linearCombination(u[0], v[0], u[1], v[1], u[2], v[2]);
ref = u[0].getValue() * v[0].getValue() +
u[1].getValue() * v[1].getValue() +
u[2].getValue() * v[2].getValue();
Assert.assertEquals(ref, lin.getValue(), 1.0e-15 * FastMath.abs(ref));
Assert.assertEquals(v[0].getValue(), lin.getPartialDerivative(1, 0, 0, 0), 1.0e-15 * FastMath.abs(v[0].getValue()));
Assert.assertEquals(v[1].getValue(), lin.getPartialDerivative(0, 1, 0, 0), 1.0e-15 * FastMath.abs(v[1].getValue()));
Assert.assertEquals(v[2].getValue(), lin.getPartialDerivative(0, 0, 1, 0), 1.0e-15 * FastMath.abs(v[2].getValue()));
lin = u[0].linearCombination(u[0], v[0], u[1], v[1], u[2], v[2], u[3], v[3]);
ref = u[0].getValue() * v[0].getValue() +
u[1].getValue() * v[1].getValue() +
u[2].getValue() * v[2].getValue() +
u[3].getValue() * v[3].getValue();
Assert.assertEquals(ref, lin.getValue(), 1.0e-15 * FastMath.abs(ref));
Assert.assertEquals(v[0].getValue(), lin.getPartialDerivative(1, 0, 0, 0), 1.0e-15 * FastMath.abs(v[0].getValue()));
Assert.assertEquals(v[1].getValue(), lin.getPartialDerivative(0, 1, 0, 0), 1.0e-15 * FastMath.abs(v[1].getValue()));
Assert.assertEquals(v[2].getValue(), lin.getPartialDerivative(0, 0, 1, 0), 1.0e-15 * FastMath.abs(v[2].getValue()));
Assert.assertEquals(v[3].getValue(), lin.getPartialDerivative(0, 0, 0, 1), 1.0e-15 * FastMath.abs(v[3].getValue()));
}
}
@Test
public void testLinearCombination2DoubleDS() {
// we compare accurate versus naive dot product implementations
// on regular vectors (i.e. not extreme cases like in the previous test)
Well1024a random = new Well1024a(0xc6af886975069f11L);
DSFactory factory = new DSFactory(4, 1);
for (int i = 0; i < 10000; ++i) {
final double[] u = new double[4];
final DerivativeStructure[] v = new DerivativeStructure[factory.getCompiler().getFreeParameters()];
for (int j = 0; j < u.length; ++j) {
u[j] = 1e17 * random.nextDouble();
v[j] = factory.variable(j, 1e17 * random.nextDouble());
}
DerivativeStructure lin = v[0].linearCombination(u[0], v[0], u[1], v[1]);
double ref = u[0] * v[0].getValue() +
u[1] * v[1].getValue();
Assert.assertEquals(ref, lin.getValue(), 1.0e-15 * FastMath.abs(ref));
Assert.assertEquals(u[0], lin.getPartialDerivative(1, 0, 0, 0), 1.0e-15 * FastMath.abs(v[0].getValue()));
Assert.assertEquals(u[1], lin.getPartialDerivative(0, 1, 0, 0), 1.0e-15 * FastMath.abs(v[1].getValue()));
lin = v[0].linearCombination(u[0], v[0], u[1], v[1], u[2], v[2]);
ref = u[0] * v[0].getValue() +
u[1] * v[1].getValue() +
u[2] * v[2].getValue();
Assert.assertEquals(ref, lin.getValue(), 1.0e-15 * FastMath.abs(ref));
Assert.assertEquals(u[0], lin.getPartialDerivative(1, 0, 0, 0), 1.0e-15 * FastMath.abs(v[0].getValue()));
Assert.assertEquals(u[1], lin.getPartialDerivative(0, 1, 0, 0), 1.0e-15 * FastMath.abs(v[1].getValue()));
Assert.assertEquals(u[2], lin.getPartialDerivative(0, 0, 1, 0), 1.0e-15 * FastMath.abs(v[2].getValue()));
lin = v[0].linearCombination(u[0], v[0], u[1], v[1], u[2], v[2], u[3], v[3]);
ref = u[0] * v[0].getValue() +
u[1] * v[1].getValue() +
u[2] * v[2].getValue() +
u[3] * v[3].getValue();
Assert.assertEquals(ref, lin.getValue(), 1.0e-15 * FastMath.abs(ref));
Assert.assertEquals(u[0], lin.getPartialDerivative(1, 0, 0, 0), 1.0e-15 * FastMath.abs(v[0].getValue()));
Assert.assertEquals(u[1], lin.getPartialDerivative(0, 1, 0, 0), 1.0e-15 * FastMath.abs(v[1].getValue()));
Assert.assertEquals(u[2], lin.getPartialDerivative(0, 0, 1, 0), 1.0e-15 * FastMath.abs(v[2].getValue()));
Assert.assertEquals(u[3], lin.getPartialDerivative(0, 0, 0, 1), 1.0e-15 * FastMath.abs(v[3].getValue()));
}
}
@Test
public void testSerialization() {
DerivativeStructure a = new DSFactory(3, 2).variable(0, 1.3);
DerivativeStructure b = (DerivativeStructure) UnitTestUtils.serializeAndRecover(a);
Assert.assertEquals(a.getFreeParameters(), b.getFreeParameters());
Assert.assertEquals(a.getOrder(), b.getOrder());
checkEquals(a, b, 1.0e-15);
}
@Test
public void testZero() {
DerivativeStructure zero = new DSFactory(3, 2).variable(2, 17.0).getField().getZero();
double[] a = zero.getAllDerivatives();
Assert.assertEquals(10, a.length);
for (int i = 0; i < a.length; ++i) {
Assert.assertEquals(0.0, a[i], 1.0e-15);
}
}
@Test
public void testOne() {
DerivativeStructure one = new DSFactory(3, 2).variable(2, 17.0).getField().getOne();
double[] a = one.getAllDerivatives();
Assert.assertEquals(10, a.length);
for (int i = 0; i < a.length; ++i) {
Assert.assertEquals(i == 0 ? 1.0 : 0.0, a[i], 1.0e-15);
}
}
@Test
public void testMap() {
List<int[]> pairs = new ArrayList<>();
for (int parameters = 1; parameters < 5; ++parameters) {
for (int order = 0; order < 3; ++order) {
pairs.add(new int[] { parameters, order });
}
}
Map<Field<?>, Integer> map = new HashMap<>();
for (int i = 0; i < 1000; ++i) {
// create a brand new factory for each derivative
int parameters = pairs.get(i % pairs.size())[0];
int order = pairs.get(i % pairs.size())[1];
map.put(new DSFactory(parameters, order).constant(17.0).getField(), 0);
}
// even though we have created numerous factories,
// there should be only one field for each (parameters, order) pair
Assert.assertEquals(pairs.size(), map.size());
@SuppressWarnings("unchecked")
Field<DerivativeStructure> first = (Field<DerivativeStructure>) map.entrySet().iterator().next().getKey();
Assert.assertTrue(first.equals(first));
Assert.assertFalse(first.equals(Decimal64Field.getInstance()));
}
@Test
public void testRebaseConditions() {
final DSFactory f32 = new DSFactory(3, 2);
final DSFactory f22 = new DSFactory(2, 2);
final DSFactory f31 = new DSFactory(3, 1);
try {
f32.variable(0, 0).rebase(f22.variable(0, 0), f22.variable(1, 1.0));
Assert.fail("an exception should have been thrown");
} catch (MathIllegalArgumentException miae) {
Assert.assertEquals(LocalizedCoreFormats.DIMENSIONS_MISMATCH, miae.getSpecifier());
Assert.assertEquals(3, ((Integer) miae.getParts()[0]).intValue());
Assert.assertEquals(2, ((Integer) miae.getParts()[1]).intValue());
}
try {
f32.variable(0, 0).rebase(f31.variable(0, 0), f31.variable(1, 1.0), f31.variable(2, 2.0));
Assert.fail("an exception should have been thrown");
} catch (MathIllegalArgumentException miae) {
Assert.assertEquals(LocalizedCoreFormats.DIMENSIONS_MISMATCH, miae.getSpecifier());
Assert.assertEquals(2, ((Integer) miae.getParts()[0]).intValue());
Assert.assertEquals(1, ((Integer) miae.getParts()[1]).intValue());
}
}
@Test
public void testRebaseValueMoreIntermediateThanBase() {
doTestRebaseValue(createBaseVariables(new DSFactory(2, 4), 1.5, -2.0),
q -> new DerivativeStructure[] {
q[0].add(q[1].multiply(3)),
q[0].log(),
q[1].divide(q[0].sin())
},
new DSFactory(3, 4),
p -> p[0].add(p[1].divide(p[2])),
1.0e-15);
}
@Test
public void testRebaseValueLessIntermediateThanBase() {
doTestRebaseValue(createBaseVariables(new DSFactory(3, 4), 1.5, -2.0, 0.5),
q -> new DerivativeStructure[] {
q[0].add(q[1].multiply(3)),
q[0].add(q[1]).subtract(q[2])
},
new DSFactory(2, 4),
p -> p[0].multiply(p[1]),
1.0e-15);
}
@Test
public void testRebaseValueEqualIntermediateAndBase() {
doTestRebaseValue(createBaseVariables(new DSFactory(2, 4), 1.5, -2.0),
q -> new DerivativeStructure[] {
q[0].add(q[1].multiply(3)),
q[0].add(q[1])
},
new DSFactory(2, 4),
p -> p[0].multiply(p[1]),
1.0e-15);
}
private void doTestRebaseValue(final DerivativeStructure[] q,
final CalculusFieldMultivariateVectorFunction<DerivativeStructure> qToP,
final DSFactory factoryP,
final CalculusFieldMultivariateFunction<DerivativeStructure> f,
final double tol) {
// intermediate variables as functions of base variables
final DerivativeStructure[] pBase = qToP.value(q);
// reference function
final DerivativeStructure ref = f.value(pBase);
// intermediate variables as independent variables
final DerivativeStructure[] pIntermediate = createIntermediateVariables(factoryP, pBase);
// function of the intermediate variables
final DerivativeStructure fI = f.value(pIntermediate);
// function rebased to base variables
final DerivativeStructure rebased = fI.rebase(pBase);
Assert.assertEquals(q[0].getFreeParameters(), ref.getFreeParameters());
Assert.assertEquals(q[0].getOrder(), ref.getOrder());
Assert.assertEquals(factoryP.getCompiler().getFreeParameters(), fI.getFreeParameters());
Assert.assertEquals(factoryP.getCompiler().getOrder(), fI.getOrder());
Assert.assertEquals(ref.getFreeParameters(), rebased.getFreeParameters());
Assert.assertEquals(ref.getOrder(), rebased.getOrder());
checkEquals(ref, rebased, tol);
// compare with Taylor map based implementation
checkEquals(composeWithTaylorMap(fI, pBase), rebased, tol);
}
final DerivativeStructure[] createBaseVariables(final DSFactory factory, double... q) {
final DerivativeStructure[] qDS = new DerivativeStructure[q.length];
for (int i = 0; i < q.length; ++i) {
qDS[i] = factory.variable(i, q[i]);
}
return qDS;
}
final DerivativeStructure[] createIntermediateVariables(final DSFactory factory, DerivativeStructure... pBase) {
final DerivativeStructure[] pIntermediate = new DerivativeStructure[pBase.length];
for (int i = 0; i < pBase.length; ++i) {
pIntermediate[i] = factory.variable(i, pBase[i].getReal());
}
return pIntermediate;
}
@Test
public void testRunTimeClass() {
Field<DerivativeStructure> field = new DSFactory(3, 2).constant(0.0).getField();
Assert.assertEquals(DerivativeStructure.class, field.getRuntimeClass());
}
private void checkF0F1(DerivativeStructure ds, double value, double...derivatives) {
// check dimension
Assert.assertEquals(derivatives.length, ds.getFreeParameters());
// check value, directly and also as 0th order derivative
Assert.assertEquals(value, ds.getValue(), 1.0e-15);
Assert.assertEquals(value, ds.getPartialDerivative(new int[ds.getFreeParameters()]), 1.0e-15);
// check first order derivatives
for (int i = 0; i < derivatives.length; ++i) {
int[] orders = new int[derivatives.length];
orders[i] = 1;
Assert.assertEquals(derivatives[i], ds.getPartialDerivative(orders), 1.0e-15);
}
}
private void checkEquals(DerivativeStructure ds1, DerivativeStructure ds2, double epsilon) {
// check dimension
Assert.assertEquals(ds1.getFreeParameters(), ds2.getFreeParameters());
Assert.assertEquals(ds1.getOrder(), ds2.getOrder());
int[] derivatives = new int[ds1.getFreeParameters()];
int sum = 0;
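// enumerate all partial derivative orders with an odometer-style increment of the orders array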
while (true) {
if (sum <= ds1.getOrder()) {
Assert.assertEquals(ds1.getPartialDerivative(derivatives),
ds2.getPartialDerivative(derivatives),
epsilon);
}
boolean increment = true;
sum = 0;
for (int i = derivatives.length - 1; i >= 0; --i) {
if (increment) {
if (derivatives[i] == ds1.getOrder()) {
derivatives[i] = 0;
} else {
derivatives[i]++;
increment = false;
}
}
sum += derivatives[i];
}
if (increment) {
return;
}
}
}
/** Compose the present derivatives on the right with a compatible so-called Taylor map, i.e. an array of other
 * partial derivatives. The output has the same number of independent variables as the right-hand side. */
private DerivativeStructure composeWithTaylorMap(final DerivativeStructure lhs, final DerivativeStructure[] rhs) {
// turn right-hand side of composition into Taylor expansions without constant term
final DSFactory rhsFactory = rhs[0].getFactory();
final TaylorExpansion[] rhsAsExpansions = new TaylorExpansion[rhs.length];
for (int k = 0; k < rhs.length; k++) {
final DerivativeStructure copied = new DerivativeStructure(rhsFactory, rhs[k].getAllDerivatives());
copied.setDerivativeComponent(0, 0.);
rhsAsExpansions[k] = new TaylorExpansion(copied);
}
// turn left-hand side of composition into Taylor expansion
final TaylorExpansion lhsAsExpansion = new TaylorExpansion(lhs);
// initialize quantities
TaylorExpansion te = new TaylorExpansion(rhsFactory.constant(lhs.getValue()));
TaylorExpansion[][] powers = new TaylorExpansion[rhs.length][lhs.getOrder()]; // for lazy storage of powers
// compose the Taylor expansions
final double[] coefficients = lhsAsExpansion.coefficients;
for (int j = 1; j < coefficients.length; j++) {
if (coefficients[j] != 0.) { // skip zero terms
TaylorExpansion inter = new TaylorExpansion(rhsFactory.constant(coefficients[j]));
final int[] orders = lhs.getFactory().getCompiler().getPartialDerivativeOrders(j);
for (int i = 0; i < orders.length; i++) {
if (orders[i] != 0) { // only consider non-trivial powers
if (powers[i][orders[i] - 1] == null) {
// this power has not been computed yet
final DerivativeStructure ds = new DerivativeStructure(rhsFactory, rhs[i].getAllDerivatives());
ds.setDerivativeComponent(0, 0.);
TaylorExpansion inter2 = new TaylorExpansion(ds);
for (int k = 1; k < orders[i]; k++) {
inter2 = inter2.multiply(rhsAsExpansions[i]);
}
powers[i][orders[i] - 1] = inter2;
}
inter = inter.multiply(powers[i][orders[i] - 1]);
}
}
te = te.add(inter);
}
}
// convert into derivatives object
return te.buildDsEquivalent();
}
/** Class to map partial derivatives to corresponding Taylor expansion. */
private static class TaylorExpansion {
/** Polynomial coefficients of the Taylor expansion in the local canonical basis. */
final double[] coefficients;
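/** Factorials from 0! up to order!, used to convert between partial derivatives and Taylor coefficients. */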
final double[] factorials;
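/** Factory associated with the underlying {@link DerivativeStructure}. */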
final DSFactory dsFactory;
/** Constructor. */
TaylorExpansion(final DerivativeStructure ds) {
final double[] data = ds.getAllDerivatives();
this.dsFactory = ds.getFactory();
this.coefficients = new double[data.length];
// compute relevant factorials (would be more efficient to compute products of factorials to map Taylor
// expansions and partial derivatives)
this.factorials = new double[ds.getOrder() + 1];
Arrays.fill(this.factorials, 1.);
for (int i = 2; i < this.factorials.length; i++) {
this.factorials[i] = this.factorials[i - 1] * (double) (i); // avoid limit of 20! in ArithmeticUtils
}
// transform partial derivatives into coefficients of Taylor expansion
for (int j = 0; j < data.length; j++) {
this.coefficients[j] = data[j];
if (this.coefficients[j] != 0.) {
int[] orders = ds.getFactory().getCompiler().getPartialDerivativeOrders(j);
for (int order : orders) {
this.coefficients[j] /= this.factorials[order];
}
}
}
}
/** Builder for the corresponding {@link DerivativeStructure}. */
public DerivativeStructure buildDsEquivalent() throws MathIllegalArgumentException {
final DSCompiler dsc = this.dsFactory.getCompiler();
final double[] data = new double[this.coefficients.length];
for (int j = 0; j < data.length; j++) {
data[j] = this.coefficients[j];
if (data[j] != 0.) {
int[] orders = dsc.getPartialDerivativeOrders(j);
for (int order : orders) {
data[j] *= this.factorials[order];
}
}
}
return new DerivativeStructure(this.dsFactory, data);
}
TaylorExpansion add(final TaylorExpansion te) {
return new TaylorExpansion(this.buildDsEquivalent().add(te.buildDsEquivalent()));
}
TaylorExpansion multiply(final TaylorExpansion te) {
return new TaylorExpansion(this.buildDsEquivalent().multiply(te.buildDsEquivalent()));
}
}
}
| hipparchus-core/src/test/java/org/hipparchus/analysis/differentiation/DerivativeStructureTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* This is not the original file distributed by the Apache Software Foundation
* It has been modified by the Hipparchus project
*/
package org.hipparchus.analysis.differentiation;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.hipparchus.CalculusFieldElementAbstractTest;
import org.hipparchus.Field;
import org.hipparchus.UnitTestUtils;
import org.hipparchus.analysis.CalculusFieldMultivariateFunction;
import org.hipparchus.analysis.CalculusFieldMultivariateVectorFunction;
import org.hipparchus.analysis.polynomials.PolynomialFunction;
import org.hipparchus.exception.LocalizedCoreFormats;
import org.hipparchus.exception.MathIllegalArgumentException;
import org.hipparchus.random.RandomGenerator;
import org.hipparchus.random.Well1024a;
import org.hipparchus.random.Well19937a;
import org.hipparchus.util.ArithmeticUtils;
import org.hipparchus.util.CombinatoricsUtils;
import org.hipparchus.util.Decimal64Field;
import org.hipparchus.util.FastMath;
import org.hipparchus.util.FieldSinCos;
import org.hipparchus.util.FieldSinhCosh;
import org.hipparchus.util.Precision;
import org.junit.Assert;
import org.junit.Test;
/**
* Test for class {@link DerivativeStructure}.
*/
public class DerivativeStructureTest extends CalculusFieldElementAbstractTest<DerivativeStructure> {
@Override
protected DerivativeStructure build(final double x) {
return new DSFactory(2, 1).variable(0, x);
}
@Test(expected=MathIllegalArgumentException.class)
public void testWrongVariableIndex() {
new DSFactory(3, 1).variable(3, 1.0);
}
@Test(expected=MathIllegalArgumentException.class)
public void testMissingOrders() {
new DSFactory(3, 1).variable(0, 1.0).getPartialDerivative(0, 1);
}
@Test(expected=MathIllegalArgumentException.class)
public void testTooLargeOrder() {
new DSFactory(3, 1).variable(0, 1.0).getPartialDerivative(1, 1, 2);
}
@Test
public void testVariableWithoutDerivative0() {
DerivativeStructure v = new DSFactory(1, 0).variable(0, 1.0);
Assert.assertEquals(1.0, v.getValue(), 1.0e-15);
}
@Test(expected=MathIllegalArgumentException.class)
public void testVariableWithoutDerivative1() {
DerivativeStructure v = new DSFactory(1, 0).variable(0, 1.0);
Assert.assertEquals(1.0, v.getPartialDerivative(1), 1.0e-15);
}
@Test
public void testVariable() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
checkF0F1(factory.variable(0, 1.0), 1.0, 1.0, 0.0, 0.0);
checkF0F1(factory.variable(1, 2.0), 2.0, 0.0, 1.0, 0.0);
checkF0F1(factory.variable(2, 3.0), 3.0, 0.0, 0.0, 1.0);
}
}
@Test
public void testConstant() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
checkF0F1(factory.constant(FastMath.PI), FastMath.PI, 0.0, 0.0, 0.0);
}
}
@Test
public void testPrimitiveAdd() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
checkF0F1(factory.variable(0, 1.0).add(5), 6.0, 1.0, 0.0, 0.0);
checkF0F1(factory.variable(1, 2.0).add(5), 7.0, 0.0, 1.0, 0.0);
checkF0F1(factory.variable(2, 3.0).add(5), 8.0, 0.0, 0.0, 1.0);
}
}
@Test
public void testAdd() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
DerivativeStructure x = factory.variable(0, 1.0);
DerivativeStructure y = factory.variable(1, 2.0);
DerivativeStructure z = factory.variable(2, 3.0);
DerivativeStructure xyz = x.add(y.add(z));
checkF0F1(xyz, x.getValue() + y.getValue() + z.getValue(), 1.0, 1.0, 1.0);
}
}
@Test
public void testPrimitiveSubtract() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
checkF0F1(factory.variable(0, 1.0).subtract(5), -4.0, 1.0, 0.0, 0.0);
checkF0F1(factory.variable(1, 2.0).subtract(5), -3.0, 0.0, 1.0, 0.0);
checkF0F1(factory.variable(2, 3.0).subtract(5), -2.0, 0.0, 0.0, 1.0);
}
}
@Test
public void testSubtract() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
DerivativeStructure x = factory.variable(0, 1.0);
DerivativeStructure y = factory.variable(1, 2.0);
DerivativeStructure z = factory.variable(2, 3.0);
DerivativeStructure xyz = x.subtract(y.subtract(z));
checkF0F1(xyz, x.getValue() - (y.getValue() - z.getValue()), 1.0, -1.0, 1.0);
}
}
@Test
public void testPrimitiveMultiply() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
checkF0F1(factory.variable(0, 1.0).multiply(5), 5.0, 5.0, 0.0, 0.0);
checkF0F1(factory.variable(1, 2.0).multiply(5), 10.0, 0.0, 5.0, 0.0);
checkF0F1(factory.variable(2, 3.0).multiply(5), 15.0, 0.0, 0.0, 5.0);
}
}
@Test
public void testMultiply() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
DerivativeStructure x = factory.variable(0, 1.0);
DerivativeStructure y = factory.variable(1, 2.0);
DerivativeStructure z = factory.variable(2, 3.0);
DerivativeStructure xyz = x.multiply(y.multiply(z));
for (int i = 0; i <= maxOrder; ++i) {
for (int j = 0; j <= maxOrder; ++j) {
for (int k = 0; k <= maxOrder; ++k) {
if (i + j + k <= maxOrder) {
Assert.assertEquals((i == 0 ? x.getValue() : (i == 1 ? 1.0 : 0.0)) *
(j == 0 ? y.getValue() : (j == 1 ? 1.0 : 0.0)) *
(k == 0 ? z.getValue() : (k == 1 ? 1.0 : 0.0)),
xyz.getPartialDerivative(i, j, k),
1.0e-15);
}
}
}
}
}
}
@Test
public void testNegate() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
checkF0F1(factory.variable(0, 1.0).negate(), -1.0, -1.0, 0.0, 0.0);
checkF0F1(factory.variable(1, 2.0).negate(), -2.0, 0.0, -1.0, 0.0);
checkF0F1(factory.variable(2, 3.0).negate(), -3.0, 0.0, 0.0, -1.0);
}
}
@Test
public void testReciprocal() {
for (double x = 0.1; x < 1.2; x += 0.1) {
DerivativeStructure r = new DSFactory(1, 6).variable(0, x).reciprocal();
Assert.assertEquals(1 / x, r.getValue(), 1.0e-15);
for (int i = 1; i < r.getOrder(); ++i) {
double expected = ArithmeticUtils.pow(-1, i) * CombinatoricsUtils.factorial(i) /
FastMath.pow(x, i + 1);
Assert.assertEquals(expected, r.getPartialDerivative(i), 1.0e-15 * FastMath.abs(expected));
}
}
}
@Test
public void testPow() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
for (int n = 0; n < 10; ++n) {
DerivativeStructure x = factory.variable(0, 1.0);
DerivativeStructure y = factory.variable(1, 2.0);
DerivativeStructure z = factory.variable(2, 3.0);
List<DerivativeStructure> list = Arrays.asList(x, y, z,
x.add(y).add(z),
x.multiply(y).multiply(z));
if (n == 0) {
for (DerivativeStructure ds : list) {
checkEquals(ds.getField().getOne(), FastMath.pow(ds, n), 1.0e-15);
}
} else if (n == 1) {
for (DerivativeStructure ds : list) {
checkEquals(ds, FastMath.pow(ds, n), 1.0e-15);
}
} else {
for (DerivativeStructure ds : list) {
DerivativeStructure p = ds.getField().getOne();
for (int i = 0; i < n; ++i) {
p = p.multiply(ds);
}
checkEquals(p, FastMath.pow(ds, n), 1.0e-15);
}
}
}
}
}
@Test
public void testPowDoubleDS() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
DerivativeStructure x = factory.variable(0, 0.1);
DerivativeStructure y = factory.variable(1, 0.2);
DerivativeStructure z = factory.variable(2, 0.3);
List<DerivativeStructure> list = Arrays.asList(x, y, z,
x.add(y).add(z),
x.multiply(y).multiply(z));
for (DerivativeStructure ds : list) {
// the special case a = 0 is included here
for (double a : new double[] { 0.0, 0.1, 1.0, 2.0, 5.0 }) {
DerivativeStructure reference = (a == 0) ?
x.getField().getZero() :
FastMath.pow(new DSFactory(3, maxOrder).constant(a), ds);
DerivativeStructure result = DerivativeStructure.pow(a, ds);
checkEquals(reference, result, 1.0e-15);
}
}
// negative base: (-2)^x can be evaluated for integer x only, so the value is sometimes OK, but derivatives are always NaN
DerivativeStructure negEvenInteger = DerivativeStructure.pow(-2.0, factory.variable(0, 2.0));
Assert.assertEquals(4.0, negEvenInteger.getValue(), 1.0e-15);
Assert.assertTrue(Double.isNaN(negEvenInteger.getPartialDerivative(1, 0, 0)));
DerivativeStructure negOddInteger = DerivativeStructure.pow(-2.0, factory.variable(0, 3.0));
Assert.assertEquals(-8.0, negOddInteger.getValue(), 1.0e-15);
Assert.assertTrue(Double.isNaN(negOddInteger.getPartialDerivative(1, 0, 0)));
DerivativeStructure negNonInteger = DerivativeStructure.pow(-2.0, factory.variable(0, 2.001));
Assert.assertTrue(Double.isNaN(negNonInteger.getValue()));
Assert.assertTrue(Double.isNaN(negNonInteger.getPartialDerivative(1, 0, 0)));
DerivativeStructure zeroNeg = DerivativeStructure.pow(0.0, factory.variable(0, -1.0));
Assert.assertTrue(Double.isNaN(zeroNeg.getValue()));
Assert.assertTrue(Double.isNaN(zeroNeg.getPartialDerivative(1, 0, 0)));
DerivativeStructure posNeg = DerivativeStructure.pow(2.0, factory.variable(0, -2.0));
Assert.assertEquals(1.0 / 4.0, posNeg.getValue(), 1.0e-15);
Assert.assertEquals(FastMath.log(2.0) / 4.0, posNeg.getPartialDerivative(1, 0, 0), 1.0e-15);
// very special case: a = 0 and power = 0
DerivativeStructure zeroZero = DerivativeStructure.pow(0.0, factory.variable(0, 0.0));
// this should be OK for simple first derivative with one variable only ...
Assert.assertEquals(1.0, zeroZero.getValue(), 1.0e-15);
Assert.assertEquals(Double.NEGATIVE_INFINITY, zeroZero.getPartialDerivative(1, 0, 0), 1.0e-15);
// the following checks show a LIMITATION of the current implementation
// we have no way to tell x is a pure linear variable x = 0
// we only say: "x is a structure with value = 0.0,
// first derivative with respect to x = 1.0, and all other derivatives
// (first order with respect to y and z, and higher orders) are all 0.0".
// We have function f(x) = a^x and x = 0 so we compute:
// f(0) = 1, f'(0) = ln(a), f''(0) = ln(a)^2. The limit of these values
// when a converges to 0 implies all derivatives keep switching between
// +infinity and -infinity.
//
// Function composition rule for first derivatives is:
// d[f(g(x,y,z))]/dy = f'(g(x,y,z)) * dg(x,y,z)/dy
// so given that in our case x represents g and does not depend
// on y or z, we have dg(x,y,z)/dy = 0
// applying the composition rules gives:
// d[f(g(x,y,z))]/dy = f'(g(x,y,z)) * dg(x,y,z)/dy
// = -infinity * 0
// = NaN
// if we knew x is really the x variable and not the identity
// function applied to x, we would not have computed f'(g(x,y,z)) * dg(x,y,z)/dy
// and we would have found that the result was 0 and not NaN
Assert.assertTrue(Double.isNaN(zeroZero.getPartialDerivative(0, 1, 0)));
Assert.assertTrue(Double.isNaN(zeroZero.getPartialDerivative(0, 0, 1)));
// Function composition rule for second derivatives is:
// d2[f(g(x))]/dx2 = f''(g(x)) * [g'(x)]^2 + f'(g(x)) * g''(x)
// when function f is a^x and x = 0 we have:
// f(0) = 1, f'(0) = ln(a), f''(0) = ln(a)^2 which for a = 0 implies
// all derivatives keep switching between +infinity and -infinity
// so given that in our case x represents g, we have g(x) = 0,
// g'(x) = 1 and g''(x) = 0
// applying the composition rules gives:
// d2[f(g(x))]/dx2 = f''(g(x)) * [g'(x)]^2 + f'(g(x)) * g''(x)
// = +infinity * 1^2 + -infinity * 0
// = +infinity + NaN
// = NaN
// if we knew x is really the x variable and not the identity
// function applied to x, we would not have computed f'(g(x)) * g''(x)
// and we would have found that the result was +infinity and not NaN
if (maxOrder > 1) {
Assert.assertTrue(Double.isNaN(zeroZero.getPartialDerivative(2, 0, 0)));
Assert.assertTrue(Double.isNaN(zeroZero.getPartialDerivative(0, 2, 0)));
Assert.assertTrue(Double.isNaN(zeroZero.getPartialDerivative(0, 0, 2)));
Assert.assertTrue(Double.isNaN(zeroZero.getPartialDerivative(1, 1, 0)));
Assert.assertTrue(Double.isNaN(zeroZero.getPartialDerivative(0, 1, 1)));
Assert.assertTrue(Double.isNaN(zeroZero.getPartialDerivative(1, 0, 1)));
}
// very special case: 0^0 where the power is a primitive
DerivativeStructure zeroDsZeroDouble = factory.variable(0, 0.0).pow(0.0);
boolean first = true;
for (final double d : zeroDsZeroDouble.getAllDerivatives()) {
if (first) {
Assert.assertEquals(1.0, d, Precision.EPSILON);
first = false;
} else {
Assert.assertEquals(0.0, d, Precision.SAFE_MIN);
}
}
DerivativeStructure zeroDsZeroInt = factory.variable(0, 0.0).pow(0);
first = true;
for (final double d : zeroDsZeroInt.getAllDerivatives()) {
if (first) {
Assert.assertEquals(1.0, d, Precision.EPSILON);
first = false;
} else {
Assert.assertEquals(0.0, d, Precision.SAFE_MIN);
}
}
// 0^p with p smaller than 1.0
DerivativeStructure u = factory.variable(1, -0.0).pow(0.25);
for (int i0 = 0; i0 <= maxOrder; ++i0) {
for (int i1 = 0; i1 <= maxOrder; ++i1) {
for (int i2 = 0; i2 <= maxOrder; ++i2) {
if (i0 + i1 + i2 <= maxOrder) {
Assert.assertEquals(0.0, u.getPartialDerivative(i0, i1, i2), 1.0e-10);
}
}
}
}
}
}
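// Exposition-only sketch (not part of the original suite): for a strictly
// positive primitive base a, DerivativeStructure.pow(a, x) should agree
// derivative-for-derivative with the identity a^x = exp(x * ln(a)); the base
// 2.5 and the point 0.7 below are arbitrary illustration values, and only
// APIs already exercised above are used.
private static void demoPowExpLogIdentity() {
    DerivativeStructure x = new DSFactory(1, 3).variable(0, 0.7);
    DerivativeStructure direct = DerivativeStructure.pow(2.5, x);
    DerivativeStructure viaExp = x.multiply(FastMath.log(2.5)).exp();
    for (int n = 0; n <= 3; ++n) {
        // the derivatives of 2.5^x are (ln 2.5)^n * 2.5^x, so none of them vanish here
        Assert.assertEquals(viaExp.getPartialDerivative(n),
                            direct.getPartialDerivative(n),
                            1.0e-14 * FastMath.abs(viaExp.getPartialDerivative(n)));
    }
}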
@Test
public void testScalb() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
DerivativeStructure x = factory.variable(0, 1.0);
DerivativeStructure y = factory.variable(1, 2.0);
DerivativeStructure z = factory.variable(2, 3.0);
DerivativeStructure xyz = x.multiply(y.multiply(z));
double s = 0.125;
for (int n = -3; n <= 3; ++n) {
DerivativeStructure scaled = xyz.scalb(n);
for (int i = 0; i <= maxOrder; ++i) {
for (int j = 0; j <= maxOrder; ++j) {
for (int k = 0; k <= maxOrder; ++k) {
if (i + j + k <= maxOrder) {
Assert.assertEquals((i == 0 ? x.getValue() : (i == 1 ? 1.0 : 0.0)) *
(j == 0 ? y.getValue() : (j == 1 ? 1.0 : 0.0)) *
(k == 0 ? z.getValue() : (k == 1 ? 1.0 : 0.0)) *
s,
scaled.getPartialDerivative(i, j, k),
1.0e-15);
}
}
}
}
s *= 2;
}
}
}
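// Exposition-only sketch: scalb(n) is multiplication by the constant 2^n,
// which commutes with differentiation, so every partial derivative is scaled
// by the same factor; multiply(4.0) therefore yields the same structure as scalb(2).
private static void demoScalbIsConstantScaling() {
    DerivativeStructure x = new DSFactory(1, 2).variable(0, 3.0);
    DerivativeStructure a = x.multiply(x).scalb(2);      // 2^2 * x^2
    DerivativeStructure b = x.multiply(x).multiply(4.0); // same function
    for (int n = 0; n <= 2; ++n) {
        Assert.assertEquals(b.getPartialDerivative(n), a.getPartialDerivative(n), 1.0e-15);
    }
}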
@Test
public void testUlp() {
final RandomGenerator random = new Well19937a(0x85d201920b5be954L);
for (int k = 0; k < 10000; ++k) {
int maxOrder = 1 + random.nextInt(5);
DSFactory factory = new DSFactory(3, maxOrder);
DerivativeStructure x = factory.variable(0, FastMath.scalb(2 * random.nextDouble() - 1, random.nextInt(600) - 300));
DerivativeStructure y = factory.variable(1, FastMath.scalb(2 * random.nextDouble() - 1, random.nextInt(600) - 300));
DerivativeStructure z = factory.variable(2, FastMath.scalb(2 * random.nextDouble() - 1, random.nextInt(600) - 300));
DerivativeStructure xyz = x.multiply(y.multiply(z));
DerivativeStructure ulp = xyz.ulp();
boolean first = true;
for (double d : ulp.getAllDerivatives()) {
Assert.assertEquals(first ? FastMath.ulp(xyz.getValue()) : 0.0, d, 1.0e-15 * FastMath.ulp(xyz.getValue()));
first = false;
}
}
}
@Test
public void testExpression() {
DSFactory factory = new DSFactory(3, 5);
double epsilon = 2.5e-13;
for (double x = 0; x < 2; x += 0.2) {
DerivativeStructure dsX = factory.variable(0, x);
for (double y = 0; y < 2; y += 0.2) {
DerivativeStructure dsY = factory.variable(1, y);
for (double z = 0; z > -2; z -= 0.2) {
DerivativeStructure dsZ = factory.variable(2, z);
// f(x, y, z) = x + 5 x y - 2 z + (8 z x - y)^3
DerivativeStructure ds =
dsX.linearCombination(1, dsX,
5, dsX.multiply(dsY),
-2, dsZ,
1, dsX.linearCombination(8, dsZ.multiply(dsX),
-1, dsY).pow(3));
DerivativeStructure dsOther =
dsX.linearCombination(1, dsX,
5, dsX.multiply(dsY),
-2, dsZ).add(dsX.linearCombination(8, dsZ.multiply(dsX),
-1, dsY).pow(3));
double f = x + 5 * x * y - 2 * z + FastMath.pow(8 * z * x - y, 3);
Assert.assertEquals(f, ds.getValue(),
FastMath.abs(epsilon * f));
Assert.assertEquals(f, dsOther.getValue(),
FastMath.abs(epsilon * f));
// df/dx = 1 + 5 y + 24 (8 z x - y)^2 z
double dfdx = 1 + 5 * y + 24 * z * FastMath.pow(8 * z * x - y, 2);
Assert.assertEquals(dfdx, ds.getPartialDerivative(1, 0, 0),
FastMath.abs(epsilon * dfdx));
Assert.assertEquals(dfdx, dsOther.getPartialDerivative(1, 0, 0),
FastMath.abs(epsilon * dfdx));
// df/dxdy = 5 + 48 z*(y - 8 z x)
double dfdxdy = 5 + 48 * z * (y - 8 * z * x);
Assert.assertEquals(dfdxdy, ds.getPartialDerivative(1, 1, 0),
FastMath.abs(epsilon * dfdxdy));
Assert.assertEquals(dfdxdy, dsOther.getPartialDerivative(1, 1, 0),
FastMath.abs(epsilon * dfdxdy));
// df/dxdydz = 48 (y - 16 z x)
double dfdxdydz = 48 * (y - 16 * z * x);
Assert.assertEquals(dfdxdydz, ds.getPartialDerivative(1, 1, 1),
FastMath.abs(epsilon * dfdxdydz));
Assert.assertEquals(dfdxdydz, dsOther.getPartialDerivative(1, 1, 1),
FastMath.abs(epsilon * dfdxdydz));
}
}
}
}
@Test
public void testCompositionOneVariableX() {
double epsilon = 1.0e-13;
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.1) {
DerivativeStructure dsX = factory.variable(0, x);
for (double y = 0.1; y < 1.2; y += 0.1) {
DerivativeStructure dsY = factory.constant(y);
DerivativeStructure f = dsX.divide(dsY).sqrt();
double f0 = FastMath.sqrt(x / y);
Assert.assertEquals(f0, f.getValue(), FastMath.abs(epsilon * f0));
if (f.getOrder() > 0) {
double f1 = 1 / (2 * FastMath.sqrt(x * y));
Assert.assertEquals(f1, f.getPartialDerivative(1), FastMath.abs(epsilon * f1));
if (f.getOrder() > 1) {
double f2 = -f1 / (2 * x);
Assert.assertEquals(f2, f.getPartialDerivative(2), FastMath.abs(epsilon * f2));
if (f.getOrder() > 2) {
double f3 = (f0 + x / (2 * y * f0)) / (4 * x * x * x);
Assert.assertEquals(f3, f.getPartialDerivative(3), FastMath.abs(epsilon * f3));
}
}
}
}
}
}
}
@Test
public void testTrigo() {
double epsilon = 2.0e-12;
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.1) {
DerivativeStructure dsX = factory.variable(0, x);
for (double y = 0.1; y < 1.2; y += 0.1) {
DerivativeStructure dsY = factory.variable(1, y);
for (double z = 0.1; z < 1.2; z += 0.1) {
DerivativeStructure dsZ = factory.variable(2, z);
DerivativeStructure f = FastMath.sin(dsX.divide(FastMath.cos(dsY).add(FastMath.tan(dsZ))));
double a = FastMath.cos(y) + FastMath.tan(z);
double f0 = FastMath.sin(x / a);
Assert.assertEquals(f0, f.getValue(), FastMath.abs(epsilon * f0));
if (f.getOrder() > 0) {
double dfdx = FastMath.cos(x / a) / a;
Assert.assertEquals(dfdx, f.getPartialDerivative(1, 0, 0), FastMath.abs(epsilon * dfdx));
double dfdy = x * FastMath.sin(y) * dfdx / a;
Assert.assertEquals(dfdy, f.getPartialDerivative(0, 1, 0), FastMath.abs(epsilon * dfdy));
double cz = FastMath.cos(z);
double cz2 = cz * cz;
double dfdz = -x * dfdx / (a * cz2);
Assert.assertEquals(dfdz, f.getPartialDerivative(0, 0, 1), FastMath.abs(epsilon * dfdz));
if (f.getOrder() > 1) {
double df2dx2 = -(f0 / (a * a));
Assert.assertEquals(df2dx2, f.getPartialDerivative(2, 0, 0), FastMath.abs(epsilon * df2dx2));
double df2dy2 = x * FastMath.cos(y) * dfdx / a -
x * x * FastMath.sin(y) * FastMath.sin(y) * f0 / (a * a * a * a) +
2 * FastMath.sin(y) * dfdy / a;
Assert.assertEquals(df2dy2, f.getPartialDerivative(0, 2, 0), FastMath.abs(epsilon * df2dy2));
double c4 = cz2 * cz2;
double df2dz2 = x * (2 * a * (1 - a * cz * FastMath.sin(z)) * dfdx - x * f0 / a ) / (a * a * a * c4);
Assert.assertEquals(df2dz2, f.getPartialDerivative(0, 0, 2), FastMath.abs(epsilon * df2dz2));
double df2dxdy = dfdy / x - x * FastMath.sin(y) * f0 / (a * a * a);
Assert.assertEquals(df2dxdy, f.getPartialDerivative(1, 1, 0), FastMath.abs(epsilon * df2dxdy));
}
}
}
}
}
}
}
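// Exposition-only sketch of the chain rule the test above relies on, reduced
// to the simplest case: d/dx sin(2x) = 2 cos(2x), at the arbitrary point x = 0.3.
private static void demoChainRuleSinTwoX() {
    DerivativeStructure x = new DSFactory(1, 1).variable(0, 0.3);
    DerivativeStructure f = x.multiply(2.0).sin();
    Assert.assertEquals(FastMath.sin(0.6), f.getValue(), 1.0e-15);
    Assert.assertEquals(2 * FastMath.cos(0.6), f.getPartialDerivative(1), 1.0e-15);
}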
@Test
public void testSqrtDefinition() {
double[] epsilon = new double[] { 5.0e-16, 5.0e-16, 2.0e-15, 5.0e-14, 2.0e-12 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure sqrt1 = dsX.pow(0.5);
DerivativeStructure sqrt2 = FastMath.sqrt(dsX);
DerivativeStructure zero = sqrt1.subtract(sqrt2);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testRootNSingularity() {
for (int n = 2; n < 10; ++n) {
for (int maxOrder = 0; maxOrder < 12; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
DerivativeStructure dsZero = factory.variable(0, 0.0);
DerivativeStructure rootN = dsZero.rootN(n);
Assert.assertEquals(0.0, rootN.getValue(), 1.0e-20);
if (maxOrder > 0) {
Assert.assertTrue(Double.isInfinite(rootN.getPartialDerivative(1)));
Assert.assertTrue(rootN.getPartialDerivative(1) > 0);
for (int order = 2; order <= maxOrder; ++order) {
// the following checks show a LIMITATION of the current implementation
// we have no way to tell dsZero is a pure linear variable x = 0
// we only say: "dsZero is a structure with value = 0.0,
// first derivative = 1.0, second and higher derivatives = 0.0".
// Function composition rule for second derivatives is:
// d2[f(g(x))]/dx2 = f''(g(x)) * [g'(x)]^2 + f'(g(x)) * g''(x)
// when function f is the nth root and x = 0 we have:
// f(0) = 0, f'(0) = +infinity, f''(0) = -infinity (and higher
// derivatives keep switching between +infinity and -infinity)
// so given that in our case dsZero represents g, we have g(x) = 0,
// g'(x) = 1 and g''(x) = 0
// applying the composition rules gives:
// d2[f(g(x))]/dx2 = f''(g(x)) * [g'(x)]^2 + f'(g(x)) * g''(x)
// = -infinity * 1^2 + +infinity * 0
// = -infinity + NaN
// = NaN
// if we knew dsZero is really the x variable and not the identity
// function applied to x, we would not have computed f'(g(x)) * g''(x)
// and we would have found that the result was -infinity and not NaN
Assert.assertTrue(Double.isNaN(rootN.getPartialDerivative(order)));
}
}
// the following shows that the limitation explained above is NOT a bug...
// if we set up the higher order derivatives for g appropriately, we do
// compute the higher order derivatives of the composition correctly
double[] gDerivatives = new double[1 + maxOrder];
gDerivatives[0] = 0.0;
for (int k = 1; k <= maxOrder; ++k) {
gDerivatives[k] = FastMath.pow(-1.0, k + 1);
}
DerivativeStructure correctRoot = factory.build(gDerivatives).rootN(n);
Assert.assertEquals(0.0, correctRoot.getValue(), 1.0e-20);
if (maxOrder > 0) {
Assert.assertTrue(Double.isInfinite(correctRoot.getPartialDerivative(1)));
Assert.assertTrue(correctRoot.getPartialDerivative(1) > 0);
for (int order = 2; order <= maxOrder; ++order) {
Assert.assertTrue(Double.isInfinite(correctRoot.getPartialDerivative(order)));
if ((order % 2) == 0) {
Assert.assertTrue(correctRoot.getPartialDerivative(order) < 0);
} else {
Assert.assertTrue(correctRoot.getPartialDerivative(order) > 0);
}
}
}
}
}
}
@Test
public void testSqrtPow2() {
double[] epsilon = new double[] { 1.0e-16, 3.0e-16, 2.0e-15, 6.0e-14, 6.0e-12 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = dsX.multiply(dsX).sqrt();
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testCbrtDefinition() {
double[] epsilon = new double[] { 4.0e-16, 9.0e-16, 6.0e-15, 2.0e-13, 4.0e-12 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure cbrt1 = dsX.pow(1.0 / 3.0);
DerivativeStructure cbrt2 = FastMath.cbrt(dsX);
DerivativeStructure zero = cbrt1.subtract(cbrt2);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testCbrtPow3() {
double[] epsilon = new double[] { 1.0e-16, 5.0e-16, 8.0e-15, 3.0e-13, 4.0e-11 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = dsX.multiply(dsX.multiply(dsX)).cbrt();
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
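// Exposition-only sketch: away from the x = 0 singularity exercised in
// testRootNSingularity above, rootN(n) should match pow(1.0 / n)
// derivative-for-derivative; 0.8 is an arbitrary illustration point.
private static void demoRootNMatchesPow() {
    DerivativeStructure x = new DSFactory(1, 2).variable(0, 0.8);
    DerivativeStructure byRoot = x.rootN(3);
    DerivativeStructure byPow  = x.pow(1.0 / 3.0);
    for (int n = 0; n <= 2; ++n) {
        Assert.assertEquals(byPow.getPartialDerivative(n), byRoot.getPartialDerivative(n), 1.0e-14);
    }
}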
@Test
public void testPowReciprocalPow() {
double[] epsilon = new double[] { 2.0e-15, 2.0e-14, 3.0e-13, 8.0e-12, 3.0e-10 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(2, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.01) {
DerivativeStructure dsX = factory.variable(0, x);
for (double y = 0.1; y < 1.2; y += 0.01) {
DerivativeStructure dsY = factory.variable(1, y);
DerivativeStructure rebuiltX = dsX.pow(dsY).pow(dsY.reciprocal());
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
for (int m = 0; m <= maxOrder; ++m) {
if (n + m <= maxOrder) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n, m), epsilon[n + m]);
}
}
}
}
}
}
}
@Test
public void testHypotDefinition() {
double epsilon = 1.0e-20;
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(2, maxOrder);
for (double x = -1.7; x < 2; x += 0.2) {
DerivativeStructure dsX = factory.variable(0, x);
for (double y = -1.7; y < 2; y += 0.2) {
DerivativeStructure dsY = factory.variable(1, y);
DerivativeStructure hypot = FastMath.hypot(dsY, dsX);
DerivativeStructure ref = dsX.multiply(dsX).add(dsY.multiply(dsY)).sqrt();
DerivativeStructure zero = hypot.subtract(ref);
for (int n = 0; n <= maxOrder; ++n) {
for (int m = 0; m <= maxOrder; ++m) {
if (n + m <= maxOrder) {
Assert.assertEquals(0, zero.getPartialDerivative(n, m), epsilon);
}
}
}
}
}
}
}
@Test
public void testHypotNoOverflow() {
DSFactory factory = new DSFactory(2, 5);
DerivativeStructure dsX = factory.variable(0, +3.0e250);
DerivativeStructure dsY = factory.variable(1, -4.0e250);
DerivativeStructure hypot = FastMath.hypot(dsX, dsY);
Assert.assertEquals(5.0e250, hypot.getValue(), 1.0e235);
Assert.assertEquals(dsX.getValue() / hypot.getValue(), hypot.getPartialDerivative(1, 0), 1.0e-10);
Assert.assertEquals(dsY.getValue() / hypot.getValue(), hypot.getPartialDerivative(0, 1), 1.0e-10);
DerivativeStructure sqrt = dsX.multiply(dsX).add(dsY.multiply(dsY)).sqrt();
Assert.assertTrue(Double.isInfinite(sqrt.getValue()));
}
@Test
public void testHypotNegligible() {
DSFactory factory = new DSFactory(2, 5);
DerivativeStructure dsSmall = factory.variable(0, +3.0e-10);
DerivativeStructure dsLarge = factory.variable(1, -4.0e25);
Assert.assertEquals(dsLarge.abs().getValue(),
DerivativeStructure.hypot(dsSmall, dsLarge).getValue(),
1.0e-10);
Assert.assertEquals(0,
DerivativeStructure.hypot(dsSmall, dsLarge).getPartialDerivative(1, 0),
1.0e-10);
Assert.assertEquals(-1,
DerivativeStructure.hypot(dsSmall, dsLarge).getPartialDerivative(0, 1),
1.0e-10);
Assert.assertEquals(dsLarge.abs().getValue(),
DerivativeStructure.hypot(dsLarge, dsSmall).getValue(),
1.0e-10);
Assert.assertEquals(0,
DerivativeStructure.hypot(dsLarge, dsSmall).getPartialDerivative(1, 0),
1.0e-10);
Assert.assertEquals(-1,
DerivativeStructure.hypot(dsLarge, dsSmall).getPartialDerivative(0, 1),
1.0e-10);
}
@Test
public void testHypotSpecial() {
DSFactory factory = new DSFactory(2, 5);
Assert.assertTrue(Double.isNaN(DerivativeStructure.hypot(factory.variable(0, Double.NaN),
factory.variable(0, +3.0e250)).getValue()));
Assert.assertTrue(Double.isNaN(DerivativeStructure.hypot(factory.variable(0, +3.0e250),
factory.variable(0, Double.NaN)).getValue()));
Assert.assertTrue(Double.isInfinite(DerivativeStructure.hypot(factory.variable(0, Double.POSITIVE_INFINITY),
factory.variable(0, +3.0e250)).getValue()));
Assert.assertTrue(Double.isInfinite(DerivativeStructure.hypot(factory.variable(0, +3.0e250),
factory.variable(0, Double.POSITIVE_INFINITY)).getValue()));
}
@Test
public void testPrimitiveRemainder() {
double epsilon = 1.0e-15;
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(2, maxOrder);
for (double x = -1.7; x < 2; x += 0.2) {
DerivativeStructure dsX = factory.variable(0, x);
for (double y = -1.7; y < 2; y += 0.2) {
DerivativeStructure remainder = FastMath.IEEEremainder(dsX, y);
DerivativeStructure ref = dsX.subtract(x - FastMath.IEEEremainder(x, y));
DerivativeStructure zero = remainder.subtract(ref);
for (int n = 0; n <= maxOrder; ++n) {
for (int m = 0; m <= maxOrder; ++m) {
if (n + m <= maxOrder) {
Assert.assertEquals(0, zero.getPartialDerivative(n, m), epsilon);
}
}
}
}
}
}
}
@Test
public void testRemainder() {
double epsilon = 2.0e-15;
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(2, maxOrder);
for (double x = -1.7; x < 2; x += 0.2) {
DerivativeStructure dsX = factory.variable(0, x);
for (double y = -1.7; y < 2; y += 0.2) {
DerivativeStructure dsY = factory.variable(1, y);
DerivativeStructure remainder = FastMath.IEEEremainder(dsX, dsY);
DerivativeStructure ref = dsX.subtract(dsY.multiply((x - FastMath.IEEEremainder(x, y)) / y));
DerivativeStructure zero = remainder.subtract(ref);
for (int n = 0; n <= maxOrder; ++n) {
for (int m = 0; m <= maxOrder; ++m) {
if (n + m <= maxOrder) {
Assert.assertEquals(0, zero.getPartialDerivative(n, m), epsilon);
}
}
}
}
}
}
}
@Override
@Test
public void testExp() {
double[] epsilon = new double[] { 1.0e-16, 1.0e-16, 1.0e-16, 1.0e-16, 1.0e-16 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
double refExp = FastMath.exp(x);
DerivativeStructure exp = FastMath.exp(factory.variable(0, x));
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(refExp, exp.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testExpm1Definition() {
double epsilon = 3.0e-16;
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure expm11 = FastMath.expm1(dsX);
DerivativeStructure expm12 = dsX.exp().subtract(dsX.getField().getOne());
DerivativeStructure zero = expm11.subtract(expm12);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0, zero.getPartialDerivative(n), epsilon);
}
}
}
}
@Override
@Test
public void testLog() {
double[] epsilon = new double[] { 1.0e-16, 1.0e-16, 3.0e-14, 7.0e-13, 3.0e-11 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure log = FastMath.log(factory.variable(0, x));
Assert.assertEquals(FastMath.log(x), log.getValue(), epsilon[0]);
for (int n = 1; n <= maxOrder; ++n) {
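// d^n ln(x) / dx^n = (-1)^(n-1) * (n-1)! / x^n, expressed below via (-x)^n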
double refDer = -CombinatoricsUtils.factorial(n - 1) / FastMath.pow(-x, n);
Assert.assertEquals(refDer, log.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testLog1pDefinition() {
double epsilon = 3.0e-16;
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
for (double x = 0.1; x < 1.2; x += 0.001) {
DSFactory factory = new DSFactory(1, maxOrder);
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure log1p1 = FastMath.log1p(dsX);
DerivativeStructure log1p2 = FastMath.log(dsX.add(dsX.getField().getOne()));
DerivativeStructure zero = log1p1.subtract(log1p2);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0, zero.getPartialDerivative(n), epsilon);
}
}
}
}
@Test
public void testLog10Definition() {
double[] epsilon = new double[] { 3.0e-16, 9.0e-16, 8.0e-15, 3.0e-13, 8.0e-12 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure log101 = FastMath.log10(dsX);
DerivativeStructure log102 = dsX.log().divide(FastMath.log(10.0));
DerivativeStructure zero = log101.subtract(log102);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testLogExp() {
double[] epsilon = new double[] { 2.0e-16, 2.0e-16, 3.0e-16, 2.0e-15, 6.0e-15 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = dsX.exp().log();
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testLog1pExpm1() {
double[] epsilon = new double[] { 6.0e-17, 3.0e-16, 5.0e-16, 9.0e-16, 6.0e-15 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = dsX.expm1().log1p();
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testLog10Power() {
double[] epsilon = new double[] { 3.0e-16, 3.0e-16, 9.0e-16, 6.0e-15, 6.0e-14 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = factory.constant(10.0).pow(dsX).log10();
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testSinCosSeparated() {
double epsilon = 5.0e-16;
for (int maxOrder = 0; maxOrder < 6; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure sin = FastMath.sin(dsX);
DerivativeStructure cos = FastMath.cos(dsX);
double s = FastMath.sin(x);
double c = FastMath.cos(x);
for (int n = 0; n <= maxOrder; ++n) {
switch (n % 4) {
case 0 :
Assert.assertEquals( s, sin.getPartialDerivative(n), epsilon);
Assert.assertEquals( c, cos.getPartialDerivative(n), epsilon);
break;
case 1 :
Assert.assertEquals( c, sin.getPartialDerivative(n), epsilon);
Assert.assertEquals(-s, cos.getPartialDerivative(n), epsilon);
break;
case 2 :
Assert.assertEquals(-s, sin.getPartialDerivative(n), epsilon);
Assert.assertEquals(-c, cos.getPartialDerivative(n), epsilon);
break;
default :
Assert.assertEquals(-c, sin.getPartialDerivative(n), epsilon);
Assert.assertEquals( s, cos.getPartialDerivative(n), epsilon);
break;
}
}
}
}
}
@Test
public void testSinCosCombined() {
double epsilon = 5.0e-16;
for (int maxOrder = 0; maxOrder < 6; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
FieldSinCos<DerivativeStructure> sinCos = FastMath.sinCos(dsX);
double s = FastMath.sin(x);
double c = FastMath.cos(x);
for (int n = 0; n <= maxOrder; ++n) {
switch (n % 4) {
case 0 :
Assert.assertEquals( s, sinCos.sin().getPartialDerivative(n), epsilon);
Assert.assertEquals( c, sinCos.cos().getPartialDerivative(n), epsilon);
break;
case 1 :
Assert.assertEquals( c, sinCos.sin().getPartialDerivative(n), epsilon);
Assert.assertEquals(-s, sinCos.cos().getPartialDerivative(n), epsilon);
break;
case 2 :
Assert.assertEquals(-s, sinCos.sin().getPartialDerivative(n), epsilon);
Assert.assertEquals(-c, sinCos.cos().getPartialDerivative(n), epsilon);
break;
default :
Assert.assertEquals(-c, sinCos.sin().getPartialDerivative(n), epsilon);
Assert.assertEquals( s, sinCos.cos().getPartialDerivative(n), epsilon);
break;
}
}
}
}
}
@Test
public void testSinAsin() {
double[] epsilon = new double[] { 3.0e-16, 5.0e-16, 3.0e-15, 2.0e-14, 4.0e-13 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = FastMath.asin(FastMath.sin(dsX));
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testCosAcos() {
double[] epsilon = new double[] { 6.0e-16, 6.0e-15, 2.0e-13, 4.0e-12, 2.0e-10 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = FastMath.acos(FastMath.cos(dsX));
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testTanAtan() {
double[] epsilon = new double[] { 6.0e-17, 2.0e-16, 2.0e-15, 4.0e-14, 2.0e-12 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = FastMath.atan(FastMath.tan(dsX));
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testTangentDefinition() {
double[] epsilon = new double[] { 5.0e-16, 2.0e-15, 3.0e-14, 5.0e-13, 2.0e-11 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure tan1 = dsX.sin().divide(dsX.cos());
DerivativeStructure tan2 = dsX.tan();
DerivativeStructure zero = tan1.subtract(tan2);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Override
@Test
public void testAtan2() {
double[] epsilon = new double[] { 5.0e-16, 3.0e-15, 2.2e-14, 1.0e-12, 8.0e-11 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(2, maxOrder);
for (double x = -1.7; x < 2; x += 0.2) {
DerivativeStructure dsX = factory.variable(0, x);
for (double y = -1.7; y < 2; y += 0.2) {
DerivativeStructure dsY = factory.variable(1, y);
DerivativeStructure atan2 = FastMath.atan2(dsY, dsX);
DerivativeStructure ref = dsY.divide(dsX).atan();
if (x < 0) {
ref = (y < 0) ? ref.subtract(FastMath.PI) : ref.add(FastMath.PI);
}
DerivativeStructure zero = atan2.subtract(ref);
for (int n = 0; n <= maxOrder; ++n) {
for (int m = 0; m <= maxOrder; ++m) {
if (n + m <= maxOrder) {
Assert.assertEquals(0, zero.getPartialDerivative(n, m), epsilon[n + m]);
}
}
}
}
}
}
}
@Test
public void testAtan2SpecialCasesDerivative() {
DSFactory factory = new DSFactory(2, 2);
DerivativeStructure pp =
DerivativeStructure.atan2(factory.variable(1, +0.0),
factory.variable(1, +0.0));
Assert.assertEquals(0, pp.getValue(), 1.0e-15);
Assert.assertEquals(+1, FastMath.copySign(1, pp.getValue()), 1.0e-15);
DerivativeStructure pn =
DerivativeStructure.atan2(factory.variable(1, +0.0),
factory.variable(1, -0.0));
Assert.assertEquals(FastMath.PI, pn.getValue(), 1.0e-15);
DerivativeStructure np =
DerivativeStructure.atan2(factory.variable(1, -0.0),
factory.variable(1, +0.0));
Assert.assertEquals(0, np.getValue(), 1.0e-15);
Assert.assertEquals(-1, FastMath.copySign(1, np.getValue()), 1.0e-15);
DerivativeStructure nn =
DerivativeStructure.atan2(factory.variable(1, -0.0),
factory.variable(1, -0.0));
Assert.assertEquals(-FastMath.PI, nn.getValue(), 1.0e-15);
}
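// Exposition-only sketch: the closed forms of the first partials of atan2,
// d/dx atan2(y, x) = -y / (x^2 + y^2) and d/dy atan2(y, x) = x / (x^2 + y^2),
// checked at the arbitrary regular point (x, y) = (0.8, -0.3).
private static void demoAtan2FirstPartials() {
    DSFactory factory = new DSFactory(2, 1);
    double x0 = 0.8;
    double y0 = -0.3;
    DerivativeStructure atan2 = DerivativeStructure.atan2(factory.variable(1, y0),
                                                          factory.variable(0, x0));
    double r2 = x0 * x0 + y0 * y0;
    Assert.assertEquals(-y0 / r2, atan2.getPartialDerivative(1, 0), 1.0e-14);
    Assert.assertEquals( x0 / r2, atan2.getPartialDerivative(0, 1), 1.0e-14);
}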
@Test
public void testSinhCoshCombined() {
double epsilon = 5.0e-16;
for (int maxOrder = 0; maxOrder < 6; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
FieldSinhCosh<DerivativeStructure> sinhCosh = FastMath.sinhCosh(dsX);
double sh = FastMath.sinh(x);
double ch = FastMath.cosh(x);
for (int n = 0; n <= maxOrder; ++n) {
if (n % 2 == 0) {
Assert.assertEquals(sh, sinhCosh.sinh().getPartialDerivative(n), epsilon);
Assert.assertEquals(ch, sinhCosh.cosh().getPartialDerivative(n), epsilon);
} else {
Assert.assertEquals(ch, sinhCosh.sinh().getPartialDerivative(n), epsilon);
Assert.assertEquals(sh, sinhCosh.cosh().getPartialDerivative(n), epsilon);
}
}
}
}
}
@Test
public void testSinhDefinition() {
double[] epsilon = new double[] { 3.0e-16, 3.0e-16, 5.0e-16, 2.0e-15, 6.0e-15 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure sinh1 = dsX.exp().subtract(dsX.exp().reciprocal()).multiply(0.5);
DerivativeStructure sinh2 = FastMath.sinh(dsX);
DerivativeStructure zero = sinh1.subtract(sinh2);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testCoshDefinition() {
double[] epsilon = new double[] { 3.0e-16, 3.0e-16, 5.0e-16, 2.0e-15, 6.0e-15 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure cosh1 = dsX.exp().add(dsX.exp().reciprocal()).multiply(0.5);
DerivativeStructure cosh2 = FastMath.cosh(dsX);
DerivativeStructure zero = cosh1.subtract(cosh2);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testTanhDefinition() {
double[] epsilon = new double[] { 3.0e-16, 5.0e-16, 7.0e-16, 3.0e-15, 2.0e-14 };
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure tanh1 = dsX.exp().subtract(dsX.exp().reciprocal()).divide(dsX.exp().add(dsX.exp().reciprocal()));
DerivativeStructure tanh2 = FastMath.tanh(dsX);
DerivativeStructure zero = tanh1.subtract(tanh2);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testSinhAsinh() {
double[] epsilon = new double[] { 3.0e-16, 3.0e-16, 4.0e-16, 7.0e-16, 3.0e-15, 8.0e-15 };
for (int maxOrder = 0; maxOrder < 6; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = FastMath.asinh(dsX.sinh());
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testCoshAcosh() {
double[] epsilon = new double[] { 2.0e-15, 1.0e-14, 2.0e-13, 6.0e-12, 3.0e-10, 2.0e-8 };
for (int maxOrder = 0; maxOrder < 6; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = FastMath.acosh(dsX.cosh());
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testTanhAtanh() {
double[] epsilon = new double[] { 3.0e-16, 2.0e-16, 7.0e-16, 4.0e-15, 3.0e-14, 4.0e-13 };
for (int maxOrder = 0; maxOrder < 6; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = FastMath.atanh(dsX.tanh());
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
@Test
public void testCompositionOneVariableY() {
double epsilon = 1.0e-13;
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.1) {
DerivativeStructure dsX = factory.constant(x);
for (double y = 0.1; y < 1.2; y += 0.1) {
DerivativeStructure dsY = factory.variable(0, y);
DerivativeStructure f = dsX.divide(dsY).sqrt();
double f0 = FastMath.sqrt(x / y);
Assert.assertEquals(f0, f.getValue(), FastMath.abs(epsilon * f0));
if (f.getOrder() > 0) {
double f1 = -x / (2 * y * y * f0);
Assert.assertEquals(f1, f.getPartialDerivative(1), FastMath.abs(epsilon * f1));
if (f.getOrder() > 1) {
double f2 = (f0 - x / (4 * y * f0)) / (y * y);
Assert.assertEquals(f2, f.getPartialDerivative(2), FastMath.abs(epsilon * f2));
if (f.getOrder() > 2) {
double f3 = (x / (8 * y * f0) - 2 * f0) / (y * y * y);
Assert.assertEquals(f3, f.getPartialDerivative(3), FastMath.abs(epsilon * f3));
}
}
}
}
}
}
}
@Test
public void testTaylorPolynomial() {
DSFactory factory = new DSFactory(3, 4);
for (double x = 0; x < 1.2; x += 0.1) {
DerivativeStructure dsX = factory.variable(0, x);
for (double y = 0; y < 1.2; y += 0.2) {
DerivativeStructure dsY = factory.variable(1, y);
for (double z = 0; z < 1.2; z += 0.2) {
DerivativeStructure dsZ = factory.variable(2, z);
DerivativeStructure f = dsX.multiply(dsY).add(dsZ).multiply(dsX).multiply(dsY);
for (double dx = -0.2; dx < 0.2; dx += 0.2) {
for (double dy = -0.2; dy < 0.2; dy += 0.1) {
for (double dz = -0.2; dz < 0.2; dz += 0.1) {
double ref = (x + dx) * (y + dy) * ((x + dx) * (y + dy) + (z + dz));
Assert.assertEquals(ref, f.taylor(dx, dy, dz), 2.0e-15);
}
}
}
}
}
}
}
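// Exposition-only sketch: in one variable, taylor(h) evaluates
// f(x) + f'(x) h + f''(x) h^2 / 2! + ..., which is exact when f is a
// polynomial whose degree does not exceed the derivation order, as in the
// test above.
private static void demoTaylorUnivariate() {
    DerivativeStructure x = new DSFactory(1, 2).variable(0, 1.0);
    DerivativeStructure f = x.multiply(x); // f(x) = x^2, degree 2 = order
    double h = 0.5;
    // 1 + 2 * 0.5 + 2 * 0.5^2 / 2 = 2.25 = 1.5^2
    Assert.assertEquals((1.0 + h) * (1.0 + h), f.taylor(h), 1.0e-15);
}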
@Test
public void testTaylorAtan2() {
double[] expected = new double[] { 0.214, 0.0241, 0.00422, 6.48e-4, 8.04e-5 };
double x0 = 0.1;
double y0 = -0.3;
for (int maxOrder = 0; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(2, maxOrder);
DerivativeStructure dsX = factory.variable(0, x0);
DerivativeStructure dsY = factory.variable(1, y0);
DerivativeStructure atan2 = DerivativeStructure.atan2(dsY, dsX);
double maxError = 0;
for (double dx = -0.05; dx < 0.05; dx += 0.001) {
for (double dy = -0.05; dy < 0.05; dy += 0.001) {
double ref = FastMath.atan2(y0 + dy, x0 + dx);
maxError = FastMath.max(maxError, FastMath.abs(ref - atan2.taylor(dx, dy)));
}
}
Assert.assertEquals(0.0, expected[maxOrder] - maxError, 0.01 * expected[maxOrder]);
}
}
@Test
public void testAbs() {
DSFactory factory = new DSFactory(1, 1);
DerivativeStructure minusOne = factory.variable(0, -1.0);
Assert.assertEquals(+1.0, FastMath.abs(minusOne).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.abs(minusOne).getPartialDerivative(1), 1.0e-15);
DerivativeStructure plusOne = factory.variable(0, +1.0);
Assert.assertEquals(+1.0, FastMath.abs(plusOne).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(+1.0, FastMath.abs(plusOne).getPartialDerivative(1), 1.0e-15);
DerivativeStructure minusZero = factory.variable(0, -0.0);
Assert.assertEquals(+0.0, FastMath.abs(minusZero).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.abs(minusZero).getPartialDerivative(1), 1.0e-15);
DerivativeStructure plusZero = factory.variable(0, +0.0);
Assert.assertEquals(+0.0, FastMath.abs(plusZero).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(+1.0, FastMath.abs(plusZero).getPartialDerivative(1), 1.0e-15);
}
@Override
@Test
public void testSign() {
DSFactory factory = new DSFactory(1, 1);
DerivativeStructure minusOne = factory.variable(0, -1.0);
Assert.assertEquals(-1.0, FastMath.sign(minusOne).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals( 0.0, FastMath.sign(minusOne).getPartialDerivative(1), 1.0e-15);
DerivativeStructure plusOne = factory.variable(0, +1.0);
Assert.assertEquals(+1.0, FastMath.sign(plusOne).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals( 0.0, FastMath.sign(plusOne).getPartialDerivative(1), 1.0e-15);
DerivativeStructure minusZero = factory.variable(0, -0.0);
Assert.assertEquals(-0.0, FastMath.sign(minusZero).getPartialDerivative(0), 1.0e-15);
Assert.assertTrue(Double.doubleToLongBits(FastMath.sign(minusZero).getValue()) < 0);
Assert.assertEquals( 0.0, FastMath.sign(minusZero).getPartialDerivative(1), 1.0e-15);
DerivativeStructure plusZero = factory.variable(0, +0.0);
Assert.assertEquals(+0.0, FastMath.sign(plusZero).getPartialDerivative(0), 1.0e-15);
Assert.assertTrue(Double.doubleToLongBits(FastMath.sign(plusZero).getValue()) == 0);
Assert.assertEquals( 0.0, FastMath.sign(plusZero).getPartialDerivative(1), 1.0e-15);
}
@Test
public void testCeilFloorRintLong() {
DSFactory factory = new DSFactory(1, 1);
DerivativeStructure x = factory.variable(0, -1.5);
Assert.assertEquals(-1.5, x.getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(+1.0, x.getPartialDerivative(1), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.ceil(x).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(+0.0, FastMath.ceil(x).getPartialDerivative(1), 1.0e-15);
Assert.assertEquals(-2.0, FastMath.floor(x).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(+0.0, FastMath.floor(x).getPartialDerivative(1), 1.0e-15);
Assert.assertEquals(-2.0, FastMath.rint(x).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(+0.0, FastMath.rint(x).getPartialDerivative(1), 1.0e-15);
Assert.assertEquals(-2.0, x.subtract(x.getField().getOne()).rint().getPartialDerivative(0), 1.0e-15);
}
@Test
public void testCopySign() {
DSFactory factory = new DSFactory(1, 1);
DerivativeStructure minusOne = factory.variable(0, -1.0);
Assert.assertEquals(+1.0, FastMath.copySign(minusOne, +1.0).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.copySign(minusOne, +1.0).getPartialDerivative(1), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.copySign(minusOne, -1.0).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(+1.0, FastMath.copySign(minusOne, -1.0).getPartialDerivative(1), 1.0e-15);
Assert.assertEquals(+1.0, FastMath.copySign(minusOne, +0.0).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.copySign(minusOne, +0.0).getPartialDerivative(1), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.copySign(minusOne, -0.0).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(+1.0, FastMath.copySign(minusOne, -0.0).getPartialDerivative(1), 1.0e-15);
Assert.assertEquals(+1.0, FastMath.copySign(minusOne, Double.NaN).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.copySign(minusOne, Double.NaN).getPartialDerivative(1), 1.0e-15);
DerivativeStructure plusOne = factory.variable(0, +1.0);
Assert.assertEquals(+1.0, FastMath.copySign(plusOne, factory.constant(+1.0)).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(+1.0, FastMath.copySign(plusOne, factory.constant(+1.0)).getPartialDerivative(1), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.copySign(plusOne, factory.constant(-1.0)).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.copySign(plusOne, factory.constant(-1.0)).getPartialDerivative(1), 1.0e-15);
Assert.assertEquals(+1.0, FastMath.copySign(plusOne, factory.constant(+0.0)).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(+1.0, FastMath.copySign(plusOne, factory.constant(+0.0)).getPartialDerivative(1), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.copySign(plusOne, factory.constant(-0.0)).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(-1.0, FastMath.copySign(plusOne, factory.constant(-0.0)).getPartialDerivative(1), 1.0e-15);
Assert.assertEquals(+1.0, FastMath.copySign(plusOne, factory.constant(Double.NaN)).getPartialDerivative(0), 1.0e-15);
Assert.assertEquals(+1.0, FastMath.copySign(plusOne, factory.constant(Double.NaN)).getPartialDerivative(1), 1.0e-15);
}
@Test
public void testToDegreesDefinition() {
double epsilon = 3.0e-16;
for (int maxOrder = 0; maxOrder < 6; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
Assert.assertEquals(FastMath.toDegrees(x), dsX.toDegrees().getValue(), epsilon);
for (int n = 1; n <= maxOrder; ++n) {
if (n == 1) {
Assert.assertEquals(180 / FastMath.PI, dsX.toDegrees().getPartialDerivative(1), epsilon);
} else {
Assert.assertEquals(0.0, dsX.toDegrees().getPartialDerivative(n), epsilon);
}
}
}
}
}
@Test
public void testToRadiansDefinition() {
double epsilon = 3.0e-16;
for (int maxOrder = 0; maxOrder < 6; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
Assert.assertEquals(FastMath.toRadians(x), dsX.toRadians().getValue(), epsilon);
for (int n = 1; n <= maxOrder; ++n) {
if (n == 1) {
Assert.assertEquals(FastMath.PI / 180, dsX.toRadians().getPartialDerivative(1), epsilon);
} else {
Assert.assertEquals(0.0, dsX.toRadians().getPartialDerivative(n), epsilon);
}
}
}
}
}
@Test
public void testDegRad() {
double epsilon = 3.0e-16;
for (int maxOrder = 0; maxOrder < 6; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure rebuiltX = dsX.toDegrees().toRadians();
DerivativeStructure zero = rebuiltX.subtract(dsX);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon);
}
}
}
}
@Test(expected=MathIllegalArgumentException.class)
public void testComposeMismatchedDimensions() {
new DSFactory(1, 3).variable(0, 1.2).compose(new double[3]);
}
@Test
public void testCompose() {
double[] epsilon = new double[] { 1.0e-20, 5.0e-14, 2.0e-13, 3.0e-13, 2.0e-13, 1.0e-20 };
PolynomialFunction poly =
new PolynomialFunction(new double[] { 1.0, 2.0, 3.0, 4.0, 5.0, 6.0 });
for (int maxOrder = 0; maxOrder < 6; ++maxOrder) {
DSFactory factory = new DSFactory(1, maxOrder);
PolynomialFunction[] p = new PolynomialFunction[maxOrder + 1];
p[0] = poly;
for (int i = 1; i <= maxOrder; ++i) {
p[i] = p[i - 1].polynomialDerivative();
}
for (double x = 0.1; x < 1.2; x += 0.001) {
DerivativeStructure dsX = factory.variable(0, x);
DerivativeStructure dsY1 = dsX.getField().getZero();
for (int i = poly.degree(); i >= 0; --i) {
dsY1 = dsY1.multiply(dsX).add(poly.getCoefficients()[i]);
}
double[] f = new double[maxOrder + 1];
for (int i = 0; i < f.length; ++i) {
f[i] = p[i].value(x);
}
DerivativeStructure dsY2 = dsX.compose(f);
DerivativeStructure zero = dsY1.subtract(dsY2);
for (int n = 0; n <= maxOrder; ++n) {
Assert.assertEquals(0.0, zero.getPartialDerivative(n), epsilon[n]);
}
}
}
}
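// Exposition-only sketch: compose applies the univariate chain rule to an
// array holding the outer function's value and derivatives at the current
// point; feeding it [sin(x0), cos(x0)] reproduces sin at first order.
private static void demoComposeChainRule() {
    DerivativeStructure x = new DSFactory(1, 1).variable(0, 0.4);
    DerivativeStructure viaCompose = x.compose(FastMath.sin(0.4), FastMath.cos(0.4));
    DerivativeStructure direct = x.sin();
    Assert.assertEquals(direct.getValue(), viaCompose.getValue(), 1.0e-15);
    Assert.assertEquals(direct.getPartialDerivative(1), viaCompose.getPartialDerivative(1), 1.0e-15);
}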
@Test
public void testIntegration() {
// check that first-order integration on two variables does not depend on sequence of operations
final RandomGenerator random = new Well19937a(0x87bb96d6e11557bdL);
final DSFactory factory = new DSFactory(3, 7);
final int size = factory.getCompiler().getSize();
for (int count = 0; count < 100; ++count) {
final double[] data = new double[size];
for (int i = 0; i < size; i++) {
data[i] = random.nextDouble();
}
final DerivativeStructure f = factory.build(data);
final DerivativeStructure i2fIxIy = f.integrate(0, 1).integrate(1, 1);
final DerivativeStructure i2fIyIx = f.integrate(1, 1).integrate(0, 1);
checkEquals(i2fIxIy, i2fIyIx, 0.);
}
}
@Test
public void testIntegrationGreaterThanOrder() {
// check that integration to a too high order generates zero
// as integration constants are set to zero
final RandomGenerator random = new Well19937a(0x4744a847b11e4c6fL);
final DSFactory factory = new DSFactory(3, 7);
final int size = factory.getCompiler().getSize();
for (int count = 0; count < 100; ++count) {
final double[] data = new double[size];
for (int i = 0; i < size; i++) {
data[i] = random.nextDouble();
}
final DerivativeStructure f = factory.build(data);
for (int index = 0; index < factory.getCompiler().getFreeParameters(); ++index) {
final DerivativeStructure integ = f.integrate(index, factory.getCompiler().getOrder() + 1);
checkEquals(factory.constant(0), integ, 0.);
}
}
}
@Test
public void testIntegrationNoOp() {
// check that integration of order 0 is no-op
final RandomGenerator random = new Well19937a(0x75a35152f30f644bL);
final DSFactory factory = new DSFactory(3, 7);
final int size = factory.getCompiler().getSize();
for (int count = 0; count < 100; ++count) {
final double[] data = new double[size];
for (int i = 0; i < size; i++) {
data[i] = random.nextDouble();
}
final DerivativeStructure f = factory.build(data);
for (int index = 0; index < factory.getCompiler().getFreeParameters(); ++index) {
final DerivativeStructure integ = f.integrate(index, 0);
checkEquals(f, integ, 0.);
}
}
}
@Test
public void testDifferentiationNoOp() {
// check that differentiation of order 0 is no-op
final RandomGenerator random = new Well19937a(0x3b6ae4c2f1282949L);
final DSFactory factory = new DSFactory(3, 7);
final int size = factory.getCompiler().getSize();
for (int count = 0; count < 100; ++count) {
final double[] data = new double[size];
for (int i = 0; i < size; i++) {
data[i] = random.nextDouble();
}
final DerivativeStructure f = factory.build(data);
for (int index = 0; index < factory.getCompiler().getFreeParameters(); ++index) {
final DerivativeStructure integ = f.differentiate(index, 0);
checkEquals(f, integ, 0.);
}
}
}
@Test
public void testIntegrationDifferentiation() {
// check that integration and differentiation for univariate functions are each other's inverses,
// except for the constant term and the highest order one
final RandomGenerator random = new Well19937a(0x67fe66c05e5ee222L);
final DSFactory factory = new DSFactory(1, 25);
final int size = factory.getCompiler().getSize();
for (int count = 0; count < 100; ++count) {
final double[] data = new double[size];
for (int i = 1; i < size - 1; i++) {
data[i] = random.nextDouble();
}
final int indexVar = 0;
final DerivativeStructure f = factory.build(data);
final DerivativeStructure f2 = f.integrate(indexVar, 1).differentiate(indexVar, 1);
final DerivativeStructure f3 = f.differentiate(indexVar, 1).integrate(indexVar, 1);
checkEquals(f2, f, 0.);
checkEquals(f2, f3, 0.);
// check special case when non-positive integration order actually returns differentiation
final DerivativeStructure df = f.integrate(indexVar, -1);
final DerivativeStructure df2 = f.differentiate(indexVar, 1);
checkEquals(df, df2, 0.);
// check special case when non-positive differentiation order actually returns integration
final DerivativeStructure fi = f.differentiate(indexVar, -1);
final DerivativeStructure fi2 = f.integrate(indexVar, 1);
checkEquals(fi, fi2, 0.);
}
}
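// Exposition-only sketch of the shifting behaviour described above: for
// f(x) = x^2 at order 3, differentiate(0, 1) yields the structure of
// f'(x) = 2x, the highest-order coefficient being necessarily lost.
private static void demoDifferentiateMonomial() {
    DerivativeStructure x = new DSFactory(1, 3).variable(0, 2.0);
    DerivativeStructure df = x.multiply(x).differentiate(0, 1);
    Assert.assertEquals(4.0, df.getValue(), 1.0e-15);              // f'(2)  = 2 * 2
    Assert.assertEquals(2.0, df.getPartialDerivative(1), 1.0e-15); // f''(2) = 2
    Assert.assertEquals(0.0, df.getPartialDerivative(2), 1.0e-15); // f'''   = 0
}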
@Test
public void testDifferentiation1() {
// check differentiation operator with result obtained manually
final int freeParam = 3;
final int order = 5;
final DSFactory factory = new DSFactory(freeParam, order);
final DerivativeStructure f = factory.variable(0, 1.0);
final int[] orders = new int[freeParam];
orders[0] = 2;
orders[1] = 1;
orders[2] = 1;
final double value = 10.;
f.setDerivativeComponent(factory.getCompiler().getPartialDerivativeIndex(orders), value);
final DerivativeStructure dfDx = f.differentiate(0, 1);
orders[0] -= 1;
Assert.assertEquals(1., dfDx.getPartialDerivative(new int[freeParam]), 0.);
Assert.assertEquals(value, dfDx.getPartialDerivative(orders), 0.);
checkEquals(factory.constant(0), f.differentiate(0, order + 1), 0.);
}
@Test
public void testDifferentiation2() {
// check that first-order differentiation twice is same as second-order differentiation
final RandomGenerator random = new Well19937a(0xec293aaee352de94L);
final DSFactory factory = new DSFactory(5, 4);
final int size = factory.getCompiler().getSize();
for (int count = 0; count < 100; ++count) {
final double[] data = new double[size];
for (int i = 0; i < size; i++) {
data[i] = random.nextDouble();
}
final DerivativeStructure f = factory.build(data);
final DerivativeStructure d2fDx2 = f.differentiate(0, 1).differentiate(0, 1);
final DerivativeStructure d2fDx2Bis = f.differentiate(0, 2);
checkEquals(d2fDx2, d2fDx2Bis, 0.);
}
}
@Test
public void testDifferentiation3() {
// check that first-order differentiation on two variables does not depend on sequence of operations
final RandomGenerator random = new Well19937a(0x35409ecc1348e46cL);
final DSFactory factory = new DSFactory(3, 7);
final int size = factory.getCompiler().getSize();
for (int count = 0; count < 100; ++count) {
final double[] data = new double[size];
for (int i = 0; i < size; i++) {
data[i] = random.nextDouble();
}
final DerivativeStructure f = factory.build(data);
final DerivativeStructure d2fDxDy = f.differentiate(0, 1).differentiate(1, 1);
final DerivativeStructure d2fDyDx = f.differentiate(1, 1).differentiate(0, 1);
checkEquals(d2fDxDy, d2fDyDx, 0.);
}
}
@Test
public void testField() {
for (int maxOrder = 1; maxOrder < 5; ++maxOrder) {
DSFactory factory = new DSFactory(3, maxOrder);
DerivativeStructure x = factory.variable(0, 1.0);
checkF0F1(x.getField().getZero(), 0.0, 0.0, 0.0, 0.0);
checkF0F1(x.getField().getOne(), 1.0, 0.0, 0.0, 0.0);
Assert.assertEquals(maxOrder, x.getField().getZero().getOrder());
Assert.assertEquals(3, x.getField().getZero().getFreeParameters());
Assert.assertEquals(DerivativeStructure.class, x.getField().getRuntimeClass());
}
}
@Test
public void testOneParameterConstructor() {
double x = 1.2;
double cos = FastMath.cos(x);
double sin = FastMath.sin(x);
DSFactory factory = new DSFactory(1, 4);
DerivativeStructure yRef = factory.variable(0, x).cos();
try {
new DSFactory(1, 4).build(0.0, 0.0);
Assert.fail("an exception should have been thrown");
} catch (MathIllegalArgumentException dme) {
// expected
} catch (Exception e) {
Assert.fail("wrong exceptionc caught " + e.getClass().getName());
}
double[] derivatives = new double[] { cos, -sin, -cos, sin, cos };
DerivativeStructure y = factory.build(derivatives);
checkEquals(yRef, y, 1.0e-15);
UnitTestUtils.assertEquals(derivatives, y.getAllDerivatives(), 1.0e-15);
}
@Test
public void testOneOrderConstructor() {
DSFactory factory = new DSFactory(3, 1);
double x = 1.2;
double y = 2.4;
double z = 12.5;
DerivativeStructure xRef = factory.variable(0, x);
DerivativeStructure yRef = factory.variable(1, y);
DerivativeStructure zRef = factory.variable(2, z);
try {
new DSFactory(3, 1).build(x + y - z, 1.0, 1.0);
Assert.fail("an exception should have been thrown");
} catch (MathIllegalArgumentException dme) {
// expected
} catch (Exception e) {
Assert.fail("wrong exceptionc caught " + e.getClass().getName());
}
double[] derivatives = new double[] { x + y - z, 1.0, 1.0, -1.0 };
DerivativeStructure t = factory.build(derivatives);
checkEquals(xRef.add(yRef.subtract(zRef)), t, 1.0e-15);
UnitTestUtils.assertEquals(derivatives, xRef.add(yRef.subtract(zRef)).getAllDerivatives(), 1.0e-15);
}
@Test
public void testLinearCombination1DSDS() {
DSFactory factory = new DSFactory(6, 1);
final DerivativeStructure[] a = new DerivativeStructure[] {
factory.variable(0, -1321008684645961.0 / 268435456.0),
factory.variable(1, -5774608829631843.0 / 268435456.0),
factory.variable(2, -7645843051051357.0 / 8589934592.0)
};
final DerivativeStructure[] b = new DerivativeStructure[] {
factory.variable(3, -5712344449280879.0 / 2097152.0),
factory.variable(4, -4550117129121957.0 / 2097152.0),
factory.variable(5, 8846951984510141.0 / 131072.0)
};
final DerivativeStructure abSumInline = a[0].linearCombination(a[0], b[0], a[1], b[1], a[2], b[2]);
final DerivativeStructure abSumArray = a[0].linearCombination(a, b);
Assert.assertEquals(abSumInline.getValue(), abSumArray.getValue(), 0);
Assert.assertEquals(-1.8551294182586248737720779899, abSumInline.getValue(), 1.0e-15);
Assert.assertEquals(b[0].getValue(), abSumInline.getPartialDerivative(1, 0, 0, 0, 0, 0), 1.0e-15);
Assert.assertEquals(b[1].getValue(), abSumInline.getPartialDerivative(0, 1, 0, 0, 0, 0), 1.0e-15);
Assert.assertEquals(b[2].getValue(), abSumInline.getPartialDerivative(0, 0, 1, 0, 0, 0), 1.0e-15);
Assert.assertEquals(a[0].getValue(), abSumInline.getPartialDerivative(0, 0, 0, 1, 0, 0), 1.0e-15);
Assert.assertEquals(a[1].getValue(), abSumInline.getPartialDerivative(0, 0, 0, 0, 1, 0), 1.0e-15);
Assert.assertEquals(a[2].getValue(), abSumInline.getPartialDerivative(0, 0, 0, 0, 0, 1), 1.0e-15);
}
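// Exposition-only note on the data in testLinearCombination1DSDS above: the
// fractions encode three products of magnitude ~1e16 that cancel almost
// completely, so a plain double-precision evaluation a0*b0 + a1*b1 + a2*b2
// cannot retain the low-order bits; the compensated summation behind
// linearCombination does, which is why the exact value
// -1.8551294182586248737720779899 can be pinned down to 1.0e-15.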
@Test
public void testLinearCombination1DoubleDS() {
DSFactory factory = new DSFactory(3, 1);
final double[] a = new double[] {
-1321008684645961.0 / 268435456.0,
-5774608829631843.0 / 268435456.0,
-7645843051051357.0 / 8589934592.0
};
final DerivativeStructure[] b = new DerivativeStructure[] {
factory.variable(0, -5712344449280879.0 / 2097152.0),
factory.variable(1, -4550117129121957.0 / 2097152.0),
factory.variable(2, 8846951984510141.0 / 131072.0)
};
final DerivativeStructure abSumInline = b[0].linearCombination(a[0], b[0],
a[1], b[1],
a[2], b[2]);
final DerivativeStructure abSumArray = b[0].linearCombination(a, b);
Assert.assertEquals(abSumInline.getValue(), abSumArray.getValue(), 0);
Assert.assertEquals(-1.8551294182586248737720779899, abSumInline.getValue(), 1.0e-15);
Assert.assertEquals(a[0], abSumInline.getPartialDerivative(1, 0, 0), 1.0e-15);
Assert.assertEquals(a[1], abSumInline.getPartialDerivative(0, 1, 0), 1.0e-15);
Assert.assertEquals(a[2], abSumInline.getPartialDerivative(0, 0, 1), 1.0e-15);
}
@Test
public void testLinearCombination2DSDS() {
// we compare accurate versus naive dot product implementations
// on regular vectors (i.e. not extreme cases like in the previous test)
Well1024a random = new Well1024a(0xc6af886975069f11L);
DSFactory factory = new DSFactory(4, 1);
for (int i = 0; i < 10000; ++i) {
final DerivativeStructure[] u = new DerivativeStructure[factory.getCompiler().getFreeParameters()];
final DerivativeStructure[] v = new DerivativeStructure[factory.getCompiler().getFreeParameters()];
for (int j = 0; j < u.length; ++j) {
u[j] = factory.variable(j, 1e17 * random.nextDouble());
v[j] = factory.constant(1e17 * random.nextDouble());
}
DerivativeStructure lin = u[0].linearCombination(u[0], v[0], u[1], v[1]);
double ref = u[0].getValue() * v[0].getValue() +
u[1].getValue() * v[1].getValue();
Assert.assertEquals(ref, lin.getValue(), 1.0e-15 * FastMath.abs(ref));
Assert.assertEquals(v[0].getValue(), lin.getPartialDerivative(1, 0, 0, 0), 1.0e-15 * FastMath.abs(v[0].getValue()));
Assert.assertEquals(v[1].getValue(), lin.getPartialDerivative(0, 1, 0, 0), 1.0e-15 * FastMath.abs(v[1].getValue()));
lin = u[0].linearCombination(u[0], v[0], u[1], v[1], u[2], v[2]);
ref = u[0].getValue() * v[0].getValue() +
u[1].getValue() * v[1].getValue() +
u[2].getValue() * v[2].getValue();
Assert.assertEquals(ref, lin.getValue(), 1.0e-15 * FastMath.abs(ref));
Assert.assertEquals(v[0].getValue(), lin.getPartialDerivative(1, 0, 0, 0), 1.0e-15 * FastMath.abs(v[0].getValue()));
Assert.assertEquals(v[1].getValue(), lin.getPartialDerivative(0, 1, 0, 0), 1.0e-15 * FastMath.abs(v[1].getValue()));
Assert.assertEquals(v[2].getValue(), lin.getPartialDerivative(0, 0, 1, 0), 1.0e-15 * FastMath.abs(v[2].getValue()));
lin = u[0].linearCombination(u[0], v[0], u[1], v[1], u[2], v[2], u[3], v[3]);
ref = u[0].getValue() * v[0].getValue() +
u[1].getValue() * v[1].getValue() +
u[2].getValue() * v[2].getValue() +
u[3].getValue() * v[3].getValue();
Assert.assertEquals(ref, lin.getValue(), 1.0e-15 * FastMath.abs(ref));
Assert.assertEquals(v[0].getValue(), lin.getPartialDerivative(1, 0, 0, 0), 1.0e-15 * FastMath.abs(v[0].getValue()));
Assert.assertEquals(v[1].getValue(), lin.getPartialDerivative(0, 1, 0, 0), 1.0e-15 * FastMath.abs(v[1].getValue()));
Assert.assertEquals(v[2].getValue(), lin.getPartialDerivative(0, 0, 1, 0), 1.0e-15 * FastMath.abs(v[2].getValue()));
Assert.assertEquals(v[3].getValue(), lin.getPartialDerivative(0, 0, 0, 1), 1.0e-15 * FastMath.abs(v[3].getValue()));
}
}
@Test
public void testLinearCombination2DoubleDS() {
// we compare accurate versus naive dot product implementations
// on regular vectors (i.e. not extreme cases like in the previous test)
        Well1024a random = new Well1024a(0xc6af886975069f11L);
DSFactory factory = new DSFactory(4, 1);
for (int i = 0; i < 10000; ++i) {
final double[] u = new double[4];
final DerivativeStructure[] v = new DerivativeStructure[factory.getCompiler().getFreeParameters()];
for (int j = 0; j < u.length; ++j) {
u[j] = 1e17 * random.nextDouble();
v[j] = factory.variable(j, 1e17 * random.nextDouble());
}
DerivativeStructure lin = v[0].linearCombination(u[0], v[0], u[1], v[1]);
double ref = u[0] * v[0].getValue() +
u[1] * v[1].getValue();
Assert.assertEquals(ref, lin.getValue(), 1.0e-15 * FastMath.abs(ref));
Assert.assertEquals(u[0], lin.getPartialDerivative(1, 0, 0, 0), 1.0e-15 * FastMath.abs(v[0].getValue()));
Assert.assertEquals(u[1], lin.getPartialDerivative(0, 1, 0, 0), 1.0e-15 * FastMath.abs(v[1].getValue()));
lin = v[0].linearCombination(u[0], v[0], u[1], v[1], u[2], v[2]);
ref = u[0] * v[0].getValue() +
u[1] * v[1].getValue() +
u[2] * v[2].getValue();
Assert.assertEquals(ref, lin.getValue(), 1.0e-15 * FastMath.abs(ref));
Assert.assertEquals(u[0], lin.getPartialDerivative(1, 0, 0, 0), 1.0e-15 * FastMath.abs(v[0].getValue()));
Assert.assertEquals(u[1], lin.getPartialDerivative(0, 1, 0, 0), 1.0e-15 * FastMath.abs(v[1].getValue()));
Assert.assertEquals(u[2], lin.getPartialDerivative(0, 0, 1, 0), 1.0e-15 * FastMath.abs(v[2].getValue()));
lin = v[0].linearCombination(u[0], v[0], u[1], v[1], u[2], v[2], u[3], v[3]);
ref = u[0] * v[0].getValue() +
u[1] * v[1].getValue() +
u[2] * v[2].getValue() +
u[3] * v[3].getValue();
Assert.assertEquals(ref, lin.getValue(), 1.0e-15 * FastMath.abs(ref));
Assert.assertEquals(u[0], lin.getPartialDerivative(1, 0, 0, 0), 1.0e-15 * FastMath.abs(v[0].getValue()));
Assert.assertEquals(u[1], lin.getPartialDerivative(0, 1, 0, 0), 1.0e-15 * FastMath.abs(v[1].getValue()));
Assert.assertEquals(u[2], lin.getPartialDerivative(0, 0, 1, 0), 1.0e-15 * FastMath.abs(v[2].getValue()));
Assert.assertEquals(u[3], lin.getPartialDerivative(0, 0, 0, 1), 1.0e-15 * FastMath.abs(v[3].getValue()));
}
}
@Test
public void testSerialization() {
DerivativeStructure a = new DSFactory(3, 2).variable(0, 1.3);
DerivativeStructure b = (DerivativeStructure) UnitTestUtils.serializeAndRecover(a);
Assert.assertEquals(a.getFreeParameters(), b.getFreeParameters());
Assert.assertEquals(a.getOrder(), b.getOrder());
checkEquals(a, b, 1.0e-15);
}
@Test
public void testZero() {
DerivativeStructure zero = new DSFactory(3, 2).variable(2, 17.0).getField().getZero();
double[] a = zero.getAllDerivatives();
Assert.assertEquals(10, a.length);
for (int i = 0; i < a.length; ++i) {
Assert.assertEquals(0.0, a[i], 1.0e-15);
}
}
@Test
public void testOne() {
DerivativeStructure one = new DSFactory(3, 2).variable(2, 17.0).getField().getOne();
double[] a = one.getAllDerivatives();
Assert.assertEquals(10, a.length);
for (int i = 0; i < a.length; ++i) {
Assert.assertEquals(i == 0 ? 1.0 : 0.0, a[i], 1.0e-15);
}
}
@Test
public void testMap() {
List<int[]> pairs = new ArrayList<>();
for (int parameters = 1; parameters < 5; ++parameters) {
for (int order = 0; order < 3; ++order) {
pairs.add(new int[] { parameters, order });
}
}
Map<Field<?>, Integer> map = new HashMap<>();
for (int i = 0; i < 1000; ++i) {
// create a brand new factory for each derivative
int parameters = pairs.get(i % pairs.size())[0];
int order = pairs.get(i % pairs.size())[1];
map.put(new DSFactory(parameters, order).constant(17.0).getField(), 0);
}
        // even though we have created numerous factories,
        // there should be only one field for each parameters/order pair
Assert.assertEquals(pairs.size(), map.size());
@SuppressWarnings("unchecked")
Field<DerivativeStructure> first = (Field<DerivativeStructure>) map.entrySet().iterator().next().getKey();
Assert.assertTrue(first.equals(first));
Assert.assertFalse(first.equals(Decimal64Field.getInstance()));
}
@Test
public void testRebaseConditions() {
final DSFactory f32 = new DSFactory(3, 2);
final DSFactory f22 = new DSFactory(2, 2);
final DSFactory f31 = new DSFactory(3, 1);
try {
f32.variable(0, 0).rebase(f22.variable(0, 0), f22.variable(1, 1.0));
} catch (MathIllegalArgumentException miae) {
Assert.assertEquals(LocalizedCoreFormats.DIMENSIONS_MISMATCH, miae.getSpecifier());
Assert.assertEquals(3, ((Integer) miae.getParts()[0]).intValue());
Assert.assertEquals(2, ((Integer) miae.getParts()[1]).intValue());
}
try {
f32.variable(0, 0).rebase(f31.variable(0, 0), f31.variable(1, 1.0), f31.variable(2, 2.0));
} catch (MathIllegalArgumentException miae) {
Assert.assertEquals(LocalizedCoreFormats.DIMENSIONS_MISMATCH, miae.getSpecifier());
Assert.assertEquals(2, ((Integer) miae.getParts()[0]).intValue());
Assert.assertEquals(1, ((Integer) miae.getParts()[1]).intValue());
}
}
@Test
public void testRebaseValueMoreIntermediateThanBase() {
doTestRebaseValue(createBaseVariables(new DSFactory(2, 4), 1.5, -2.0),
q -> new DerivativeStructure[] {
q[0].add(q[1].multiply(3)),
q[0].log(),
q[1].divide(q[0].sin())
},
new DSFactory(3, 4),
p -> p[0].add(p[1].divide(p[2])),
1.0e-15);
}
@Test
public void testRebaseValueLessIntermediateThanBase() {
doTestRebaseValue(createBaseVariables(new DSFactory(3, 4), 1.5, -2.0, 0.5),
q -> new DerivativeStructure[] {
q[0].add(q[1].multiply(3)),
q[0].add(q[1]).subtract(q[2])
},
new DSFactory(2, 4),
p -> p[0].multiply(p[1]),
1.0e-15);
}
@Test
public void testRebaseValueEqualIntermediateAndBase() {
doTestRebaseValue(createBaseVariables(new DSFactory(2, 4), 1.5, -2.0),
q -> new DerivativeStructure[] {
q[0].add(q[1].multiply(3)),
q[0].add(q[1])
},
new DSFactory(2, 4),
p -> p[0].multiply(p[1]),
1.0e-15);
}
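    // Verification strategy: build intermediate variables p = qToP(q) from the
    // base variables q, evaluate f directly as f(qToP(q)) to get the reference,
    // then evaluate f on p treated as independent variables and rebase the result
    // back onto q; both results must agree in value and in every partial
    // derivative up to the common order.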
private void doTestRebaseValue(final DerivativeStructure[] q,
final CalculusFieldMultivariateVectorFunction<DerivativeStructure> qToP,
final DSFactory factoryP,
final CalculusFieldMultivariateFunction<DerivativeStructure> f,
final double tol) {
// intermediate variables as functions of base variables
final DerivativeStructure[] pBase = qToP.value(q);
// reference function
final DerivativeStructure ref = f.value(pBase);
// intermediate variables as independent variables
        final DerivativeStructure[] pIntermediate = createIntermediateVariables(factoryP, pBase);
// function of the intermediate variables
final DerivativeStructure fI = f.value(pIntermediate);
// function rebased to base variables
final DerivativeStructure rebased = fI.rebase(pBase);
Assert.assertEquals(q[0].getFreeParameters(), ref.getFreeParameters());
Assert.assertEquals(q[0].getOrder(), ref.getOrder());
Assert.assertEquals(factoryP.getCompiler().getFreeParameters(), fI.getFreeParameters());
Assert.assertEquals(factoryP.getCompiler().getOrder(), fI.getOrder());
Assert.assertEquals(ref.getFreeParameters(), rebased.getFreeParameters());
Assert.assertEquals(ref.getOrder(), rebased.getOrder());
checkEquals(ref, rebased, tol);
}
final DerivativeStructure[] createBaseVariables(final DSFactory factory, double... q) {
final DerivativeStructure[] qDS = new DerivativeStructure[q.length];
for (int i = 0; i < q.length; ++i) {
qDS[i] = factory.variable(i, q[i]);
}
return qDS;
}
    final DerivativeStructure[] createIntermediateVariables(final DSFactory factory, DerivativeStructure... pBase) {
final DerivativeStructure[] pIntermediate = new DerivativeStructure[pBase.length];
for (int i = 0; i < pBase.length; ++i) {
pIntermediate[i] = factory.variable(i, pBase[i].getReal());
}
return pIntermediate;
}
@Test
public void testRunTimeClass() {
Field<DerivativeStructure> field = new DSFactory(3, 2).constant(0.0).getField();
Assert.assertEquals(DerivativeStructure.class, field.getRuntimeClass());
}
private void checkF0F1(DerivativeStructure ds, double value, double...derivatives) {
// check dimension
Assert.assertEquals(derivatives.length, ds.getFreeParameters());
// check value, directly and also as 0th order derivative
Assert.assertEquals(value, ds.getValue(), 1.0e-15);
Assert.assertEquals(value, ds.getPartialDerivative(new int[ds.getFreeParameters()]), 1.0e-15);
// check first order derivatives
for (int i = 0; i < derivatives.length; ++i) {
int[] orders = new int[derivatives.length];
orders[i] = 1;
Assert.assertEquals(derivatives[i], ds.getPartialDerivative(orders), 1.0e-15);
}
}
private void checkEquals(DerivativeStructure ds1, DerivativeStructure ds2, double epsilon) {
// check dimension
Assert.assertEquals(ds1.getFreeParameters(), ds2.getFreeParameters());
Assert.assertEquals(ds1.getOrder(), ds2.getOrder());
int[] derivatives = new int[ds1.getFreeParameters()];
int sum = 0;
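        // enumerate all differentiation multi-indices like an odometer in base
        // (order + 1): the last index is incremented first and wraps at the order;
        // only combinations whose total order fits within the structure's order
        // are compared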
while (true) {
if (sum <= ds1.getOrder()) {
Assert.assertEquals(ds1.getPartialDerivative(derivatives),
ds2.getPartialDerivative(derivatives),
epsilon);
}
boolean increment = true;
sum = 0;
for (int i = derivatives.length - 1; i >= 0; --i) {
if (increment) {
if (derivatives[i] == ds1.getOrder()) {
derivatives[i] = 0;
} else {
derivatives[i]++;
increment = false;
}
}
sum += derivatives[i];
}
if (increment) {
return;
}
}
}
}
| Added Romain's Taylor map composition as a reference implementation. | hipparchus-core/src/test/java/org/hipparchus/analysis/differentiation/DerivativeStructureTest.java | Added Romain's Taylor map composition as a reference implementation. |
|
Java | bsd-2-clause | 8c528848a2584330ed8018b65f5b2d3d0b09ab7d | 0 | SecureSkyTechnology/sst-devtools-alter-proxy,SecureSkyTechnology/sst-devtools-alter-proxy | package com.sst.devtools.alterproxy.swingui;
import java.awt.BorderLayout;
import java.awt.event.WindowAdapter;
import java.awt.event.WindowEvent;
import javax.swing.JFrame;
public class MainWindow {
private JFrame frame;
private MainPanel mainPanel;
/**
* Create the application.
*/
public MainWindow() {
initialize();
}
/**
	 * A public method added by hand so that setVisible(true), which used to be
	 * called inside the generated public static void main(), can be invoked externally.
*/
public void show() {
frame.setVisible(true);
}
/**
* Initialize the contents of the frame.
*/
private void initialize() {
frame = new JFrame();
frame.setTitle("alter-proxy");
frame.setBounds(100, 100, 700, 400);
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
frame.getContentPane().setLayout(new BorderLayout(0, 0));
mainPanel = new MainPanel();
frame.getContentPane().add(mainPanel, BorderLayout.CENTER);
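		// save the configuration when the user closes the window; window
		// listeners fire before the EXIT_ON_CLOSE default operation terminates
		// the JVM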
frame.addWindowListener(new WindowAdapter() {
@Override
public void windowClosing(WindowEvent e) {
mainPanel.saveConfig();
}
});
}
}
| src/main/java/com/sst/devtools/alterproxy/swingui/MainWindow.java | package com.sst.devtools.alterproxy.swingui;
import java.awt.BorderLayout;
import javax.swing.JFrame;
public class MainWindow {
private JFrame frame;
private MainPanel mainPanel;
/**
* Create the application.
*/
public MainWindow() {
initialize();
}
/**
	 * A public method added by hand so that setVisible(true), which used to be
	 * called inside the generated public static void main(), can be invoked externally.
*/
public void show() {
frame.setVisible(true);
}
/**
* Initialize the contents of the frame.
*/
private void initialize() {
frame = new JFrame();
frame.setTitle("alter-proxy");
frame.setBounds(100, 100, 700, 400);
frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
frame.getContentPane().setLayout(new BorderLayout(0, 0));
mainPanel = new MainPanel();
frame.getContentPane().add(mainPanel, BorderLayout.CENTER);
}
}
| アプリ終了時に設定を自動保存するよう修正。 | src/main/java/com/sst/devtools/alterproxy/swingui/MainWindow.java | アプリ終了時に設定を自動保存するよう修正。 |
|
Java | bsd-3-clause | fd8bef1a797fde5eaed00a6041b4c30249be7964 | 0 | NCIP/cananolab,NCIP/cananolab,NCIP/cananolab | package gov.nih.nci.cananolab.dto.particle.characterization;
import gov.nih.nci.cananolab.domain.characterization.invitro.Cytotoxicity;
import gov.nih.nci.cananolab.domain.characterization.invitro.EnzymeInduction;
import gov.nih.nci.cananolab.domain.characterization.invitro.Transfection;
import gov.nih.nci.cananolab.domain.characterization.physical.PhysicalState;
import gov.nih.nci.cananolab.domain.characterization.physical.Shape;
import gov.nih.nci.cananolab.domain.characterization.physical.Solubility;
import gov.nih.nci.cananolab.domain.characterization.physical.Surface;
import gov.nih.nci.cananolab.domain.common.Datum;
import gov.nih.nci.cananolab.domain.common.ExperimentConfig;
import gov.nih.nci.cananolab.domain.common.Finding;
import gov.nih.nci.cananolab.domain.common.Instrument;
import gov.nih.nci.cananolab.domain.common.PointOfContact;
import gov.nih.nci.cananolab.domain.particle.Characterization;
import gov.nih.nci.cananolab.dto.common.ExperimentConfigBean;
import gov.nih.nci.cananolab.dto.common.FindingBean;
import gov.nih.nci.cananolab.dto.common.PointOfContactBean;
import gov.nih.nci.cananolab.dto.common.ProtocolBean;
import gov.nih.nci.cananolab.util.ClassUtils;
import gov.nih.nci.cananolab.util.Constants;
import gov.nih.nci.cananolab.util.DateUtils;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
/**
* This class represents shared characterization properties to be shown in
* characterization view pages.
*
* @author pansu
*
*/
public class CharacterizationBean {
private PointOfContactBean pocBean = new PointOfContactBean();
private String conclusion;
private String description;
private String assayType;
private ExperimentConfigBean theExperimentConfig = new ExperimentConfigBean();
private FindingBean theFinding = new FindingBean();
private Instrument theInstrument = new Instrument();
private List<ExperimentConfigBean> experimentConfigs = new ArrayList<ExperimentConfigBean>();
private List<FindingBean> findings = new ArrayList<FindingBean>();
private ProtocolBean protocolBean = new ProtocolBean();
private Characterization domainChar;
private String className;
private String dateString;
private String characterizationType;
private String characterizationName;
private Cytotoxicity cytotoxicity = new Cytotoxicity();
private PhysicalState physicalState = new PhysicalState();
private Shape shape = new Shape();
private Solubility solubility = new Solubility();
private Surface surface = new Surface();
private EnzymeInduction enzymeInduction = new EnzymeInduction();
private boolean withProperties = false;
private Transfection transfection = new Transfection();
public CharacterizationBean() {
}
public CharacterizationBean(Characterization chara) {
domainChar = chara;
className = ClassUtils.getShortClassName(chara.getClass().getName());
this.description = chara.getDesignMethodsDescription();
this.assayType = chara.getAssayType();
this.conclusion = chara.getAnalysisConclusion();
if (chara != null) {
PointOfContact poc = chara.getPointOfContact();
if (poc != null)
pocBean = new PointOfContactBean(poc);
}
this.dateString = DateUtils.convertDateToString(chara.getDate(),
Constants.DATE_FORMAT);
if (chara.getFindingCollection() != null) {
for (Finding finding : chara.getFindingCollection()) {
findings.add(new FindingBean(finding));
}
}
if (chara.getProtocol() != null) {
protocolBean = new ProtocolBean(chara.getProtocol());
}
if (chara.getExperimentConfigCollection() != null) {
for (ExperimentConfig config : chara
.getExperimentConfigCollection()) {
experimentConfigs.add(new ExperimentConfigBean(config));
}
}
if (chara instanceof Shape) {
shape = (Shape) chara;
withProperties = true;
} else if (chara instanceof PhysicalState) {
physicalState = (PhysicalState) chara;
withProperties = true;
} else if (chara instanceof Solubility) {
solubility = (Solubility) chara;
withProperties = true;
} else if (chara instanceof Surface) {
surface = (Surface) chara;
withProperties = true;
} else if (chara instanceof Cytotoxicity) {
cytotoxicity = (Cytotoxicity) chara;
withProperties = true;
} else if (chara instanceof EnzymeInduction) {
enzymeInduction = (EnzymeInduction) chara;
withProperties = true;
} else if (chara instanceof Transfection) {
transfection = (Transfection) chara;
withProperties = true;
} else {
withProperties = false;
}
}
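	// Deep-copies the characterization, nulling database ids and stamping the
	// copy (and its configs and data) with the auto-copy prefix so generated
	// duplicates can be told apart from user-entered records; presumably this
	// also lets the persistence layer insert them as new rows.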
public Characterization getDomainCopy(boolean copyDerivedDatum) {
Characterization copy = (Characterization) ClassUtils
.deepCopy(domainChar);
		// clear Ids, reset createdBy and createdDate, and tag the copy with the auto-copy prefix
copy.setId(null);
copy.setCreatedBy(Constants.AUTO_COPY_ANNOTATION_PREFIX);
copy.setCreatedDate(new Date());
if (copy.getExperimentConfigCollection().isEmpty()) {
copy.setExperimentConfigCollection(null);
} else {
Collection<ExperimentConfig> configs = copy
.getExperimentConfigCollection();
copy.setExperimentConfigCollection(new HashSet<ExperimentConfig>());
copy.getExperimentConfigCollection().addAll(configs);
for (ExperimentConfig config : copy.getExperimentConfigCollection()) {
config.setId(null);
config.setCreatedBy(Constants.AUTO_COPY_ANNOTATION_PREFIX);
config.setCreatedDate(new Date());
}
}
if (copy.getFindingCollection().isEmpty()) {
copy.setFindingCollection(null);
} else {
Collection<Finding> findings = copy.getFindingCollection();
copy.setFindingCollection(new HashSet<Finding>());
copy.getFindingCollection().addAll(findings);
for (Finding finding : copy.getFindingCollection()) {
for (Datum datum : finding.getDatumCollection()) {
datum.setId(null);
datum.setCreatedBy(Constants.AUTO_COPY_ANNOTATION_PREFIX);
datum.setCreatedDate(new Date());
// TODO::
// if (bioassay.getFile() != null) {
//
// bioassay.getFile().setId(null);
// bioassay.getFile().setCreatedBy(
// Constants.AUTO_COPY_ANNOTATION_PREFIX);
// bioassay.getFile().setCreatedDate(new Date());
// }
// if (bioassay.getDerivedDatumCollection().isEmpty()
// || !copyDerivedDatum) {
// bioassay.setDerivedDatumCollection(null);
// } else {
// Collection<DerivedDatum> data = bioassay
// .getDerivedDatumCollection();
// bioassay
// .setDerivedDatumCollection(new HashSet<DerivedDatum>());
// bioassay.getDerivedDatumCollection().addAll(data);
// for (DerivedDatum datum : bioassay
// .getDerivedDatumCollection()) {
// datum.setId(null);
// datum
// .setCreatedBy(Constants.AUTO_COPY_ANNOTATION_PREFIX);
// datum.setCreatedDate(new Date());
// }
// }
}
}
}
return copy;
}
public void setupDomain(String createdBy) throws Exception {
// take care of characterizations that don't have any special
// properties shown in the form, e.g. Size
if (domainChar == null) {
Class clazz = ClassUtils.getFullClass(className);
domainChar = (Characterization) clazz.newInstance();
}
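		// swap in the strongly typed sub-object populated from the form, so that
		// subtype-specific properties (shape, solubility, transfection, ...) are
		// carried into the domain object that gets persisted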
if (domainChar instanceof Shape) {
domainChar = shape;
} else if (domainChar instanceof Solubility) {
domainChar = solubility;
} else if (domainChar instanceof PhysicalState) {
domainChar = physicalState;
} else if (domainChar instanceof Surface) {
domainChar = surface;
} else if (domainChar instanceof Cytotoxicity) {
domainChar = cytotoxicity;
} else if (domainChar instanceof EnzymeInduction) {
domainChar = enzymeInduction;
} else if (domainChar instanceof Transfection) {
domainChar = transfection;
}
if (domainChar.getId() == null
|| domainChar.getCreatedBy() != null
&& domainChar.getCreatedBy().equals(
Constants.AUTO_COPY_ANNOTATION_PREFIX)) {
domainChar.setCreatedBy(createdBy);
domainChar.setCreatedDate(new Date());
}
domainChar.setDesignMethodsDescription(description);
domainChar.setAssayType(assayType);
domainChar.setAnalysisConclusion(conclusion);
if (pocBean != null && pocBean.getDomain().getId() != null
&& pocBean.getDomain().getId() != 0) {
domainChar.setPointOfContact(pocBean.getDomain());
} else {
domainChar.setPointOfContact(null);
}
domainChar.setDate(DateUtils.convertToDate(dateString,
Constants.DATE_FORMAT));
if (domainChar.getExperimentConfigCollection() != null) {
domainChar.getExperimentConfigCollection().clear();
} else {
domainChar
.setExperimentConfigCollection(new HashSet<ExperimentConfig>());
}
for (ExperimentConfigBean config : experimentConfigs) {
domainChar.getExperimentConfigCollection().add(config.getDomain());
}
if (protocolBean != null && protocolBean.getDomain().getName() != null) {
domainChar.setProtocol(protocolBean.getDomain());
} else {
domainChar.setProtocol(null);
}
if (domainChar.getFindingCollection() != null) {
domainChar.getFindingCollection().clear();
} else {
domainChar.setFindingCollection(new HashSet<Finding>());
}
for (FindingBean findingBean : findings) {
domainChar.getFindingCollection().add(findingBean.getDomain());
}
}
public String getDescription() {
return this.description;
}
public void setDescription(String description) {
this.description = description;
}
public ProtocolBean getProtocolBean() {
return protocolBean;
}
public Characterization getDomainChar() {
return domainChar;
}
public String getClassName() {
return className;
}
public void setClassName(String className) throws Exception {
this.className = className;
}
public List<ExperimentConfigBean> getExperimentConfigs() {
return experimentConfigs;
}
public ExperimentConfigBean getTheExperimentConfig() {
return theExperimentConfig;
}
public void setTheExperimentConfig(ExperimentConfigBean theExperimentConfig) {
this.theExperimentConfig = theExperimentConfig;
}
public void addExperimentConfig(ExperimentConfigBean experimentConfigBean) {
// if an old one exists, remove it first
if (experimentConfigs.contains(experimentConfigBean)) {
removeExperimentConfig(experimentConfigBean);
}
experimentConfigs.add(experimentConfigBean);
}
public void removeExperimentConfig(ExperimentConfigBean experimentConfigBean) {
experimentConfigs.remove(experimentConfigBean);
}
public Instrument getTheInstrument() {
return theInstrument;
}
public void setTheInstrument(Instrument theInstrument) {
this.theInstrument = theInstrument;
}
public String getDateString() {
return dateString;
}
public void setDateString(String dateString) {
this.dateString = dateString;
}
public PointOfContactBean getPocBean() {
return pocBean;
}
public void setPocBean(PointOfContactBean pocBean) {
this.pocBean = pocBean;
}
/**
* @return the theFinding
*/
public FindingBean getTheFinding() {
return theFinding;
}
/**
* @param theFinding
* the theFinding to set
*/
public void setTheFinding(FindingBean theFinding) {
this.theFinding = theFinding;
}
public void addFinding(FindingBean dataSetBean) {
// if an old one exists, remove it first
int index = findings.indexOf(dataSetBean);
if (index != -1) {
findings.remove(dataSetBean);
// retain the original order
findings.add(index, dataSetBean);
} else {
findings.add(dataSetBean);
}
}
public void removeFinding(FindingBean dataSetBean) {
findings.remove(dataSetBean);
}
/**
* @return the findings
*/
public List<FindingBean> getFindings() {
return findings;
}
public String getAssayCategory() {
return characterizationType;
}
public void setAssayCategory(String assayCategory) {
this.characterizationType = assayCategory;
}
public Cytotoxicity getCytotoxicity() {
return cytotoxicity;
}
public void setCytotoxicity(Cytotoxicity cytotoxicity) {
this.cytotoxicity = cytotoxicity;
}
public PhysicalState getPhysicalState() {
return physicalState;
}
public void setPhysicalState(PhysicalState physicalState) {
this.physicalState = physicalState;
}
public Shape getShape() {
return shape;
}
public void setShape(Shape shape) {
this.shape = shape;
}
public Solubility getSolubility() {
return solubility;
}
public void setSolubility(Solubility solubility) {
this.solubility = solubility;
}
public String getCharacterizationType() {
return characterizationType;
}
public void setCharacterizationType(String characterizationType) {
this.characterizationType = characterizationType;
}
public String getCharacterizationName() {
return characterizationName;
}
public void setCharacterizationName(String characterizationName) {
this.characterizationName = characterizationName;
}
public String getConclusion() {
return conclusion;
}
public void setConclusion(String conclusion) {
this.conclusion = conclusion;
}
public String getAssayType() {
return assayType;
}
public void setAssayType(String assayType) {
this.assayType = assayType;
}
public EnzymeInduction getEnzymeInduction() {
return enzymeInduction;
}
public void setEnzymeInduction(EnzymeInduction enzymeInduction) {
this.enzymeInduction = enzymeInduction;
}
public Surface getSurface() {
return surface;
}
public void setSurface(Surface surface) {
this.surface = surface;
}
public boolean isWithProperties() {
return withProperties;
}
public Transfection getTransfection() {
return transfection;
}
public void setTransfection(Transfection transfection) {
this.transfection = transfection;
}
}
| src/gov/nih/nci/cananolab/dto/particle/characterization/CharacterizationBean.java | package gov.nih.nci.cananolab.dto.particle.characterization;
import gov.nih.nci.cananolab.domain.characterization.invitro.Cytotoxicity;
import gov.nih.nci.cananolab.domain.characterization.invitro.EnzymeInduction;
import gov.nih.nci.cananolab.domain.characterization.physical.PhysicalState;
import gov.nih.nci.cananolab.domain.characterization.physical.Shape;
import gov.nih.nci.cananolab.domain.characterization.physical.Solubility;
import gov.nih.nci.cananolab.domain.characterization.physical.Surface;
import gov.nih.nci.cananolab.domain.common.Datum;
import gov.nih.nci.cananolab.domain.common.ExperimentConfig;
import gov.nih.nci.cananolab.domain.common.Finding;
import gov.nih.nci.cananolab.domain.common.Instrument;
import gov.nih.nci.cananolab.domain.common.PointOfContact;
import gov.nih.nci.cananolab.domain.particle.Characterization;
import gov.nih.nci.cananolab.dto.common.ExperimentConfigBean;
import gov.nih.nci.cananolab.dto.common.FindingBean;
import gov.nih.nci.cananolab.dto.common.PointOfContactBean;
import gov.nih.nci.cananolab.dto.common.ProtocolBean;
import gov.nih.nci.cananolab.util.ClassUtils;
import gov.nih.nci.cananolab.util.Constants;
import gov.nih.nci.cananolab.util.DateUtils;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.HashSet;
import java.util.List;
/**
* This class represents shared characterization properties to be shown in
* characterization view pages.
*
* @author pansu
*
*/
public class CharacterizationBean {
private PointOfContactBean pocBean = new PointOfContactBean();
private String conclusion;
private String description;
private String assayType;
private ExperimentConfigBean theExperimentConfig = new ExperimentConfigBean();
private FindingBean theFinding = new FindingBean();
private Instrument theInstrument = new Instrument();
private List<ExperimentConfigBean> experimentConfigs = new ArrayList<ExperimentConfigBean>();
private List<FindingBean> findings = new ArrayList<FindingBean>();
private ProtocolBean protocolBean = new ProtocolBean();
private Characterization domainChar;
private String className;
private String dateString;
private String characterizationType;
private String characterizationName;
private Cytotoxicity cytotoxicity = new Cytotoxicity();
private PhysicalState physicalState = new PhysicalState();
private Shape shape = new Shape();
private Solubility solubility = new Solubility();
private Surface surface = new Surface();
private EnzymeInduction enzymeInduction = new EnzymeInduction();
private boolean withProperties = false;
public CharacterizationBean() {
}
public CharacterizationBean(Characterization chara) {
domainChar = chara;
className = ClassUtils.getShortClassName(chara.getClass().getName());
this.description = chara.getDesignMethodsDescription();
this.assayType = chara.getAssayType();
this.conclusion = chara.getAnalysisConclusion();
if (chara != null) {
PointOfContact poc = chara.getPointOfContact();
if (poc != null)
pocBean = new PointOfContactBean(poc);
}
this.dateString = DateUtils.convertDateToString(chara.getDate(),
Constants.DATE_FORMAT);
if (chara.getFindingCollection() != null) {
for (Finding finding : chara.getFindingCollection()) {
findings.add(new FindingBean(finding));
}
}
if (chara.getProtocol() != null) {
protocolBean = new ProtocolBean(chara.getProtocol());
}
if (chara.getExperimentConfigCollection() != null) {
for (ExperimentConfig config : chara
.getExperimentConfigCollection()) {
experimentConfigs.add(new ExperimentConfigBean(config));
}
}
if (chara instanceof Shape) {
shape = (Shape) chara;
withProperties = true;
} else if (chara instanceof PhysicalState) {
physicalState = (PhysicalState) chara;
withProperties = true;
} else if (chara instanceof Solubility) {
solubility = (Solubility) chara;
withProperties = true;
} else if (chara instanceof Surface) {
surface = (Surface) chara;
withProperties = true;
} else if (chara instanceof Cytotoxicity) {
cytotoxicity = (Cytotoxicity) chara;
withProperties = true;
} else if (chara instanceof EnzymeInduction) {
enzymeInduction = (EnzymeInduction) chara;
withProperties = true;
} else {
withProperties = false;
}
}
public Characterization getDomainCopy(boolean copyDerivedDatum) {
Characterization copy = (Characterization) ClassUtils
.deepCopy(domainChar);
		// clear Ids, reset createdBy and createdDate, and tag the copy with the auto-copy prefix
copy.setId(null);
copy.setCreatedBy(Constants.AUTO_COPY_ANNOTATION_PREFIX);
copy.setCreatedDate(new Date());
if (copy.getExperimentConfigCollection().isEmpty()) {
copy.setExperimentConfigCollection(null);
} else {
Collection<ExperimentConfig> configs = copy
.getExperimentConfigCollection();
copy.setExperimentConfigCollection(new HashSet<ExperimentConfig>());
copy.getExperimentConfigCollection().addAll(configs);
for (ExperimentConfig config : copy.getExperimentConfigCollection()) {
config.setId(null);
config.setCreatedBy(Constants.AUTO_COPY_ANNOTATION_PREFIX);
config.setCreatedDate(new Date());
}
}
if (copy.getFindingCollection().isEmpty()) {
copy.setFindingCollection(null);
} else {
Collection<Finding> findings = copy.getFindingCollection();
copy.setFindingCollection(new HashSet<Finding>());
copy.getFindingCollection().addAll(findings);
for (Finding finding : copy.getFindingCollection()) {
for (Datum datum : finding.getDatumCollection()) {
datum.setId(null);
datum.setCreatedBy(Constants.AUTO_COPY_ANNOTATION_PREFIX);
datum.setCreatedDate(new Date());
// TODO::
// if (bioassay.getFile() != null) {
//
// bioassay.getFile().setId(null);
// bioassay.getFile().setCreatedBy(
// Constants.AUTO_COPY_ANNOTATION_PREFIX);
// bioassay.getFile().setCreatedDate(new Date());
// }
// if (bioassay.getDerivedDatumCollection().isEmpty()
// || !copyDerivedDatum) {
// bioassay.setDerivedDatumCollection(null);
// } else {
// Collection<DerivedDatum> data = bioassay
// .getDerivedDatumCollection();
// bioassay
// .setDerivedDatumCollection(new HashSet<DerivedDatum>());
// bioassay.getDerivedDatumCollection().addAll(data);
// for (DerivedDatum datum : bioassay
// .getDerivedDatumCollection()) {
// datum.setId(null);
// datum
// .setCreatedBy(Constants.AUTO_COPY_ANNOTATION_PREFIX);
// datum.setCreatedDate(new Date());
// }
// }
}
}
}
return copy;
}
public void setupDomain(String createdBy)
throws Exception {
// take care of characterizations that don't have any special
// properties shown in the form, e.g. Size
if (domainChar == null) {
Class clazz = ClassUtils.getFullClass(className);
domainChar = (Characterization) clazz.newInstance();
}
if (domainChar instanceof Shape) {
domainChar = shape;
} else if (domainChar instanceof Solubility) {
domainChar = solubility;
} else if (domainChar instanceof PhysicalState) {
domainChar = physicalState;
} else if (domainChar instanceof Surface) {
domainChar = surface;
} else if (domainChar instanceof Cytotoxicity) {
domainChar = cytotoxicity;
} else if (domainChar instanceof EnzymeInduction) {
domainChar = enzymeInduction;
}
if (domainChar.getId() == null
|| domainChar.getCreatedBy() != null
&& domainChar.getCreatedBy().equals(
Constants.AUTO_COPY_ANNOTATION_PREFIX)) {
domainChar.setCreatedBy(createdBy);
domainChar.setCreatedDate(new Date());
}
domainChar.setDesignMethodsDescription(description);
domainChar.setAssayType(assayType);
domainChar.setAnalysisConclusion(conclusion);
if (pocBean != null && pocBean.getDomain().getId() != null
&& pocBean.getDomain().getId() != 0) {
domainChar.setPointOfContact(pocBean.getDomain());
} else {
domainChar.setPointOfContact(null);
}
domainChar.setDate(DateUtils.convertToDate(dateString,
Constants.DATE_FORMAT));
if (domainChar.getExperimentConfigCollection() != null) {
domainChar.getExperimentConfigCollection().clear();
} else {
domainChar
.setExperimentConfigCollection(new HashSet<ExperimentConfig>());
}
for (ExperimentConfigBean config : experimentConfigs) {
domainChar.getExperimentConfigCollection().add(config.getDomain());
}
		if (protocolBean != null && protocolBean.getDomain().getName() != null) {
domainChar.setProtocol(protocolBean.getDomain());
} else {
domainChar.setProtocol(null);
}
if (domainChar.getFindingCollection() != null) {
domainChar.getFindingCollection().clear();
} else {
domainChar.setFindingCollection(new HashSet<Finding>());
}
for (FindingBean findingBean : findings) {
domainChar.getFindingCollection().add(findingBean.getDomain());
}
}
public String getDescription() {
return this.description;
}
public void setDescription(String description) {
this.description = description;
}
public ProtocolBean getProtocolBean() {
return protocolBean;
}
public Characterization getDomainChar() {
return domainChar;
}
public String getClassName() {
return className;
}
public void setClassName(String className) throws Exception {
this.className = className;
}
public List<ExperimentConfigBean> getExperimentConfigs() {
return experimentConfigs;
}
public ExperimentConfigBean getTheExperimentConfig() {
return theExperimentConfig;
}
public void setTheExperimentConfig(ExperimentConfigBean theExperimentConfig) {
this.theExperimentConfig = theExperimentConfig;
}
public void addExperimentConfig(ExperimentConfigBean experimentConfigBean) {
// if an old one exists, remove it first
if (experimentConfigs.contains(experimentConfigBean)) {
removeExperimentConfig(experimentConfigBean);
}
experimentConfigs.add(experimentConfigBean);
}
public void removeExperimentConfig(ExperimentConfigBean experimentConfigBean) {
experimentConfigs.remove(experimentConfigBean);
}
public Instrument getTheInstrument() {
return theInstrument;
}
public void setTheInstrument(Instrument theInstrument) {
this.theInstrument = theInstrument;
}
public String getDateString() {
return dateString;
}
public void setDateString(String dateString) {
this.dateString = dateString;
}
public PointOfContactBean getPocBean() {
return pocBean;
}
public void setPocBean(PointOfContactBean pocBean) {
this.pocBean = pocBean;
}
/**
* @return the theFinding
*/
public FindingBean getTheFinding() {
return theFinding;
}
/**
* @param theFinding
* the theFinding to set
*/
public void setTheFinding(FindingBean theFinding) {
this.theFinding = theFinding;
}
public void addFinding(FindingBean dataSetBean) {
// if an old one exists, remove it first
int index = findings.indexOf(dataSetBean);
if (index != -1) {
findings.remove(dataSetBean);
// retain the original order
findings.add(index, dataSetBean);
} else {
findings.add(dataSetBean);
}
}
public void removeFinding(FindingBean dataSetBean) {
findings.remove(dataSetBean);
}
/**
* @return the findings
*/
public List<FindingBean> getFindings() {
return findings;
}
public String getAssayCategory() {
return characterizationType;
}
public void setAssayCategory(String assayCategory) {
this.characterizationType = assayCategory;
}
public Cytotoxicity getCytotoxicity() {
return cytotoxicity;
}
public void setCytotoxicity(Cytotoxicity cytotoxicity) {
this.cytotoxicity = cytotoxicity;
}
public PhysicalState getPhysicalState() {
return physicalState;
}
public void setPhysicalState(PhysicalState physicalState) {
this.physicalState = physicalState;
}
public Shape getShape() {
return shape;
}
public void setShape(Shape shape) {
this.shape = shape;
}
public Solubility getSolubility() {
return solubility;
}
public void setSolubility(Solubility solubility) {
this.solubility = solubility;
}
public String getCharacterizationType() {
return characterizationType;
}
public void setCharacterizationType(String characterizationType) {
this.characterizationType = characterizationType;
}
public String getCharacterizationName() {
return characterizationName;
}
public void setCharacterizationName(String characterizationName) {
this.characterizationName = characterizationName;
}
public String getConclusion() {
return conclusion;
}
public void setConclusion(String conclusion) {
this.conclusion = conclusion;
}
public String getAssayType() {
return assayType;
}
public void setAssayType(String assayType) {
this.assayType = assayType;
}
public EnzymeInduction getEnzymeInduction() {
return enzymeInduction;
}
public void setEnzymeInduction(EnzymeInduction enzymeInduction) {
this.enzymeInduction = enzymeInduction;
}
public Surface getSurface() {
return surface;
}
public void setSurface(Surface surface) {
this.surface = surface;
}
public boolean isWithProperties() {
return withProperties;
}
}
| added new characterization type Transfection
SVN-Revision: 15531
| src/gov/nih/nci/cananolab/dto/particle/characterization/CharacterizationBean.java | added new characterization type Transfection |
|
Java | mit | 737ab6d7d18a52fc01ef0d8bc49e2da12e5ca352 | 0 | mozack/abra2,mozack/abra2,mozack/abra2,mozack/abra2 | package abra.cadabra;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import abra.CompareToReference2;
import abra.Feature;
import abra.Logger;
import abra.SAMRecordUtils;
import htsjdk.samtools.Cigar;
import htsjdk.samtools.CigarElement;
import htsjdk.samtools.CigarOperator;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.TextCigarCodec;
public class GermlineProcessor {
private static final int MIN_SUPPORTING_READS = 2;
private static final double MIN_ALLELE_FRACTION = 0.10;
private static final int MIN_MAPQ = 20;
private Germline cadabra;
private String normalBam;
private String tumorBam;
private ReadLocusReader normal;
private ReadLocusReader tumor;
private CompareToReference2 c2r;
private Feature region;
private int lastPos = 0;
List<SampleCall> sampleRecords = new ArrayList<SampleCall>();
List<SomaticCall> somaticCalls = new ArrayList<SomaticCall>();
Map<Integer, SampleCall> normalCalls = new HashMap<Integer, SampleCall>();
GermlineProcessor(Germline cadabra, String tumorBam, CompareToReference2 c2r) {
this.cadabra = cadabra;
this.tumorBam = tumorBam;
this.c2r = c2r;
}
GermlineProcessor(Germline cadabra, String normalBam, String tumorBam, CompareToReference2 c2r) {
this(cadabra, tumorBam, c2r);
this.normalBam = normalBam;
}
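	// With a single BAM, emit per-sample (germline) calls; with a matched normal
	// BAM, walk both files in lock step and emit somatic calls instead.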
void process(Feature region) {
this.region = region;
this.tumor = new ReadLocusReader(tumorBam, region);
if (normalBam != null) {
this.normal = new ReadLocusReader(normalBam, region);
processSomatic();
} else {
processSimple();
}
}
private void processSimple() {
Iterator<ReadsAtLocus> sampleIter = tumor.iterator();
ReadsAtLocus sampleReads = null;
while (sampleIter.hasNext()) {
sampleReads = sampleIter.next();
SampleCall call = processLocus(sampleReads, false);
if (call != null && sampleCallExceedsThresholds(call)) {
sampleRecords.add(call);
}
}
this.cadabra.addCalls(region.getSeqname(), sampleRecords);
}
private boolean sampleCallExceedsThresholds(SampleCall call) {
return call.alt != null && call.alt != Allele.UNK && call.alleleCounts.get(call.alt).getCount() >= MIN_SUPPORTING_READS &&
call.getVaf() >= MIN_ALLELE_FRACTION;
}
private void processSomatic() {
Iterator<ReadsAtLocus> normalIter = normal.iterator();
Iterator<ReadsAtLocus> tumorIter = tumor.iterator();
ReadsAtLocus normalReads = null;
ReadsAtLocus tumorReads = null;
int count = 0;
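		// lock-step walk over both readers: advance whichever sample is behind
		// in sequence-dictionary order, and only call a locus once both samples
		// are positioned on it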
while (normalIter.hasNext() && tumorIter.hasNext()) {
if (normalReads != null && tumorReads != null) {
int compare = normalReads.compareLoci(tumorReads, normal.getSamHeader().getSequenceDictionary());
if (compare < 0) {
normalReads = normalIter.next();
} else if (compare > 0) {
tumorReads = tumorIter.next();
} else {
SampleCall normalCall = processLocus(normalReads, true);
SampleCall tumorCall = processLocus(tumorReads, true);
if (tumorCall.alt != null && tumorCall.alt != Allele.UNK && tumorCall.alleleCounts.get(tumorCall.alt).getCount() >= MIN_SUPPORTING_READS) {
if (normalCall.getVaf()/tumorCall.getVaf() < .2) {
int chromosomeLength = c2r.getChromosomeLength(tumorCall.chromosome);
String refSeq = "N";
if (tumorCall.position > 10 && tumorCall.position < chromosomeLength-10) {
refSeq = c2r.getSequence(tumorCall.chromosome, tumorCall.position-9, 20);
}
SomaticCall somaticCall = new SomaticCall(normalCall, tumorCall, refSeq);
somaticCalls.add(somaticCall);
}
}
if (normalCall.alt != null && (normalCall.alt.getType() == Allele.Type.DEL || normalCall.alt.getType() == Allele.Type.INS)) {
normalCalls.put(normalCall.position, normalCall);
}
normalReads = normalIter.next();
tumorReads = tumorIter.next();
}
if ((count % 1000000) == 0) {
System.err.println("Position: " + normalReads.getChromosome() + ":" + normalReads.getPosition());
}
count += 1;
} else {
normalReads = normalIter.next();
tumorReads = tumorIter.next();
}
}
// Annotate somatic calls that have overlapping normal indels
for (SomaticCall call : somaticCalls) {
int pos = call.tumor.position;
int normalOverlap = 0;
int stop = pos;
if (call.tumor.alt.getType() == Allele.Type.DEL) {
stop += call.tumor.alt.getLength()+1;
} else {
stop += 1;
}
for (int i=pos-100; i<=stop; i++) {
				SampleCall normalCall = normalCalls.get(i);
if (normalCall != null) {
int normalStop = normalCall.alt.getType() == Allele.Type.DEL ?
normalCall.position + normalCall.alt.getLength() + 1 : normalCall.position + 1;
if (normalStop >= pos) {
normalOverlap += 1;
}
}
}
call.overlappingNormalAF = (float) normalOverlap / (float) call.normal.usableDepth;
}
normalCalls.clear();
this.cadabra.addSomaticCalls(region.getSeqname(), somaticCalls);
}
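	// Walks the CIGAR to map a reference coordinate onto the read and returns the
	// base aligned there, or null when the position is not covered by an aligned
	// base (e.g. it falls inside a deletion or skip, or beyond the read end).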
private Character getBaseAtPosition(SAMRecord read, int refPos) {
int readPos = 0;
int refPosInRead = read.getAlignmentStart();
int cigarElementIdx = 0;
while (refPosInRead <= refPos && cigarElementIdx < read.getCigar().numCigarElements() && readPos < read.getReadLength()) {
CigarElement elem = read.getCigar().getCigarElement(cigarElementIdx++);
switch(elem.getOperator()) {
case H: //NOOP
break;
case S:
case I:
readPos += elem.getLength();
break;
case D:
case N:
refPosInRead += elem.getLength();
break;
case M:
if (refPos < (refPosInRead + elem.getLength())) {
readPos += refPos - refPosInRead;
if (readPos < read.getReadLength()) {
// Found the base. Return it
return read.getReadString().charAt(readPos);
}
} else {
readPos += elem.getLength();
refPosInRead += elem.getLength();
}
break;
default:
throw new IllegalArgumentException("Invalid Cigar Operator: " + elem.getOperator() + " for read: " + read.getSAMString());
}
}
return null;
}
private char getRefBase(String chr, int pos) {
return c2r.getSequence(chr, pos, 1).charAt(0);
}
private Allele getAltIndelAllele(Allele ref, Map<Allele, AlleleCounts> alleleCounts) {
int maxAlt = 0;
Allele alt = null;
for (Allele allele : alleleCounts.keySet()) {
if (allele != ref) {
AlleleCounts ac = alleleCounts.get(allele);
if (ac.getCount() > maxAlt && (allele.getType() == Allele.Type.DEL || allele.getType() == Allele.Type.INS)) {
maxAlt = ac.getCount();
alt = allele;
}
}
}
return alt;
}
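	// Builds a pileup at a single locus. Duplicate, unmapped and secondary or
	// supplementary reads (flag mask 0x900) are skipped outright; reads below
	// MIN_MAPQ count toward total depth only; and reads without a realigner
	// contig tag (YA) are dropped when their non-indel mismatch rate exceeds 5%,
	// mirroring the mismatch cap the realigner applies to remapped indel reads.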
private SampleCall processLocus(ReadsAtLocus reads, boolean isSomatic) {
SampleCall call = null;
String chromosome = reads.getChromosome();
int position = reads.getPosition();
if (position > lastPos + 5000000) {
Logger.info("Processing: %s:%d", chromosome, position);
lastPos = position;
}
int tumorMapq0 = 0;
int mismatchExceededReads = 0;
int totalDepth = 0;
Map<Allele, AlleleCounts> alleleCounts = new HashMap<Allele, AlleleCounts>();
// Always include ref allele
char refBase = getRefBase(chromosome, position);
Allele refAllele = Allele.getAllele(refBase);
alleleCounts.put(refAllele, new AlleleCounts());
for (SAMRecord read : reads.getReads()) {
if (!read.getDuplicateReadFlag() && !read.getReadUnmappedFlag() &&
(read.getFlags() & 0x900) == 0) {
totalDepth += 1;
if (read.getMappingQuality() < MIN_MAPQ) {
if (read.getMappingQuality() == 0) {
tumorMapq0 += 1;
}
continue;
}
if (read.getStringAttribute("YA") == null) {
// Cap # mismatches in read that can be counted as reference
// This is done because realigner caps # of mismatches for remapped indel reads.
// This is needed to remove ref bias
int editDist = SAMRecordUtils.getEditDistance(read, null);
int indelBases = SAMRecordUtils.getNumIndelBases(read);
int numMismatches = editDist - indelBases;
float mismatchRate = (float) .05;
if (numMismatches > SAMRecordUtils.getMappedLength(read) * mismatchRate) {
// Skip this read
mismatchExceededReads += 1;
continue;
}
}
IndelInfo readElement = checkForIndelAtLocus(read, position);
Allele allele = Allele.UNK;
if (readElement != null) {
if (readElement.getCigarElement().getOperator() == CigarOperator.D) {
allele = new Allele(Allele.Type.DEL, readElement.getCigarElement().getLength());
} else if (readElement.getCigarElement().getOperator() == CigarOperator.I) {
allele = new Allele(Allele.Type.INS, readElement.getCigarElement().getLength());
}
} else {
Character base = getBaseAtPosition(read, position);
Character nextBase = getBaseAtPosition(read, position+1);
IndelInfo readIndel = checkForIndelAtLocus(read.getAlignmentStart(),
read.getCigar(), position);
if (readIndel == null && base != null && nextBase != null) {
allele = Allele.getAllele(base);
}
}
if (allele != Allele.UNK) {
if (!alleleCounts.containsKey(allele)) {
alleleCounts.put(allele, new AlleleCounts());
}
AlleleCounts ac = alleleCounts.get(allele);
ac.incrementCount(read);
if (readElement != null) {
ac.updateReadIdx(readElement.getReadIndex());
}
if (allele.getType() == Allele.Type.INS) {
ac.updateInsertBases(readElement.getInsertBases());
}
}
}
}
// Allow readId sets to be garbage collected.
for (AlleleCounts counts : alleleCounts.values()) {
counts.clearReadIds();
}
Allele alt = getAltIndelAllele(Allele.getAllele(refBase), alleleCounts);
int usableDepth = AlleleCounts.sum(alleleCounts.values());
String refSeq = null;
if (!isSomatic) {
int chromosomeLength = c2r.getChromosomeLength(chromosome);
refSeq = "N";
if (position > 10 && position < chromosomeLength-10) {
refSeq = c2r.getSequence(chromosome, position-9, 20);
}
}
if (alt != null && (alt.getType() == Allele.Type.DEL || alt.getType() == Allele.Type.INS) && refAllele != Allele.UNK) {
AlleleCounts altCounts = alleleCounts.get(alt);
AlleleCounts refCounts = alleleCounts.get(refAllele);
// if (altCounts.getCount() >= MIN_SUPPORTING_READS && af >= MIN_ALLELE_FRACTION) {
double qual = isSomatic ? 0 : calcPhredScaledQuality(refCounts.getCount(), altCounts.getCount(), usableDepth);
int repeatPeriod = getRepeatPeriod(chromosome, position, alt, altCounts);
String refField = "";
String altField = "";
if (alt.getType() == Allele.Type.DEL) {
refField = getDelRefField(chromosome, position, alt.getLength());
altField = refField.substring(0, 1);
} else if (alt.getType() == Allele.Type.INS) {
refField = getInsRefField(chromosome, position);
altField = refField + getPreferredInsertBases(alt, altCounts);
}
call = new SampleCall(chromosome, position, refAllele, alt, alleleCounts, totalDepth,
usableDepth, qual, repeatPeriod, tumorMapq0, refField, altField, mismatchExceededReads, refSeq);
// }
} else {
String refField = getInsRefField(chromosome, position);
String altField = ".";
double qual = 0;
int rp = 0;
call = new SampleCall(chromosome, position, refAllele, Allele.UNK, alleleCounts, totalDepth,
usableDepth, qual, rp, tumorMapq0, refField, altField, mismatchExceededReads, refSeq);
}
return call;
}
private String getPreferredInsertBases(Allele allele, AlleleCounts counts) {
String bases = null;
if (counts.getPreferredInsertBases().isEmpty()) {
StringBuffer buf = new StringBuffer();
for (int i=0; i<allele.getLength(); i++) {
buf.append('N');
}
bases = buf.toString();
} else {
bases = counts.getPreferredInsertBases();
}
return bases;
}
public static class SampleCall {
public static final String FORMAT = "DP:DP2:AD:AD2:MIRI:MARI:SOR:FS:MQ0:ISPAN:VAF:MER:BB:GT";
String chromosome;
int position;
Allele ref;
Allele alt;
Map<Allele, AlleleCounts> alleleCounts;
int totalReads;
int usableDepth;
double qual;
int repeatPeriod;
int mapq0;
String refField;
String altField;
double fs;
int mismatchExceededReads;
HomopolymerRun hrun;
String context;
SampleCall(String chromosome, int position, Allele ref, Allele alt, Map<Allele, AlleleCounts> alleleCounts,
int totalReads, int usableDepth, double qual, int repeatPeriod, int mapq0, String refField, String altField,
int mismatchExceededReads, String context) {
this.chromosome = chromosome;
this.position = position;
this.ref = ref;
this.alt = alt;
this.alleleCounts = alleleCounts;
this.totalReads = totalReads;
this.usableDepth = usableDepth;
this.qual = qual;
this.repeatPeriod = repeatPeriod;
this.mapq0 = mapq0;
this.refField = refField;
this.altField = altField;
AlleleCounts refCounts = alleleCounts.get(ref);
AlleleCounts altCounts = alleleCounts.get(alt);
if (refCounts != null && altCounts != null) {
// this.fs = strandBias(refCounts.getFwd(), refCounts.getRev(), altCounts.getFwd(), altCounts.getRev());
this.fs = 0;
}
this.mismatchExceededReads = mismatchExceededReads;
if (context != null) {
this.hrun = HomopolymerRun.find(context);
this.context = context;
}
}
public float getVaf() {
float vaf = 0;
AlleleCounts altCounts = alleleCounts.get(alt);
if (altCounts != null) {
vaf = (float) altCounts.getCount() / (float) usableDepth;
}
return vaf;
}
public String getSampleInfo(Allele ref, Allele alt) {
AlleleCounts refCounts = alleleCounts.get(ref);
AlleleCounts altCounts = alleleCounts.get(alt);
if (refCounts == null) {
refCounts = AlleleCounts.EMPTY_COUNTS;
}
if (altCounts == null) {
altCounts = AlleleCounts.EMPTY_COUNTS;
}
int ispan = altCounts == null ? 0 : altCounts.getMaxReadIdx()-altCounts.getMinReadIdx();
float vaf = getVaf();
double bbQual = calcPhredScaledQuality(refCounts.getCount(), altCounts.getCount(), usableDepth);
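			// field order mirrors SampleCall.FORMAT: DP total reads, DP2 usable
			// depth, AD ref/alt counts, AD2 ref/alt total counts, MIRI/MARI
			// min/max alt read index, SOR per-strand ref/alt counts, FS Fisher
			// strand, MQ0 mapq-zero reads, ISPAN alt read-index span, VAF,
			// MER mismatch-exceeded reads, BB beta-binomial quality,
			// GT genotype (hard-coded 0/1 here)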
String sampleInfo = String.format("%d:%d:%d,%d:%d,%d:%d:%d:%d,%d,%d,%d:%f:%d:%d:%f:%d:%f:0/1", totalReads, usableDepth, refCounts.getCount(), altCounts.getCount(),
refCounts.getTotalCount(), altCounts.getTotalCount(),
altCounts.getMinReadIdx(), altCounts.getMaxReadIdx(), refCounts.getFwd(), refCounts.getRev(), altCounts.getFwd(), altCounts.getRev(),
fs, mapq0, ispan, vaf, mismatchExceededReads, bbQual);
return sampleInfo;
}
public String toString() {
//
// chr1 14397 . CTGT C 31.08108108108108 PASS SOMATIC;CMQ=0;CTX=TAAAAGCACACTGTTGGTTT;REPEAT_PERIOD=1;NNAF=<NNAF>
// DP:AD:YM0:YM1:YM:OBS:MIRI:MARI:SOR:MQ0:GT 1092:51,23:0:0:0:23:5:36:0,51,1,22:981:0/1
String pos = String.valueOf(position);
String qualStr = String.valueOf(qual);
int hrunLen = hrun != null ? hrun.getLength() : 0;
char hrunBase = hrun != null ? hrun.getBase() : 'N';
int hrunPos = hrun != null ? hrun.getPos() : 0;
String info = String.format("REPEAT_PERIOD=%d;HRUN=%d,%c,%d;REF=%s", repeatPeriod,
hrunLen, hrunBase, hrunPos, context);
String sampleInfo = getSampleInfo(ref, alt);
return String.join("\t", chromosome, pos, ".", refField, altField, qualStr, "PASS", info, SampleCall.FORMAT, sampleInfo);
}
}
static double calcFisherExactPhredScaledQuality(int normalRefObs, int normalAltObs, int tumorRefObs, int tumorAltObs) {
FishersExactTest test = new FishersExactTest();
// Calc p-value
double p = test.oneTailedTest(normalRefObs, normalAltObs, tumorRefObs, tumorAltObs);
// Convert to phred scale
double qual = -10 * Math.log10(p);
// Round to tenths
qual = (int) (qual * 10);
qual = qual / 10.0;
return qual;
}
public static class SomaticCall {
SampleCall normal;
SampleCall tumor;
double qual;
float overlappingNormalAF;
HomopolymerRun hrun;
String context;
public SomaticCall(SampleCall normal, SampleCall tumor, String context) {
this.normal = normal;
this.tumor = tumor;
int normalRef = normal.alleleCounts.get(tumor.ref) == null ? 0 : normal.alleleCounts.get(tumor.ref).getCount();
int normalAlt = normal.alleleCounts.get(tumor.alt) == null ? 0 : normal.alleleCounts.get(tumor.alt).getCount();
int tumorRef = tumor.alleleCounts.get(tumor.ref).getCount();
int tumorAlt = tumor.alleleCounts.get(tumor.alt).getCount();
this.qual = calcFisherExactPhredScaledQuality(normalRef, normalAlt, tumorRef, tumorAlt);
this.hrun = HomopolymerRun.find(context);
this.context = context;
}
public String toString() {
String pos = String.valueOf(tumor.position);
String qualStr = String.valueOf(qual);
int hrunLen = hrun != null ? hrun.getLength() : 0;
char hrunBase = hrun != null ? hrun.getBase() : 'N';
int hrunPos = hrun != null ? hrun.getPos() : 0;
String info = String.format("REPEAT_PERIOD=%d;ONAF=%f;HRUN=%d,%c,%d;REF=%s", tumor.repeatPeriod, overlappingNormalAF,
hrunLen, hrunBase, hrunPos, context);
String normalInfo = normal.getSampleInfo(tumor.ref, tumor.alt);
String tumorInfo = tumor.getSampleInfo(tumor.ref, tumor.alt);
return String.join("\t", tumor.chromosome, pos, ".", tumor.refField, tumor.altField, qualStr, "PASS", info, SampleCall.FORMAT, normalInfo, tumorInfo);
}
}
static double strandBias(int rf, int rr, int af, int ar) {
FishersExactTest test = new FishersExactTest();
		double sb = test.twoTailedTest(rf, rr, af, ar);
return sb;
}
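	// Phred-scales BetaBinomial.betabinCDF(dp, altObs), i.e. the modeled
	// probability of observing altObs alt reads out of dp usable reads by
	// chance; note that the refObs argument is currently unused by the model.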
static double calcPhredScaledQuality(int refObs, int altObs, int dp) {
return -10 * Math.log10(BetaBinomial.betabinCDF(dp, altObs));
}
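	// Counts how many times the indel sequence repeats back-to-back in the
	// reference immediately following the locus; a high repeat period flags
	// loci where polymerase-slippage artifacts are more likely.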
private int getRepeatPeriod(String chromosome, int position, Allele indel, AlleleCounts indelCounts) {
int chromosomeEnd = c2r.getReferenceLength(chromosome);
int length = Math.min(indel.getLength() * 20, chromosomeEnd-position-2);
String sequence = c2r.getSequence(chromosome, position+1, length);
String bases;
if (indel.getType() == Allele.Type.DEL) {
bases = sequence.substring(0, indel.getLength());
} else {
bases = indelCounts.getPreferredInsertBases();
}
int period = 0;
if (bases.length() > 0) {
int index = 0;
while ((index+bases.length() < length) && (bases.equals(sequence.substring(index, index+bases.length())))) {
period += 1;
index += bases.length();
}
}
return period;
}
private String getDelRefField(String chromosome, int position, int length) {
return c2r.getSequence(chromosome, position, length+1);
}
private String getInsRefField(String chromosome, int position) {
return c2r.getSequence(chromosome, position, 1);
}
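	// The YA attribute (written during realignment, presumably as
	// <contig>:<position>:<cigar>) records the assembled contig's alignment; an
	// indel is credited to a read only when the contig has an indel at this
	// locus and the read itself carries a matching indel operator there.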
private IndelInfo checkForIndelAtLocus(SAMRecord read, int refPos) {
IndelInfo elem = null;
String contigInfo = read.getStringAttribute("YA");
if (contigInfo != null) {
// Get assembled contig info.
String[] fields = contigInfo.split(":");
int contigPos = Integer.parseInt(fields[1]);
Cigar contigCigar = TextCigarCodec.decode(fields[2]);
// Check to see if contig contains indel at current locus
elem = checkForIndelAtLocus(contigPos, contigCigar, refPos);
if (elem != null) {
// Now check to see if this read supports the indel
IndelInfo readElem = checkForIndelAtLocus(read.getAlignmentStart(),
read.getCigar(), refPos);
// Allow partially overlapping indels to support contig
// (Should only matter for inserts)
if (readElem == null || readElem.getCigarElement().getOperator() != elem.getCigarElement().getOperator()) {
// Read element doesn't match contig indel
elem = null;
} else {
elem.setReadIndex(readElem.getReadIndex());
// If this read overlaps the entire insert, capture the bases.
if (elem.getCigarElement().getOperator() == CigarOperator.I &&
elem.getCigarElement().getLength() == readElem.getCigarElement().getLength()) {
String insertBases = read.getReadString().substring(readElem.getReadIndex(), readElem.getReadIndex()+readElem.getCigarElement().getLength());
elem.setInsertBases(insertBases);
}
}
}
}
return elem;
}
private IndelInfo checkForIndelAtLocus(int alignmentStart, Cigar cigar, int refPos) {
IndelInfo ret = null;
int readIdx = 0;
int currRefPos = alignmentStart;
for (CigarElement element : cigar.getCigarElements()) {
if (element.getOperator() == CigarOperator.M) {
readIdx += element.getLength();
currRefPos += element.getLength();
} else if (element.getOperator() == CigarOperator.I) {
if (currRefPos == refPos+1) {
ret = new IndelInfo(element, readIdx);
break;
}
readIdx += element.getLength();
} else if (element.getOperator() == CigarOperator.D) {
if (currRefPos == refPos+1) {
ret = new IndelInfo(element, readIdx);
break;
}
currRefPos += element.getLength();
} else if (element.getOperator() == CigarOperator.S) {
readIdx += element.getLength();
}
}
return ret;
}
}
| src/main/java/abra/cadabra/GermlineProcessor.java | package abra.cadabra;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import abra.CompareToReference2;
import abra.Feature;
import abra.Logger;
import abra.SAMRecordUtils;
import htsjdk.samtools.Cigar;
import htsjdk.samtools.CigarElement;
import htsjdk.samtools.CigarOperator;
import htsjdk.samtools.SAMRecord;
import htsjdk.samtools.TextCigarCodec;
public class GermlineProcessor {
private static final int MIN_SUPPORTING_READS = 2;
private static final double MIN_ALLELE_FRACTION = 0.10;
private static final int MIN_MAPQ = 20;
private Germline cadabra;
private String normalBam;
private String tumorBam;
private ReadLocusReader normal;
private ReadLocusReader tumor;
private CompareToReference2 c2r;
private Feature region;
private int lastPos = 0;
List<SampleCall> sampleRecords = new ArrayList<SampleCall>();
List<SomaticCall> somaticCalls = new ArrayList<SomaticCall>();
Map<Integer, SampleCall> normalCalls = new HashMap<Integer, SampleCall>();
GermlineProcessor(Germline cadabra, String tumorBam, CompareToReference2 c2r) {
this.cadabra = cadabra;
this.tumorBam = tumorBam;
this.c2r = c2r;
}
GermlineProcessor(Germline cadabra, String normalBam, String tumorBam, CompareToReference2 c2r) {
this(cadabra, tumorBam, c2r);
this.normalBam = normalBam;
}
void process(Feature region) {
this.region = region;
this.tumor = new ReadLocusReader(tumorBam, region);
if (normalBam != null) {
this.normal = new ReadLocusReader(normalBam, region);
processSomatic();
} else {
processSimple();
}
}
private void processSimple() {
Iterator<ReadsAtLocus> sampleIter = tumor.iterator();
ReadsAtLocus sampleReads = null;
while (sampleIter.hasNext()) {
sampleReads = sampleIter.next();
SampleCall call = processLocus(sampleReads);
if (call != null && sampleCallExceedsThresholds(call)) {
sampleRecords.add(call);
}
}
this.cadabra.addCalls(region.getSeqname(), sampleRecords);
}
private boolean sampleCallExceedsThresholds(SampleCall call) {
return call.alt != null && call.alt != Allele.UNK && call.alleleCounts.get(call.alt).getCount() >= MIN_SUPPORTING_READS &&
call.getVaf() >= MIN_ALLELE_FRACTION;
}
private void processSomatic() {
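		// Walk the normal and tumor readers in lockstep across the region; a
		// locus becomes a somatic candidate when tumor alt support meets the
		// read threshold and the normal VAF is below 20% of the tumor VAF.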
Iterator<ReadsAtLocus> normalIter = normal.iterator();
Iterator<ReadsAtLocus> tumorIter = tumor.iterator();
ReadsAtLocus normalReads = null;
ReadsAtLocus tumorReads = null;
int count = 0;
while (normalIter.hasNext() && tumorIter.hasNext()) {
if (normalReads != null && tumorReads != null) {
int compare = normalReads.compareLoci(tumorReads, normal.getSamHeader().getSequenceDictionary());
if (compare < 0) {
normalReads = normalIter.next();
} else if (compare > 0) {
tumorReads = tumorIter.next();
} else {
SampleCall normalCall = processLocus(normalReads);
SampleCall tumorCall = processLocus(tumorReads);
if (tumorCall.alt != null && tumorCall.alt != Allele.UNK && tumorCall.alleleCounts.get(tumorCall.alt).getCount() >= MIN_SUPPORTING_READS) {
if (normalCall.getVaf()/tumorCall.getVaf() < .2) {
int chromosomeLength = c2r.getChromosomeLength(tumorCall.chromosome);
String refSeq = "N";
if (tumorCall.position > 10 && tumorCall.position < chromosomeLength-10) {
refSeq = c2r.getSequence(tumorCall.chromosome, tumorCall.position-9, 20);
}
SomaticCall somaticCall = new SomaticCall(normalCall, tumorCall, refSeq);
somaticCalls.add(somaticCall);
}
}
if (normalCall.alt != null && (normalCall.alt.getType() == Allele.Type.DEL || normalCall.alt.getType() == Allele.Type.INS)) {
normalCalls.put(normalCall.position, normalCall);
}
normalReads = normalIter.next();
tumorReads = tumorIter.next();
}
if ((count % 1000000) == 0) {
System.err.println("Position: " + normalReads.getChromosome() + ":" + normalReads.getPosition());
}
count += 1;
} else {
normalReads = normalIter.next();
tumorReads = tumorIter.next();
}
}
// Annotate somatic calls that have overlapping normal indels
for (SomaticCall call : somaticCalls) {
int pos = call.tumor.position;
int normalOverlap = 0;
int stop = pos;
if (call.tumor.alt.getType() == Allele.Type.DEL) {
stop += call.tumor.alt.getLength()+1;
} else {
stop += 1;
}
for (int i=pos-100; i<=stop; i++) {
				// Look up any normal indel call at each position i in the upstream window
				SampleCall normalCall = normalCalls.get(i);
if (normalCall != null) {
int normalStop = normalCall.alt.getType() == Allele.Type.DEL ?
normalCall.position + normalCall.alt.getLength() + 1 : normalCall.position + 1;
if (normalStop >= pos) {
normalOverlap += 1;
}
}
}
call.overlappingNormalAF = (float) normalOverlap / (float) call.normal.usableDepth;
}
normalCalls.clear();
this.cadabra.addSomaticCalls(region.getSeqname(), somaticCalls);
}
private Character getBaseAtPosition(SAMRecord read, int refPos) {
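		// Walk the cigar to locate the read base aligned to refPos; returns null
		// when the position falls in a deletion/skip or beyond the read end.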
int readPos = 0;
int refPosInRead = read.getAlignmentStart();
int cigarElementIdx = 0;
while (refPosInRead <= refPos && cigarElementIdx < read.getCigar().numCigarElements() && readPos < read.getReadLength()) {
CigarElement elem = read.getCigar().getCigarElement(cigarElementIdx++);
switch(elem.getOperator()) {
case H: //NOOP
break;
case S:
case I:
readPos += elem.getLength();
break;
case D:
case N:
refPosInRead += elem.getLength();
break;
case M:
if (refPos < (refPosInRead + elem.getLength())) {
readPos += refPos - refPosInRead;
if (readPos < read.getReadLength()) {
// Found the base. Return it
return read.getReadString().charAt(readPos);
}
} else {
readPos += elem.getLength();
refPosInRead += elem.getLength();
}
break;
default:
throw new IllegalArgumentException("Invalid Cigar Operator: " + elem.getOperator() + " for read: " + read.getSAMString());
}
}
return null;
}
private char getRefBase(String chr, int pos) {
return c2r.getSequence(chr, pos, 1).charAt(0);
}
private Allele getAltIndelAllele(Allele ref, Map<Allele, AlleleCounts> alleleCounts) {
int maxAlt = 0;
Allele alt = null;
for (Allele allele : alleleCounts.keySet()) {
if (allele != ref) {
AlleleCounts ac = alleleCounts.get(allele);
if (ac.getCount() > maxAlt && (allele.getType() == Allele.Type.DEL || allele.getType() == Allele.Type.INS)) {
maxAlt = ac.getCount();
alt = allele;
}
}
}
return alt;
}
private SampleCall processLocus(ReadsAtLocus reads) {
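		// Tally per-allele read support at this locus (indels via the realigner's
		// YA contig info, SNVs via direct base lookup) and package the
		// best-supported indel allele, if any, into a SampleCall.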
SampleCall call = null;
String chromosome = reads.getChromosome();
int position = reads.getPosition();
if (position > lastPos + 5000000) {
Logger.info("Processing: %s:%d", chromosome, position);
lastPos = position;
}
int tumorMapq0 = 0;
int mismatchExceededReads = 0;
int totalDepth = 0;
Map<Allele, AlleleCounts> alleleCounts = new HashMap<Allele, AlleleCounts>();
// Always include ref allele
char refBase = getRefBase(chromosome, position);
Allele refAllele = Allele.getAllele(refBase);
alleleCounts.put(refAllele, new AlleleCounts());
for (SAMRecord read : reads.getReads()) {
if (!read.getDuplicateReadFlag() && !read.getReadUnmappedFlag() &&
(read.getFlags() & 0x900) == 0) {
totalDepth += 1;
if (read.getMappingQuality() < MIN_MAPQ) {
if (read.getMappingQuality() == 0) {
tumorMapq0 += 1;
}
continue;
}
if (read.getStringAttribute("YA") == null) {
// Cap # mismatches in read that can be counted as reference
// This is done because realigner caps # of mismatches for remapped indel reads.
// This is needed to remove ref bias
int editDist = SAMRecordUtils.getEditDistance(read, null);
int indelBases = SAMRecordUtils.getNumIndelBases(read);
int numMismatches = editDist - indelBases;
float mismatchRate = (float) .05;
if (numMismatches > SAMRecordUtils.getMappedLength(read) * mismatchRate) {
// Skip this read
mismatchExceededReads += 1;
continue;
}
}
IndelInfo readElement = checkForIndelAtLocus(read, position);
Allele allele = Allele.UNK;
if (readElement != null) {
if (readElement.getCigarElement().getOperator() == CigarOperator.D) {
allele = new Allele(Allele.Type.DEL, readElement.getCigarElement().getLength());
} else if (readElement.getCigarElement().getOperator() == CigarOperator.I) {
allele = new Allele(Allele.Type.INS, readElement.getCigarElement().getLength());
}
} else {
Character base = getBaseAtPosition(read, position);
Character nextBase = getBaseAtPosition(read, position+1);
IndelInfo readIndel = checkForIndelAtLocus(read.getAlignmentStart(),
read.getCigar(), position);
if (readIndel == null && base != null && nextBase != null) {
allele = Allele.getAllele(base);
}
}
if (allele != Allele.UNK) {
if (!alleleCounts.containsKey(allele)) {
alleleCounts.put(allele, new AlleleCounts());
}
AlleleCounts ac = alleleCounts.get(allele);
ac.incrementCount(read);
if (readElement != null) {
ac.updateReadIdx(readElement.getReadIndex());
}
if (allele.getType() == Allele.Type.INS) {
ac.updateInsertBases(readElement.getInsertBases());
}
}
}
}
// Allow readId sets to be garbage collected.
for (AlleleCounts counts : alleleCounts.values()) {
counts.clearReadIds();
}
Allele alt = getAltIndelAllele(Allele.getAllele(refBase), alleleCounts);
int usableDepth = AlleleCounts.sum(alleleCounts.values());
if (alt != null && (alt.getType() == Allele.Type.DEL || alt.getType() == Allele.Type.INS) && refAllele != Allele.UNK) {
AlleleCounts altCounts = alleleCounts.get(alt);
AlleleCounts refCounts = alleleCounts.get(refAllele);
// if (altCounts.getCount() >= MIN_SUPPORTING_READS && af >= MIN_ALLELE_FRACTION) {
double qual = calcPhredScaledQuality(refCounts.getCount(), altCounts.getCount(), usableDepth);
int repeatPeriod = getRepeatPeriod(chromosome, position, alt, altCounts);
String refField = "";
String altField = "";
if (alt.getType() == Allele.Type.DEL) {
refField = getDelRefField(chromosome, position, alt.getLength());
altField = refField.substring(0, 1);
} else if (alt.getType() == Allele.Type.INS) {
refField = getInsRefField(chromosome, position);
altField = refField + getPreferredInsertBases(alt, altCounts);
}
call = new SampleCall(chromosome, position, refAllele, alt, alleleCounts, totalDepth,
usableDepth, qual, repeatPeriod, tumorMapq0, refField, altField, mismatchExceededReads);
// }
} else {
String refField = getInsRefField(chromosome, position);
String altField = ".";
double qual = 0;
int rp = 0;
call = new SampleCall(chromosome, position, refAllele, Allele.UNK, alleleCounts, totalDepth,
usableDepth, qual, rp, tumorMapq0, refField, altField, mismatchExceededReads);
}
return call;
}
private String getPreferredInsertBases(Allele allele, AlleleCounts counts) {
String bases = null;
if (counts.getPreferredInsertBases().isEmpty()) {
StringBuffer buf = new StringBuffer();
for (int i=0; i<allele.getLength(); i++) {
buf.append('N');
}
bases = buf.toString();
} else {
bases = counts.getPreferredInsertBases();
}
return bases;
}
public static class SampleCall {
public static final String FORMAT = "DP:DP2:AD:AD2:MIRI:MARI:SOR:FS:MQ0:ISPAN:VAF:MER:BB:GT";
String chromosome;
int position;
Allele ref;
Allele alt;
Map<Allele, AlleleCounts> alleleCounts;
int totalReads;
int usableDepth;
double qual;
int repeatPeriod;
int mapq0;
String refField;
String altField;
double fs;
int mismatchExceededReads;
SampleCall(String chromosome, int position, Allele ref, Allele alt, Map<Allele, AlleleCounts> alleleCounts,
int totalReads, int usableDepth, double qual, int repeatPeriod, int mapq0, String refField, String altField,
int mismatchExceededReads) {
this.chromosome = chromosome;
this.position = position;
this.ref = ref;
this.alt = alt;
this.alleleCounts = alleleCounts;
this.totalReads = totalReads;
this.usableDepth = usableDepth;
this.qual = qual;
this.repeatPeriod = repeatPeriod;
this.mapq0 = mapq0;
this.refField = refField;
this.altField = altField;
AlleleCounts refCounts = alleleCounts.get(ref);
AlleleCounts altCounts = alleleCounts.get(alt);
if (refCounts != null && altCounts != null) {
// this.fs = strandBias(refCounts.getFwd(), refCounts.getRev(), altCounts.getFwd(), altCounts.getRev());
this.fs = 0;
}
this.mismatchExceededReads = mismatchExceededReads;
}
public float getVaf() {
float vaf = 0;
AlleleCounts altCounts = alleleCounts.get(alt);
if (altCounts != null) {
vaf = (float) altCounts.getCount() / (float) usableDepth;
}
return vaf;
}
public String getSampleInfo(Allele ref, Allele alt) {
AlleleCounts refCounts = alleleCounts.get(ref);
AlleleCounts altCounts = alleleCounts.get(alt);
if (refCounts == null) {
refCounts = AlleleCounts.EMPTY_COUNTS;
}
if (altCounts == null) {
altCounts = AlleleCounts.EMPTY_COUNTS;
}
			// altCounts was replaced by EMPTY_COUNTS above, so it cannot be null here
			int ispan = altCounts.getMaxReadIdx() - altCounts.getMinReadIdx();
float vaf = getVaf();
double bbQual = calcPhredScaledQuality(refCounts.getCount(), altCounts.getCount(), usableDepth);
String sampleInfo = String.format("%d:%d:%d,%d:%d,%d:%d:%d:%d,%d,%d,%d:%f:%d:%d:%f:%d:%f:0/1", totalReads, usableDepth, refCounts.getCount(), altCounts.getCount(),
refCounts.getTotalCount(), altCounts.getTotalCount(),
altCounts.getMinReadIdx(), altCounts.getMaxReadIdx(), refCounts.getFwd(), refCounts.getRev(), altCounts.getFwd(), altCounts.getRev(),
fs, mapq0, ispan, vaf, mismatchExceededReads, bbQual);
return sampleInfo;
}
public String toString() {
//
// chr1 14397 . CTGT C 31.08108108108108 PASS SOMATIC;CMQ=0;CTX=TAAAAGCACACTGTTGGTTT;REPEAT_PERIOD=1;NNAF=<NNAF>
// DP:AD:YM0:YM1:YM:OBS:MIRI:MARI:SOR:MQ0:GT 1092:51,23:0:0:0:23:5:36:0,51,1,22:981:0/1
String pos = String.valueOf(position);
String qualStr = String.valueOf(qual);
String info = String.format("REPEAT_PERIOD=%d;", repeatPeriod);
String format = "DP:DP2:AD:MIRI:MARI:SOR:FS:MQ0:ISPAN:VAF:MER:GT";
String sampleInfo = getSampleInfo(ref, alt);
return String.join("\t", chromosome, pos, ".", refField, altField, qualStr, "PASS", info, format, sampleInfo);
}
}
static double calcFisherExactPhredScaledQuality(int normalRefObs, int normalAltObs, int tumorRefObs, int tumorAltObs) {
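		// Phred-scaled somatic score from a one-tailed Fisher's exact test on
		// the normal-vs-tumor ref/alt contingency table.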
FishersExactTest test = new FishersExactTest();
// Calc p-value
double p = test.oneTailedTest(normalRefObs, normalAltObs, tumorRefObs, tumorAltObs);
// Convert to phred scale
double qual = -10 * Math.log10(p);
// Round to tenths
qual = (int) (qual * 10);
qual = qual / 10.0;
return qual;
}
public static class SomaticCall {
SampleCall normal;
SampleCall tumor;
double qual;
float overlappingNormalAF;
HomopolymerRun hrun;
String context;
public SomaticCall(SampleCall normal, SampleCall tumor, String context) {
this.normal = normal;
this.tumor = tumor;
int normalRef = normal.alleleCounts.get(tumor.ref) == null ? 0 : normal.alleleCounts.get(tumor.ref).getCount();
int normalAlt = normal.alleleCounts.get(tumor.alt) == null ? 0 : normal.alleleCounts.get(tumor.alt).getCount();
int tumorRef = tumor.alleleCounts.get(tumor.ref).getCount();
int tumorAlt = tumor.alleleCounts.get(tumor.alt).getCount();
this.qual = calcFisherExactPhredScaledQuality(normalRef, normalAlt, tumorRef, tumorAlt);
this.hrun = HomopolymerRun.find(context);
this.context = context;
}
public String toString() {
String pos = String.valueOf(tumor.position);
String qualStr = String.valueOf(qual);
int hrunLen = hrun != null ? hrun.getLength() : 0;
char hrunBase = hrun != null ? hrun.getBase() : 'N';
int hrunPos = hrun != null ? hrun.getPos() : 0;
String info = String.format("REPEAT_PERIOD=%d;ONAF=%f;HRUN=%d,%c,%d;REF=%s", tumor.repeatPeriod, overlappingNormalAF,
hrunLen, hrunBase, hrunPos, context);
String normalInfo = normal.getSampleInfo(tumor.ref, tumor.alt);
String tumorInfo = tumor.getSampleInfo(tumor.ref, tumor.alt);
return String.join("\t", tumor.chromosome, pos, ".", tumor.refField, tumor.altField, qualStr, "PASS", info, SampleCall.FORMAT, normalInfo, tumorInfo);
}
}
static double strandBias(int rf, int rr, int af, int ar) {
FishersExactTest test = new FishersExactTest();
		// 2x2 contingency table: ref fwd/rev vs alt fwd/rev
		double sb = test.twoTailedTest(rf, rr, af, ar);
return sb;
}
static double calcPhredScaledQuality(int refObs, int altObs, int dp) {
//return -10 * Math.log10(BetaBinomial.betabinCDF(dp, altObs));
return 0;
}
private int getRepeatPeriod(String chromosome, int position, Allele indel, AlleleCounts indelCounts) {
int chromosomeEnd = c2r.getReferenceLength(chromosome);
int length = Math.min(indel.getLength() * 20, chromosomeEnd-position-2);
String sequence = c2r.getSequence(chromosome, position+1, length);
String bases;
if (indel.getType() == Allele.Type.DEL) {
bases = sequence.substring(0, indel.getLength());
} else {
bases = indelCounts.getPreferredInsertBases();
}
int period = 0;
if (bases.length() > 0) {
int index = 0;
while ((index+bases.length() < length) && (bases.equals(sequence.substring(index, index+bases.length())))) {
period += 1;
index += bases.length();
}
}
return period;
}
private String getDelRefField(String chromosome, int position, int length) {
return c2r.getSequence(chromosome, position, length+1);
}
private String getInsRefField(String chromosome, int position) {
return c2r.getSequence(chromosome, position, 1);
}
private IndelInfo checkForIndelAtLocus(SAMRecord read, int refPos) {
IndelInfo elem = null;
String contigInfo = read.getStringAttribute("YA");
if (contigInfo != null) {
// Get assembled contig info.
String[] fields = contigInfo.split(":");
int contigPos = Integer.parseInt(fields[1]);
Cigar contigCigar = TextCigarCodec.decode(fields[2]);
// Check to see if contig contains indel at current locus
elem = checkForIndelAtLocus(contigPos, contigCigar, refPos);
if (elem != null) {
// Now check to see if this read supports the indel
IndelInfo readElem = checkForIndelAtLocus(read.getAlignmentStart(),
read.getCigar(), refPos);
// Allow partially overlapping indels to support contig
// (Should only matter for inserts)
if (readElem == null || readElem.getCigarElement().getOperator() != elem.getCigarElement().getOperator()) {
// Read element doesn't match contig indel
elem = null;
} else {
elem.setReadIndex(readElem.getReadIndex());
// If this read overlaps the entire insert, capture the bases.
if (elem.getCigarElement().getOperator() == CigarOperator.I &&
elem.getCigarElement().getLength() == readElem.getCigarElement().getLength()) {
String insertBases = read.getReadString().substring(readElem.getReadIndex(), readElem.getReadIndex()+readElem.getCigarElement().getLength());
elem.setInsertBases(insertBases);
}
}
}
}
return elem;
}
private IndelInfo checkForIndelAtLocus(int alignmentStart, Cigar cigar, int refPos) {
IndelInfo ret = null;
int readIdx = 0;
int currRefPos = alignmentStart;
for (CigarElement element : cigar.getCigarElements()) {
if (element.getOperator() == CigarOperator.M) {
readIdx += element.getLength();
currRefPos += element.getLength();
} else if (element.getOperator() == CigarOperator.I) {
if (currRefPos == refPos+1) {
ret = new IndelInfo(element, readIdx);
break;
}
readIdx += element.getLength();
} else if (element.getOperator() == CigarOperator.D) {
if (currRefPos == refPos+1) {
ret = new IndelInfo(element, readIdx);
break;
}
currRefPos += element.getLength();
} else if (element.getOperator() == CigarOperator.S) {
readIdx += element.getLength();
}
}
return ret;
}
}
| Tweaks to single sample call formatting | src/main/java/abra/cadabra/GermlineProcessor.java | Tweaks to single sample call formatting |
|
Java | mit | 83103f8fffccfc2fd24606794d9c57f8c79cf230 | 0 | diirt/diirt,richardfearn/diirt,ControlSystemStudio/diirt,berryma4/diirt,ControlSystemStudio/diirt,berryma4/diirt,berryma4/diirt,diirt/diirt,diirt/diirt,ControlSystemStudio/diirt,ControlSystemStudio/diirt,diirt/diirt,berryma4/diirt,richardfearn/diirt,richardfearn/diirt | /**
* Copyright (C) 2010-14 pvmanager developers. See COPYRIGHT.TXT
* All rights reserved. Use is subject to license terms. See LICENSE.TXT
*/
package org.epics.pvmanager.test;
import org.epics.pvmanager.PVManager;
import org.epics.pvmanager.PVReader;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.hamcrest.Matchers.*;
import static org.epics.pvmanager.test.ExpressionLanguage.*;
import org.epics.pvmanager.test.TestDataSource;
import static org.epics.util.time.TimeDuration.*;
/**
*
* @author carcassi
*/
public class NotificationTest {
@Test
public void sequentialNotifications() throws Exception{
CounterTestListener listener = new CounterTestListener();
PVReader<Integer> reader = PVManager.read(counter())
.readListener(listener)
.from(new TestDataSource()).maxRate(ofMillis(10));
Thread.sleep(100);
if (listener.isFailed())
fail("listener received wrong notifications");
reader.close();
}
@Test
public void pause() throws Exception{
CounterTestListener listener = new CounterTestListener();
PVReader<Integer> reader = PVManager.read(counter())
.readListener(listener)
.from(new TestDataSource()).maxRate(ofMillis(10));
assertThat(reader.isPaused(), equalTo(false));
Thread.sleep(100);
// Pause
reader.setPaused(true);
assertThat(reader.isPaused(), equalTo(true));
int currentCounter = listener.getNextExpected();
Thread.sleep(100);
assertThat("Notifications were sent when paused.", listener.getNextExpected(), lessThanOrEqualTo(currentCounter+1));
// Resume
reader.setPaused(false);
assertThat(reader.isPaused(), equalTo(false));
Thread.sleep(100);
assertThat("Notifications were not resumed.", listener.getNextExpected(), not(equalTo(currentCounter)));
if (listener.isFailed())
fail("listener received wrong notifications");
reader.close();
}
}
| pvmanager-test/src/test/java/org/epics/pvmanager/test/NotificationTest.java | /**
* Copyright (C) 2010-14 pvmanager developers. See COPYRIGHT.TXT
* All rights reserved. Use is subject to license terms. See LICENSE.TXT
*/
package org.epics.pvmanager.test;
import org.epics.pvmanager.PVManager;
import org.epics.pvmanager.PVReader;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.hamcrest.Matchers.*;
import static org.epics.pvmanager.test.ExpressionLanguage.*;
import org.epics.pvmanager.test.TestDataSource;
import static org.epics.util.time.TimeDuration.*;
/**
*
* @author carcassi
*/
public class NotificationTest {
@Test
public void sequentialNotifications() throws Exception{
CounterTestListener listener = new CounterTestListener();
PVReader<Integer> reader = PVManager.read(counter())
.readListener(listener)
.from(new TestDataSource()).maxRate(ofMillis(10));
Thread.sleep(100);
if (listener.isFailed())
fail("listener received wrong notifications");
reader.close();
}
@Test
public void pause() throws Exception{
CounterTestListener listener = new CounterTestListener();
PVReader<Integer> reader = PVManager.read(counter())
.readListener(listener)
.from(new TestDataSource()).maxRate(ofMillis(10));
assertThat(reader.isPaused(), equalTo(false));
Thread.sleep(100);
// Pause
reader.setPaused(true);
assertThat(reader.isPaused(), equalTo(true));
int currentCounter = listener.getNextExpected();
Thread.sleep(100);
assertThat("Notifications were sent when paused.", listener.getNextExpected(), equalTo(currentCounter));
// Resume
reader.setPaused(false);
assertThat(reader.isPaused(), equalTo(false));
Thread.sleep(100);
assertThat("Notifications were not resumed.", listener.getNextExpected(), not(equalTo(currentCounter)));
if (listener.isFailed())
fail("listener received wrong notifications");
reader.close();
}
}
| test: making test more sturdy | pvmanager-test/src/test/java/org/epics/pvmanager/test/NotificationTest.java | test: making test more sturdy |
|
Java | mit | d4a0e24ba7b75008c32f31a597d307efcd216a04 | 0 | phemt85/PortScanner | package com.github.phemt85;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.util.ArrayList;
import java.util.List;
public class PortScanner {
public void scan(CommandLineValues command_line_values){
List<Integer> open_ports = null;
if(command_line_values.getPorts() != null){
open_ports = scanPortsList(command_line_values.getIp_address(), command_line_values.getPorts(), command_line_values.getTimeout());
}else{
open_ports = scanAllPorts(command_line_values.getIp_address(), command_line_values.getTimeout());
}
printResults(open_ports);
}
private List<Integer> scanAllPorts(String ip_address, int timeout){
CommandLineLoader loader = new CommandLineLoader();
List<Integer> open_ports = new ArrayList<>();
for (int port = 1; port < 65536; port++) {
if(isPortOpen(ip_address, port, timeout)){
open_ports.add(port);
}
loader.update(port, 65535);
}
return open_ports;
}
private List<Integer> scanPortsList(String ip_address, List<Integer> ports, int timeout){
CommandLineLoader loader = new CommandLineLoader();
List<Integer> open_ports = new ArrayList<>();
for (int port = 0; port < ports.size(); port++) {
if(isPortOpen(ip_address, ports.get(port), timeout)){
open_ports.add(ports.get(port));
}
loader.update(port, ports.size());
}
return open_ports;
}
private boolean isPortOpen(String ip_address, int port, int timeout){
boolean res = false;
try {
Socket socket = new Socket();
socket.connect(new InetSocketAddress(ip_address, port), timeout);
socket.close();
res = true;
} catch (Exception ex) {
}
return res;
}
public void printHelp(){
CommandLinePrinter command_line_printer = new CommandLinePrinter();
command_line_printer.printStr("-Scan all the port on 127.0.0.1", true);
command_line_printer.printStr("java -jar PortScanner<version>.jar -i 127.0.0.1 -t 1000", true);
command_line_printer.printStr("-Scan port 22 on 127.0.0.1", true);
command_line_printer.printStr("java -jar PortScanner<version>.jar -i 127.0.0.1 -t 1000 -p 22", true);
}
private void printResults(List<Integer> ports){
CommandLinePrinter printer = new CommandLinePrinter();
printer.printStr("__________", true);
if(ports.size() > 0){
printer.printStr("Printing open ports:", true);
for (int i = 0; i < ports.size(); i++) {
printer.printStr(String.valueOf(ports.get(i)), true);
}
}else{
printer.printStr("No open ports found!", true);
}
}
}
| src/com/github/phemt85/PortScanner.java | package com.github.phemt85;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.util.ArrayList;
import java.util.List;
public class PortScanner {
public void scan(CommandLineValues command_line_values){
List<Integer> open_ports = null;
if(command_line_values.getPorts() != null){
open_ports = scanPortsList(command_line_values.getIp_address(), command_line_values.getPorts(), command_line_values.getTimeout());
}else{
open_ports = scanAllPorts(command_line_values.getIp_address(), command_line_values.getTimeout());
}
printResults(open_ports);
}
private List<Integer> scanAllPorts(String ip_address, int timeout){
CommandLineLoader loader = new CommandLineLoader();
List<Integer> open_ports = new ArrayList<>();
for (int port = 1; port < 65536; port++) {
if(isPortOpen(ip_address, port, timeout)){
open_ports.add(port);
}
loader.update(port, 65535);
}
return open_ports;
}
private List<Integer> scanPortsList(String ip_address, List<Integer> ports, int timeout){
CommandLineLoader loader = new CommandLineLoader();
List<Integer> open_ports = new ArrayList<>();
for (int port = 0; port < ports.size(); port++) {
if(isPortOpen(ip_address, ports.get(port), timeout)){
open_ports.add(port);
}
loader.update(port, ports.size());
}
return open_ports;
}
private boolean isPortOpen(String ip_address, int port, int timeout){
boolean res = false;
try {
Socket socket = new Socket();
socket.connect(new InetSocketAddress(ip_address, port), timeout);
socket.close();
res = true;
} catch (Exception ex) {
}
return res;
}
public void printHelp(){
CommandLinePrinter command_line_printer = new CommandLinePrinter();
command_line_printer.printStr("-Scan all the port on 127.0.0.1", true);
command_line_printer.printStr("java -jar PortScanner<version>.jar -i 127.0.0.1 -t 1000", true);
command_line_printer.printStr("-Scan port 22 on 127.0.0.1", true);
command_line_printer.printStr("java -jar PortScanner<version>.jar -i 127.0.0.1 -t 1000 -p 22", true);
}
private void printResults(List<Integer> ports){
CommandLinePrinter printer = new CommandLinePrinter();
printer.printStr("__________", true);
if(ports.size() > 0){
printer.printStr("Printing open ports:", true);
for (int i = 0; i < ports.size(); i++) {
printer.printStr(String.valueOf(ports.get(i)), true);
}
}else{
printer.printStr("No open ports found!", true);
}
}
}
| fixed bug in PortScanner.java
| src/com/github/phemt85/PortScanner.java | fixed bug in PortScanner.java |
|
Java | mit | 63b393f20a432e155cc76d8bb537f98f677cab7d | 0 | Magicnation/Magicnation-Mod | package com.juanwan11_magicnation.Items;
import java.util.List;
import net.minecraft.block.Block;
import net.minecraft.client.Minecraft;
import net.minecraft.enchantment.Enchantment;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemPickaxe;
import net.minecraft.item.ItemStack;
import net.minecraft.world.World;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
public class itemGemStoneBloodPickaxe extends ItemPickaxe {
//Vars
protected itemGemStoneBloodPickaxe(ToolMaterial toolMaterial) {
super(toolMaterial.EMERALD);
setMaxDamage(50);
}
@SideOnly(Side.CLIENT)
public void addInformation(ItemStack itemStack, EntityPlayer player,List list, boolean bool) {
if (itemStack.isItemEnchanted() != true) {
list.add("You Must Right Click The Pickaxe ");
list.add("If Not Then You Will Lose Your Enchantments ");
} else
list.add("Uses Left: "+ (itemStack.getMaxDamage()-itemStack.getItemDamageForDisplay()));
}
@Override
public ItemStack onItemRightClick(ItemStack itemStack, World world, EntityPlayer player) {
if(itemStack.isItemEnchanted() != true){
itemStack.addEnchantment(Enchantment.fortune, 5);
itemStack.addEnchantment(Enchantment.efficiency, 5);
itemStack.setItemDamage(0);
}else if(itemStack.isItemDamaged())
if(itemStack.getItemDamage()>=50){
player.inventory.consumeInventoryItem(MAItems.itemGemStoneBlood);
itemStack.setItemDamage(itemStack.getItemDamage()-50);
Minecraft.getMinecraft().thePlayer.sendChatMessage("You Have Filled The Pickaxe In Magical Essence And Got 50 Durability");
}else{
player.inventory.consumeInventoryItem(MAItems.itemGemStoneBloodPickaxe);
player.inventory.consumeInventoryItem(MAItems.itemGemStoneBlood);
Minecraft.getMinecraft().thePlayer.sendChatMessage("You Have Filled The Pickaxe In Magical Essence And It Has Exploded");
}
return itemStack;
}
public boolean onBlockDestroyed(ItemStack itemStack, World world, Block block, int x, int y, int z, EntityLivingBase entityLiving)
{
if(itemStack.getItemDamage()==50){
Minecraft.getMinecraft().thePlayer.sendChatMessage("WHY YOU DESTROY THE PICKAXE!!!!");
}
itemStack.damageItem(1, entityLiving);
return false;
}
@Override
public boolean onBlockStartBreak(ItemStack stack, int x, int y, int z, EntityPlayer player) {
Block block = world.getBlock(x, y, z);
int meta = world.getBlockMetadata(x, y, z);
if (ForgeHooks.isToolEffective(stack, block, meta)){
return true;
Block block = world.getBlock(x, y-2, z);
if (ForgeHooks.isToolEffective(stack, block, meta)){
return true;
}
Block block = world.getBlock(x, y+1, z);
if (ForgeHooks.isToolEffective(stack, block, meta)){
return true;
}
}
}
}
| Magicnation/com/juanwan11_magicnation/Items/itemGemStoneBloodPickaxe.java | package com.juanwan11_magicnation.Items;
import java.util.List;
import net.minecraft.block.Block;
import net.minecraft.client.Minecraft;
import net.minecraft.enchantment.Enchantment;
import net.minecraft.entity.EntityLivingBase;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.item.ItemPickaxe;
import net.minecraft.item.ItemStack;
import net.minecraft.world.World;
import cpw.mods.fml.relauncher.Side;
import cpw.mods.fml.relauncher.SideOnly;
public class itemGemStoneBloodPickaxe extends ItemPickaxe {
//Vars
protected itemGemStoneBloodPickaxe(ToolMaterial toolMaterial) {
super(toolMaterial.EMERALD);
setMaxDamage(50);
}
@SideOnly(Side.CLIENT)
public void addInformation(ItemStack itemStack, EntityPlayer player,List list, boolean bool) {
if (itemStack.isItemEnchanted() != true) {
list.add("You Must Right Click The Pickaxe ");
list.add("If Not Then You Will Lose Your Enchantments ");
} else
list.add("Uses Left: "+ (itemStack.getMaxDamage()-itemStack.getItemDamageForDisplay()));
}
@Override
public ItemStack onItemRightClick(ItemStack itemStack, World world, EntityPlayer player) {
if(itemStack.isItemEnchanted() != true){
itemStack.addEnchantment(Enchantment.fortune, 5);
itemStack.addEnchantment(Enchantment.efficiency, 5);
itemStack.setItemDamage(0);
}else if(itemStack.isItemDamaged())
if(itemStack.getItemDamage()>=50){
player.inventory.consumeInventoryItem(MAItems.itemGemStoneBlood);
itemStack.setItemDamage(itemStack.getItemDamage()-50);
Minecraft.getMinecraft().thePlayer.sendChatMessage("You Have Filled The Pickaxe In Magical Essence And Got 50 Durability");
}else{
player.inventory.consumeInventoryItem(MAItems.itemGemStoneBloodPickaxe);
player.inventory.consumeInventoryItem(MAItems.itemGemStoneBlood);
Minecraft.getMinecraft().thePlayer.sendChatMessage("You Have Filled The Pickaxe In Magical Essence And It Has Exploded");
}
return itemStack;
}
public boolean onBlockDestroyed(ItemStack itemStack, World world, Block block, int x, int y, int z, EntityLivingBase entityLiving)
{
if(itemStack.getItemDamage()==50){
Minecraft.getMinecraft().thePlayer.sendChatMessage("WHY YOU DESTROY THE PICKAXE!!!!");
}
itemStack.damageItem(1, entityLiving);
return false;
}
}
| Update itemGemStoneBloodPickaxe.java | Magicnation/com/juanwan11_magicnation/Items/itemGemStoneBloodPickaxe.java | Update itemGemStoneBloodPickaxe.java |
|
Java | mit | e62772ec949f85ac9c215df3161e033d53109e2f | 0 | romanman/ethereumj-obsolete-prototype,romanman/ethereumj-obsolete-prototype | package org.ethereum.crypto;
import static java.util.Arrays.copyOfRange;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import org.ethereum.db.ByteArrayWrapper;
import org.ethereum.util.RLP;
import org.ethereum.util.Utils;
import org.spongycastle.util.encoders.Hex;
import org.ethereum.util.LRUMap;
public class HashUtil {
private static final int MAX_ENTRIES = 100; // Should contain most commonly hashed values
private static LRUMap<ByteArrayWrapper, byte[]> sha3Cache = new LRUMap<>(0, MAX_ENTRIES);
public static final byte[] EMPTY_DATA_HASH = Hex.decode("c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470");
private static final MessageDigest sha256digest;
static {
try {
sha256digest = MessageDigest.getInstance("SHA-256");
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e); // Can't happen.
}
}
public static byte[] sha256(byte[] input) {
return sha256digest.digest(input);
}
public static byte[] sha3(byte[] input) {
ByteArrayWrapper inputByteArray = new ByteArrayWrapper(input);
byte[] result = sha3Cache.get(inputByteArray);
if(result != null)
return result;
result = SHA3Helper.sha3(input);
sha3Cache.put(inputByteArray, result);
return result;
}
/**
* Calculates RIGTMOST160(SHA3(input)). This is used in address calculations.
*/
public static byte[] sha3omit12(byte[] input) {
byte[] hash = sha3(input);
return copyOfRange(hash, 12, hash.length);
}
/**
* The way to calculate new address inside ethereum
*
* @param addr - creating addres
* @param nonce - nonce of creating address
* @return new address
*/
public static byte[] calcNewAddr(byte[] addr, byte[] nonce) {
byte[] encSender = RLP.encodeElement(addr);
byte[] encNonce = RLP.encodeElement(nonce);
byte[] newAddress = sha3omit12(RLP.encodeList(encSender, encNonce));
return newAddress;
}
/**
* See {@link ByteUtil#doubleDigest(byte[], int, int)}.
*/
public static byte[] doubleDigest(byte[] input) {
return doubleDigest(input, 0, input.length);
}
/**
* Calculates the SHA-256 hash of the given byte range, and then hashes the resulting hash again. This is
* standard procedure in Bitcoin. The resulting hash is in big endian form.
*/
public static byte[] doubleDigest(byte[] input, int offset, int length) {
synchronized (sha256digest) {
sha256digest.reset();
sha256digest.update(input, offset, length);
byte[] first = sha256digest.digest();
return sha256digest.digest(first);
}
}
/**
* @return generates random peer id for the HelloMessage
*/
public static byte[] randomPeerId() {
byte[] peerIdBytes = new BigInteger(512, Utils.getRandom()).toByteArray();
String peerId = null;
if (peerIdBytes.length > 64)
peerId = Hex.toHexString(peerIdBytes, 1, 64);
else
peerId = Hex.toHexString(peerIdBytes);
return Hex.decode(peerId);
}
}
| src/main/java/org/ethereum/crypto/HashUtil.java | package org.ethereum.crypto;
import static java.util.Arrays.copyOfRange;
import java.math.BigInteger;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import org.ethereum.db.ByteArrayWrapper;
import org.ethereum.util.ByteUtil;
import org.ethereum.util.RLP;
import org.ethereum.util.Utils;
import org.spongycastle.util.encoders.Hex;
import org.ethereum.util.LRUMap;
public class HashUtil {
private static final int MAX_ENTRIES = 1000; // Should contain most commonly hashed values
private static LRUMap<ByteArrayWrapper, byte[]> sha3Cache = new LRUMap<>(0, MAX_ENTRIES);
public static final byte[] EMPTY_DATA_HASH = HashUtil.sha3(new byte[0]);
private static final MessageDigest sha256digest;
static {
try {
sha256digest = MessageDigest.getInstance("SHA-256");
} catch (NoSuchAlgorithmException e) {
throw new RuntimeException(e); // Can't happen.
}
}
public static byte[] sha256(byte[] input) {
return sha256digest.digest(input);
}
public static byte[] sha3(byte[] input) {
ByteArrayWrapper inputByteArray = new ByteArrayWrapper(input);
if(sha3Cache.keySet().contains(inputByteArray))
return sha3Cache.get(inputByteArray);
byte[] result = SHA3Helper.sha3(input);
sha3Cache.put(inputByteArray, result);
return result;
}
/**
* Calculates RIGTMOST160(SHA3(input)). This is used in address calculations.
*/
public static byte[] sha3omit12(byte[] input) {
byte[] hash = sha3(input);
return copyOfRange(hash, 12, hash.length);
}
/**
* The way to calculate new address inside ethereum
*
* @param addr - creating addres
* @param nonce - nonce of creating address
* @return new address
*/
public static byte[] calcNewAddr(byte[] addr, byte[] nonce) {
byte[] encSender = RLP.encodeElement(addr);
byte[] encNonce = RLP.encodeElement(nonce);
byte[] newAddress = HashUtil.sha3omit12(RLP.encodeList(encSender, encNonce));
return newAddress;
}
/**
* See {@link ByteUtil#doubleDigest(byte[], int, int)}.
*/
public static byte[] doubleDigest(byte[] input) {
return doubleDigest(input, 0, input.length);
}
/**
* Calculates the SHA-256 hash of the given byte range, and then hashes the resulting hash again. This is
* standard procedure in Bitcoin. The resulting hash is in big endian form.
*/
public static byte[] doubleDigest(byte[] input, int offset, int length) {
synchronized (sha256digest) {
sha256digest.reset();
sha256digest.update(input, offset, length);
byte[] first = sha256digest.digest();
return sha256digest.digest(first);
}
}
/**
* @return generates random peer id for the HelloMessage
*/
public static byte[] randomPeerId() {
byte[] peerIdBytes = new BigInteger(512, Utils.getRandom()).toByteArray();
String peerId = null;
if (peerIdBytes.length > 64)
peerId = Hex.toHexString(peerIdBytes, 1, 64);
else
peerId = Hex.toHexString(peerIdBytes);
return Hex.decode(peerId);
}
}
| Avoid double Map search
| src/main/java/org/ethereum/crypto/HashUtil.java | Avoid double Map search |
|
Java | mit | 6dfa76aacbf8958294241fc7b38cef7137183289 | 0 | vieck/Save-Dat-Money | package edu.purdue.vieck.budgetapp.Fragments;
import android.app.Activity;
import android.content.Context;
import android.graphics.Color;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import com.github.mikephil.charting.animation.Easing;
import com.github.mikephil.charting.charts.PieChart;
import com.github.mikephil.charting.components.Legend;
import com.github.mikephil.charting.components.Legend.LegendPosition;
import com.github.mikephil.charting.data.Entry;
import com.github.mikephil.charting.data.PieData;
import com.github.mikephil.charting.data.PieDataSet;
import com.github.mikephil.charting.listener.OnChartValueSelectedListener;
import com.github.mikephil.charting.utils.ColorTemplate;
import com.github.mikephil.charting.utils.Highlight;
import com.github.mikephil.charting.utils.PercentFormatter;
import java.util.ArrayList;
import edu.purdue.vieck.budgetapp.Activities.ChartActivity;
import edu.purdue.vieck.budgetapp.Adapters.ChartAdapter;
import edu.purdue.vieck.budgetapp.DatabaseAdapters.ParseHandler;
import edu.purdue.vieck.budgetapp.DatabaseAdapters.RealmHandler;
import edu.purdue.vieck.budgetapp.R;
public class ChartFragment extends Fragment implements OnChartValueSelectedListener {
int month, year, type;
RealmHandler mRealmHandler;
private int mInstance;
private int yInstance;
private PieChart mPieChart;
private SwipeRefreshLayout mSwipeRefreshLayout;
private RecyclerView mRecyclerView;
private ChartAdapter mChartAdapter;
private Context mContext;
@Override
public void onAttach(final Activity activity) {
mContext = activity.getApplicationContext();
super.onAttach(activity);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
return super.onOptionsItemSelected(item);
}
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_chart, container, false);
Bundle bundle = getArguments();
month = bundle.getInt("month", -1);
year = bundle.getInt("year", -1);
type = bundle.getInt("type", 2);
mRealmHandler = new RealmHandler(getActivity());
mSwipeRefreshLayout = (SwipeRefreshLayout) view.findViewById(R.id.swipe_refresh_layout);
mRecyclerView = (RecyclerView) view.findViewById(R.id.budget_recycler_view);
mChartAdapter = new ChartAdapter(mContext, month, year, type);
mRecyclerView.setLayoutManager(new LinearLayoutManager(mContext));
mRecyclerView.setAdapter(mChartAdapter);
mSwipeRefreshLayout.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
@Override
public void onRefresh() {
mChartAdapter = new ChartAdapter(mContext, month, year, type);
mRecyclerView.setAdapter(mChartAdapter);
setData(type);
mSwipeRefreshLayout.setRefreshing(false);
}
});
mPieChart = (PieChart) view.findViewById(R.id.pie_chart);
mPieChart = setupPieChart(mPieChart);
setData(type);
return view;
}
@Override
public void onActivityCreated(@Nullable Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
if (savedInstanceState != null) {
mInstance = savedInstanceState.getInt("Month", -1);
yInstance = savedInstanceState.getInt("Year", -1);
} else {
mInstance = -1;
yInstance = -1;
}
}
private PieChart setupPieChart(PieChart chart) {
chart.setDescription("");
chart.setDescriptionColor(getResources().getColor(R.color.White));
chart.setUsePercentValues(true);
chart.setDragDecelerationFrictionCoef(0.95f);
//mTypeface = Typeface.createFromAsset(getAssets(), "OpenSans-Regular.ttf");
chart.setDrawHoleEnabled(true);
//mPieChart.setHoleColor(Color.WHITE);
chart.setCenterTextColor(Color.BLACK);
chart.setTransparentCircleColor(Color.WHITE);
chart.setHoleRadius(45f);
chart.setTransparentCircleRadius(45f);
chart.setDrawCenterText(true);
chart.setRotationAngle(0);
// enable rotation of the chart by touch
chart.setRotationEnabled(true);
// mChart.setUnit(" €");
// mChart.setDrawUnitsInChart(true);
// add a selection listener
chart.setOnChartValueSelectedListener(this);
//mPieChart.setCenterText("MPAndroidChart\nby Philipp Jahoda");
chart.setCenterTextSize(9.5f);
chart.animateY(1500, Easing.EasingOption.EaseInOutQuad);
// mChart.spin(2000, 0, 360);*/
Legend l = chart.getLegend();
l.setPosition(LegendPosition.PIECHART_CENTER);
l.setXEntrySpace(7f);
l.setYEntrySpace(7f);
l.setYOffset(0f);
l.setXOffset(5f);
return chart;
}
private void setData(int type) {
ArrayList<Entry> yVals = new ArrayList<Entry>();
// IMPORTANT: In a PieChart, no values (Entry) should have the same
// xIndex (even if from different DataSets), since no values can be
// drawn above each other.
ArrayList<String> xVals = new ArrayList<String>();
ArrayList<Integer> colors = new ArrayList<Integer>();
if (!mRealmHandler.isEmpty(type)) {
int index = 0;
int total = 0;
if (mRealmHandler.getSpecificDateAmountByType("Misc", month, year, ((ChartActivity) getActivity()).getSpinnerPosition()) != 0) {
float amount = mRealmHandler.getSpecificDateAmountByType("Misc", month, year, type);
total += amount;
yVals.add(new Entry(amount, index++));
xVals.add("Misc");
colors.add(getResources().getColor(R.color.md_white_1000));
}
if (mRealmHandler.getSpecificDateAmountByType("Utilities", month, year, ((ChartActivity) getActivity()).getSpinnerPosition()) != 0) {
float amount = mRealmHandler.getSpecificDateAmountByType("Utilities", month, year, type);
total += amount;
yVals.add(new Entry(amount, index++));
xVals.add("Utilities");
colors.add(ColorTemplate.VORDIPLOM_COLORS[0]);
}
if (mRealmHandler.getSpecificDateAmountByType("Entertainment", month, year, ((ChartActivity) getActivity()).getSpinnerPosition()) != 0) {
float amount = mRealmHandler.getSpecificDateAmountByType("Entertainment", month, year, type);
total += amount;
yVals.add(new Entry(amount, index++));
xVals.add("Entertainment");
colors.add(ColorTemplate.VORDIPLOM_COLORS[1]);
}
if (mRealmHandler.getSpecificDateAmountByType("Medical", month, year, ((ChartActivity) getActivity()).getSpinnerPosition()) != 0) {
float amount = mRealmHandler.getSpecificDateAmountByType("Medical", month, year, type);
total += amount;
yVals.add(new Entry(amount, index++));
xVals.add("Medical");
colors.add(ColorTemplate.VORDIPLOM_COLORS[2]);
}
if (mRealmHandler.getSpecificDateAmountByType("Food", month, year, ((ChartActivity) getActivity()).getSpinnerPosition()) != 0) {
float amount = mRealmHandler.getSpecificDateAmountByType("Food", month, year, type);
total += amount;
yVals.add(new Entry(amount, index++));
xVals.add("Food");
colors.add(ColorTemplate.VORDIPLOM_COLORS[3]);
}
if (mRealmHandler.getSpecificDateAmountByType("Insurance", month, year, ((ChartActivity) getActivity()).getSpinnerPosition()) != 0) {
float amount = mRealmHandler.getSpecificDateAmountByType("Insurance", month, year, type);
total += amount;
yVals.add(new Entry(amount, index++));
xVals.add("Insurance");
colors.add(ColorTemplate.VORDIPLOM_COLORS[4]);
}
if (total > 0) {
PieDataSet dataSet = new PieDataSet(yVals, "Category Legend");
dataSet.setSliceSpace(2f);
dataSet.setSelectionShift(5f);
dataSet.setColors(colors);
PieData data = new PieData(xVals, dataSet);
data.setValueFormatter(new PercentFormatter());
data.setValueTextSize(11f);
data.setValueTextColor(Color.BLACK);
mPieChart.setData(data);
// undo all highlights
mPieChart.highlightValues(null);
mPieChart.invalidate();
}
}
}
public void updateAdapter(int position) {
this.type = position;
if (mChartAdapter != null) {
mChartAdapter.updatePosition(position);
mPieChart.clear();
setData(position);
mPieChart.notifyDataSetChanged();
}
}
@Override
public void onValueSelected(Entry e, int dataSetIndex, Highlight h) {
if (e == null)
return;
Log.i("VAL SELECTED",
"Value: " + e.getVal() + ", xIndex: " + e.getXIndex()
+ ", DataSet index: " + dataSetIndex);
}
@Override
public void onNothingSelected() {
Log.i("PieChart", "nothing selected");
}
}
| app/src/main/java/edu/purdue/vieck/budgetapp/Fragments/ChartFragment.java | package edu.purdue.vieck.budgetapp.Fragments;
import android.app.Activity;
import android.content.Context;
import android.graphics.Color;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.support.v4.app.Fragment;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import com.github.mikephil.charting.animation.Easing;
import com.github.mikephil.charting.charts.PieChart;
import com.github.mikephil.charting.components.Legend;
import com.github.mikephil.charting.components.Legend.LegendPosition;
import com.github.mikephil.charting.data.Entry;
import com.github.mikephil.charting.data.PieData;
import com.github.mikephil.charting.data.PieDataSet;
import com.github.mikephil.charting.listener.OnChartValueSelectedListener;
import com.github.mikephil.charting.utils.ColorTemplate;
import com.github.mikephil.charting.utils.Highlight;
import com.github.mikephil.charting.utils.PercentFormatter;
import java.util.ArrayList;
import edu.purdue.vieck.budgetapp.Activities.ChartActivity;
import edu.purdue.vieck.budgetapp.Adapters.ChartAdapter;
import edu.purdue.vieck.budgetapp.DatabaseAdapters.ParseHandler;
import edu.purdue.vieck.budgetapp.DatabaseAdapters.RealmHandler;
import edu.purdue.vieck.budgetapp.R;
public class ChartFragment extends Fragment implements OnChartValueSelectedListener {
int month, year, type;
RealmHandler mRealmHandler;
private int mInstance;
private int yInstance;
private PieChart mPieChart;
private SwipeRefreshLayout mSwipeRefreshLayout;
private RecyclerView mRecyclerView;
private ChartAdapter mChartAdapter;
private Context mContext;
@Override
public void onAttach(final Activity activity) {
mContext = activity.getApplicationContext();
super.onAttach(activity);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
return super.onOptionsItemSelected(item);
}
@Nullable
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
View view = inflater.inflate(R.layout.fragment_chart, container, false);
Bundle bundle = getArguments();
month = bundle.getInt("month", -1);
year = bundle.getInt("year", -1);
type = bundle.getInt("type", 2);
mRealmHandler = new RealmHandler(getActivity());
mSwipeRefreshLayout = (SwipeRefreshLayout) view.findViewById(R.id.swipe_refresh_layout);
mRecyclerView = (RecyclerView) view.findViewById(R.id.budget_recycler_view);
mChartAdapter = new ChartAdapter(mContext, month, year, type);
mRecyclerView.setLayoutManager(new LinearLayoutManager(mContext));
mRecyclerView.setAdapter(mChartAdapter);
mSwipeRefreshLayout.setOnRefreshListener(new SwipeRefreshLayout.OnRefreshListener() {
@Override
public void onRefresh() {
mChartAdapter = new ChartAdapter(mContext, month, year, type);
mRecyclerView.setAdapter(mChartAdapter);
setData(type);
mSwipeRefreshLayout.setRefreshing(false);
}
});
mPieChart = (PieChart) view.findViewById(R.id.pie_chart);
mPieChart = setupPieChart(mPieChart);
setData(type);
return view;
}
@Override
public void onActivityCreated(@Nullable Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
if (savedInstanceState != null) {
mInstance = savedInstanceState.getInt("Month", -1);
yInstance = savedInstanceState.getInt("Year", -1);
} else {
mInstance = -1;
yInstance = -1;
}
}
private PieChart setupPieChart(PieChart chart) {
chart.setDescription("");
chart.setDescriptionColor(getResources().getColor(R.color.White));
chart.setUsePercentValues(true);
chart.setDragDecelerationFrictionCoef(0.95f);
//mTypeface = Typeface.createFromAsset(getAssets(), "OpenSans-Regular.ttf");
chart.setDrawHoleEnabled(true);
//mPieChart.setHoleColor(Color.WHITE);
chart.setCenterTextColor(Color.BLACK);
chart.setTransparentCircleColor(Color.WHITE);
chart.setHoleRadius(45f);
chart.setTransparentCircleRadius(45f);
chart.setDrawCenterText(true);
chart.setRotationAngle(0);
// enable rotation of the chart by touch
chart.setRotationEnabled(true);
// mChart.setUnit(" €");
// mChart.setDrawUnitsInChart(true);
// add a selection listener
chart.setOnChartValueSelectedListener(this);
//mPieChart.setCenterText("MPAndroidChart\nby Philipp Jahoda");
chart.setCenterTextSize(9.5f);
chart.animateY(1500, Easing.EasingOption.EaseInOutQuad);
// mChart.spin(2000, 0, 360);*/
Legend l = chart.getLegend();
l.setPosition(LegendPosition.PIECHART_CENTER);
l.setXEntrySpace(7f);
l.setYEntrySpace(7f);
l.setYOffset(0f);
l.setXOffset(5f);
return chart;
}
private void setData(int type) {
ArrayList<Entry> yVals = new ArrayList<Entry>();
// IMPORTANT: In a PieChart, no values (Entry) should have the same
// xIndex (even if from different DataSets), since no values can be
// drawn above each other.
ArrayList<String> xVals = new ArrayList<String>();
ArrayList<Integer> colors = new ArrayList<Integer>();
if (!mRealmHandler.isEmpty(type)) {
int index = 0;
if (mRealmHandler.getSpecificDateAmountByType("Misc", month, year, ((ChartActivity) getActivity()).getSpinnerPosition()) != 0) {
float amount = mRealmHandler.getSpecificDateAmountByType("Misc", month, year, type);
yVals.add(new Entry(amount, index++));
xVals.add("Misc");
colors.add(getResources().getColor(R.color.md_white_1000));
}
if (mRealmHandler.getSpecificDateAmountByType("Utilities", month, year, ((ChartActivity) getActivity()).getSpinnerPosition()) != 0) {
float amount = mRealmHandler.getSpecificDateAmountByType("Utilities", month, year, type);
yVals.add(new Entry(amount, index++));
xVals.add("Utilities");
colors.add(ColorTemplate.VORDIPLOM_COLORS[0]);
}
if (mRealmHandler.getSpecificDateAmountByType("Entertainment", month, year, ((ChartActivity) getActivity()).getSpinnerPosition()) != 0) {
float amount = mRealmHandler.getSpecificDateAmountByType("Entertainment", month, year, type);
yVals.add(new Entry(amount, index++));
xVals.add("Entertainment");
colors.add(ColorTemplate.VORDIPLOM_COLORS[1]);
}
if (mRealmHandler.getSpecificDateAmountByType("Medical", month, year, ((ChartActivity) getActivity()).getSpinnerPosition()) != 0) {
float amount = mRealmHandler.getSpecificDateAmountByType("Medical", month, year, type);
yVals.add(new Entry(amount, index++));
xVals.add("Medical");
colors.add(ColorTemplate.VORDIPLOM_COLORS[2]);
}
if (mRealmHandler.getSpecificDateAmountByType("Food", month, year, ((ChartActivity) getActivity()).getSpinnerPosition()) != 0) {
float amount = mRealmHandler.getSpecificDateAmountByType("Food", month, year, type);
yVals.add(new Entry(amount, index++));
xVals.add("Food");
colors.add(ColorTemplate.VORDIPLOM_COLORS[3]);
}
if (mRealmHandler.getSpecificDateAmountByType("Insurance", month, year, ((ChartActivity) getActivity()).getSpinnerPosition()) != 0) {
float amount = mRealmHandler.getSpecificDateAmountByType("Insurance", month, year, type);
yVals.add(new Entry(amount, index++));
xVals.add("Insurance");
colors.add(ColorTemplate.VORDIPLOM_COLORS[4]);
}
PieDataSet dataSet = new PieDataSet(yVals, "Category Legend");
dataSet.setSliceSpace(2f);
dataSet.setSelectionShift(5f);
dataSet.setColors(colors);
PieData data = new PieData(xVals, dataSet);
data.setValueFormatter(new PercentFormatter());
data.setValueTextSize(11f);
data.setValueTextColor(Color.BLACK);
mPieChart.setData(data);
// undo all highlights
mPieChart.highlightValues(null);
mPieChart.invalidate();
}
}
public void updateAdapter(int position) {
this.type = position;
if (mChartAdapter != null) {
mChartAdapter.updatePosition(position);
mPieChart.clear();
setData(position);
mPieChart.notifyDataSetChanged();
}
}
@Override
public void onValueSelected(Entry e, int dataSetIndex, Highlight h) {
if (e == null)
return;
Log.i("VAL SELECTED",
"Value: " + e.getVal() + ", xIndex: " + e.getXIndex()
+ ", DataSet index: " + dataSetIndex);
}
@Override
public void onNothingSelected() {
Log.i("PieChart", "nothing selected");
}
}
| No chart dat available works again
| app/src/main/java/edu/purdue/vieck/budgetapp/Fragments/ChartFragment.java | No chart dat available works again |
|
Java | agpl-3.0 | 2c6d2018e58057877aee88f8b9516e5ef546895c | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | 0d86fcd4-2e62-11e5-9284-b827eb9e62be | hello.java | 0d818286-2e62-11e5-9284-b827eb9e62be | 0d86fcd4-2e62-11e5-9284-b827eb9e62be | hello.java | 0d86fcd4-2e62-11e5-9284-b827eb9e62be |
|
Java | agpl-3.0 | 399ac099c43685c981dc882983acaec1f21710cd | 0 | duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test,duaneking/rockstar_test | eb950eae-2e61-11e5-9284-b827eb9e62be | hello.java | eb8f924e-2e61-11e5-9284-b827eb9e62be | eb950eae-2e61-11e5-9284-b827eb9e62be | hello.java | eb950eae-2e61-11e5-9284-b827eb9e62be |
|
Java | lgpl-2.1 | 80486e7dc512f1bfa491658f45494556d64b43c5 | 0 | huib/2IMA00 | package Alg.Kernelization;
import org.jgrapht.Graphs;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.graph.Multigraph;
import org.jgrapht.alg.util.UnionFind;
import java.util.*;
/**
* Created by Christopher on 6/15/2016.
*
* 2-Approximation Algorithm (FEEDBACK) from: http://epubs.siam.org/doi/abs/10.1137/S0895480196305124
*
 * FEEDBACK outputs a feedback vertex superset, if an FVS is present in graph G
* Definitions:
* • A graph is called clean if it contains no vertex of degree less than 2 (This is done using reduction rules 0 and 1)
* • A cycle C is semidisjoint if, for every vertex u of C, d(u) = 2 with at most one exception.
*
* Summary of FEEDBACK:
* First, given a graph (G, w) with G = (V,E), any vertex of weight zero is removed from G and placed into the solution
* F at the outset. [However, we can skip this step in our code because we add the weights to the vertices ourselves
* with a default value of 1]
* After that, FEEDBACK initiates a While-loop until G becomes empty. The while-loop decomposes graph (G, w) into
* subgraphs (Gi, wi)’s by iteratively:
* • subtracting wi from w
* • removing vertices of weight reduced to zero
* • adding them into solution F
* • and cleaning up G (reduction rule 0 and 1)
*
* The subgraph Gi that is derived in the ith iteration is either a semidisjoint cycle C contained in G or,
* otherwise, G itself. Note that the first case has precedence over the second; that is, Gi is a semidisjoint cycle
* whenever G contains one.
*
*
* The value for gamma depends on the following cases:
* • Case 1: SDC was found; gamma = min{w(u) : u ∈ V (C)}, for vertex-weights w(u) and semidisjoint cycle C
* • Case 2: no SDC was found; gamma = min{w(u)/(d(u) − 1) : u ∈ V }, for degree d(u)
 * Note that the value for gamma changes for every iteration
*
* After creating F, the algorithm checks for redundant vertices in F and removes them, before returning F.
*/
public class Approximation {
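    /*
     * Worked example (illustrative sketch, not taken from the paper): on a unit-weight
     * triangle a-b-c every vertex has degree 2, so the whole cycle is semidisjoint.
     * Case 1 applies with gamma = min weight = 1, all three weights drop to 0 and the
     * vertices go onto the STACK; the redundancy pass then puts two of them back into
     * G-F, leaving a single vertex as the feedback vertex set.
     */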
/**
* Helper function to produce clean graphs (with degree >= 2)
*
* @param solution The solution from the current iteration (G-F)
* @param vertex Vertex from the current iteration
* @param degree Degree of the vertex in the current iteration
*/
public static ReductionSolution cleanUp(ReductionSolution solution, Integer vertex, int degree)
{
// Rule 0 & Rule 1
if (degree <= 1) {
Kernelization.removeVertex(solution, vertex, false);
}
return solution;
}
/**
* Helper function to find gamma value for the semidisjoint cycle case
*
* @param graph The graph G-F from the current iteration
* @param semiDisjointCycle The vertices from the semidisjoint cycle C of the current iteration
*/
public static float gammaCase1(Multigraph<Integer, DefaultEdge> graph, List<WeightedVertex> semiDisjointCycle)
{
float gamma = semiDisjointCycle.get(0).weight;
for (WeightedVertex c : semiDisjointCycle) {
if (c.weight < gamma) {
gamma = c.weight;
}
}
return gamma;
}
/**
* Helper function to find gamma value for the case that no semidisjoint cycle was found
*
* @param graph The graph G-F from the current iteration
*/
public static float gammaCase2(Multigraph<Integer, DefaultEdge> graph, Integer[] vertices)
{
        if (graph.vertexSet().size() == 0) System.out.println("ERROR: empty graph in gammaCase2 function.");
int initializeDegree = -1;
WeightedVertex initializeVertex = new WeightedVertex(-1);
for(int i=0; i<graph.vertexSet().size(); i++) {
if(graph.containsVertex(vertices[i])) {
initializeDegree = graph.degreeOf(vertices[i]);
initializeVertex = new WeightedVertex(vertices[i]);
break;
}
}
float gamma = initializeVertex.weight / (initializeDegree - 1); // initialize gamma value to compare with
for (Integer v : vertices) {
if (!graph.containsVertex(v)) {
continue;
}
int degree = graph.degreeOf(v);
WeightedVertex wv = new WeightedVertex(v);
if (gamma > wv.weight / (degree - 1)) { // set new min gamma value
gamma = (wv.weight / (degree - 1));
}
}
return gamma;
}
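    // Example: with the default unit weights, a vertex of degree 3 still in G-F
    // contributes w/(d-1) = 1/2, so gamma is driven by the highest-degree vertex
    // remaining in the graph.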
public static ReductionSolution determineFVS(Multigraph<Integer, DefaultEdge> ingraph, boolean cloneGraph, Integer[] weightedVertices, int weight) // changed from boolean to int
{
Multigraph<Integer, DefaultEdge> graph = cloneGraph ? (Multigraph<Integer, DefaultEdge>) ingraph.clone(): ingraph;
Deque<Integer> STACK = new ArrayDeque();
Integer[] vertices = (graph.vertexSet()).toArray(new Integer[graph.vertexSet().size()]);
return Approximation.determineFVS(ingraph, graph, vertices, STACK, weightedVertices, weight);
}
/**
* Determine the FVS (superset) of G (see FEEDBACK pseudo-code from paper)
*
* @param graph
* @param vertices
* @param weightedVertices
* @param weight
* @return
*/
public static ReductionSolution determineFVS(Multigraph<Integer, DefaultEdge> ingraph, Multigraph<Integer, DefaultEdge> graph, Integer[] vertices, Deque<Integer> STACK, Integer[] weightedVertices, int weight){
float gammaCase1, gammaCase2;
ReductionSolution solution = new ReductionSolution();
solution.reducedGraph = graph;
/**
* Iterative reduction of G to G-F by checking for semidisjoint cycles.
*
* We fill the STACK with all vertices with weight reduced to 0. After that, we remove the vertices from
* this STACK that turn out to be redundant and add the rest to our solution F.
*/
for (Integer v : vertices) {
if (!solution.reducedGraph.containsVertex(v)) {
continue;
}
WeightedVertex u = new WeightedVertex(v);
int degree = solution.reducedGraph.degreeOf(u.id);
if (degree <= 1) { // safety check; however, this should never occur
continue;
}
// we now check if G contains semidisjoint cycles [SDC] (plural)
            // • This includes steps that resemble kernelization rule 2, but rule 2 is executed slightly differently
// • If a vertex isn't a member of an SDC, we reduce its weight by gamma := min{w(u)/(d(u) − 1) : u ∈ V }, for vertex u with weight w(u) and degree d(u)
// • gamma reduction creates an ordering of vertices for STACK, which is used to check for redundant vertices later on
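            // Illustration: starting from a degree-2 vertex v, we walk left and right along
            // degree-2 vertices; the cycle is semidisjoint only if both walks end at the
            // same higher-degree vertex (the single allowed exception).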
if (degree == 2) {
List<WeightedVertex> semiDisjointCycle = new ArrayList();
List<Integer> leftNeighbors;
List<Integer> rightNeighbors;
List<Integer> neighbors = Graphs.neighborListOf(solution.reducedGraph, v);
WeightedVertex leftNeighbor = new WeightedVertex(neighbors.get(0));
WeightedVertex rightNeighbor = new WeightedVertex(neighbors.get(1));
// Create new vertex placeholders that will be overwritten in the loops
Integer predecessor = u.id;
Integer vertexPlaceholder = -1;
// prematurely add vertices to our potential semidisjointCycle container
semiDisjointCycle.add(u);
semiDisjointCycle.add(leftNeighbor);
semiDisjointCycle.add(rightNeighbor);
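            // NOTE: the '==' below is reference equality on two freshly constructed
            // WeightedVertex objects, so this branch can never fire as written; comparing
            // the underlying vertex ids is presumably what is intended.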
if (leftNeighbor == rightNeighbor) { // we have a self-loop -> remove it
Kernelization.removeVertex(solution, u.id, false);
Kernelization.removeVertex(solution, leftNeighbor.id, true);
            } else { // check if degrees of both neighbors uphold the properties of an SDC
int degreeLeftNeighbor = solution.reducedGraph.degreeOf(leftNeighbor.id);
WeightedVertex l1; // placeholder for one of the neighbors of leftNeighbor
WeightedVertex l2; // placeholder for one of the neighbors of leftNeighbor
WeightedVertex leftException; // placeholder for leftNeighbor.neighbor that violates SDC rules
int degreeRightNeighbor = solution.reducedGraph.degreeOf(rightNeighbor.id);
WeightedVertex r1; // placeholder for one of the neighbors of rightNeighbor
WeightedVertex r2; // placeholder for one of the neighbors of rightNeighbor
WeightedVertex rightException; // placeholder for rightNeighbor.neighbor that violates SDC rules
while (degreeLeftNeighbor == 2) { // still potential vertex contained SDC?
leftNeighbors = Graphs.neighborListOf(solution.reducedGraph, leftNeighbor.id);
vertexPlaceholder = leftNeighbor.id;
l1 = new WeightedVertex(leftNeighbors.get(0));
l2 = new WeightedVertex(leftNeighbors.get(1));
if (l1.id != predecessor) { // make sure the neighbor we process wasn't already looked at before
degreeLeftNeighbor = ingraph.degreeOf(l1.id); // get degree of v in original graph G
semiDisjointCycle.add(l1);
leftNeighbor = l1; // set leftNeighbor of next loop (this is why we needed vertexPlaceholder)
} else {
degreeLeftNeighbor = ingraph.degreeOf(l2.id);
semiDisjointCycle.add(l2);
leftNeighbor = l2; // set leftNeighbor of next loop (this is why we needed vertexPlaceholder)
}
predecessor = vertexPlaceholder; // remember vertex used in previous iteration to avoid reviewing it again
}
leftException = leftNeighbor; // semidisjoint cycle exception found
predecessor = u.id; //reset value for rightNeighbor-loop
while (degreeRightNeighbor == 2) { // still potential vertex contained SDC?
rightNeighbors = Graphs.neighborListOf(solution.reducedGraph, rightNeighbor.id);
vertexPlaceholder = rightNeighbor.id;
r1 = new WeightedVertex(rightNeighbors.get(0));
r2 = new WeightedVertex(rightNeighbors.get(1));
if (r1.id != predecessor) { // make sure the neighbor we process wasn't already looked at before
degreeRightNeighbor = ingraph.degreeOf(r1.id); // get degree of v in original graph G
semiDisjointCycle.add(r1);
rightNeighbor = r1; // set leftNeighbor of next loop (this is why we needed vertexPlaceholder)
} else {
degreeRightNeighbor = ingraph.degreeOf(r2.id);
semiDisjointCycle.add(r2);
rightNeighbor = r2; // set leftNeighbor of next loop (this is why we needed vertexPlaceholder)
}
predecessor = vertexPlaceholder; // remember vertex used in previous iteration to avoid reviewing it again
}
rightException = rightNeighbor; // semidisjoint cycle exception found
// An SDC may contain at most 1 exception, so we must have that (leftException == rightException)
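                // NOTE: as above, '==' compares object references of separately constructed
                // WeightedVertex instances; an id comparison is presumably intended to detect
                // that both walks ended at the same exception vertex.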
if (leftException == rightException) { // Case 1: SDC found in current graph
gammaCase1 = gammaCase1(solution.reducedGraph, semiDisjointCycle);
for (WeightedVertex c : semiDisjointCycle) { // for all members of the cycle
for (Integer w : vertices) {
if (!solution.reducedGraph.containsVertex(w)) {
continue;
}
if (w == c.id) {
c.weight = c.weight - gammaCase1;
if (c.weight <= 0) {
STACK.push(c.id); // add vertex to STACK
solution.reducedGraph.removeVertex(c.id); // update G-F
}
}
}
}
} else { // Case 2: no SDC found in current graph
gammaCase2 = gammaCase2(solution.reducedGraph, vertices);
u.weight = u.weight - gammaCase2 * (degree - 1); // only for the observed vertex
if (u.weight <= 0) {
if (solution.reducedGraph.containsVertex(u.id)) {
STACK.push(u.id);
solution.reducedGraph.removeVertex(u.id); // update G-F
}
}
}
semiDisjointCycle.clear(); // clear collection for next iteration
} // endif (left != right)
} else { // endif (degree == 2)
// in case we know for certain that the vertex does not belong to an SDC, immediately do:
gammaCase2 = gammaCase2(solution.reducedGraph, vertices);
u.weight = u.weight - gammaCase2 * (degree - 1); // only for the observed vertex
if (u.weight <= 0) {
if (solution.reducedGraph.containsVertex(u.id)) {
STACK.push(u.id);
solution.reducedGraph.removeVertex(u.id); // update G-F
}
}
}
// cleanup G (again) until no more vertices with degrees <=1 exist
cleanUp(solution, v, degree);
}// endfor (v:vertices)
// At this point, G-F contains no (more) SDC and STACK contains all potential vertices from the solution.
// • We now filter out redundant vertices by popping vertices from STACK and checking whether placing them back
// into G-F would create a cycle. We do this using UnionFind
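        // Concretely: if two neighbors of the popped vertex already belong to the same
        // UnionFind component of the forest G-F, re-adding the vertex would close a cycle,
        // so it is essential and stays in F; otherwise it is redundant and is merged back.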
UnionFind<Integer> union = new UnionFind(solution.reducedGraph.vertexSet());
while (!STACK.isEmpty()){
Integer currentVertex = STACK.peek(); // view top item from stack
// get all edges from current vertex in the original graph G
LinkedList<DefaultEdge> edges = new LinkedList(ingraph.edgesOf(currentVertex));
// get all corresponding neigbors n and, if n in G-F, store them in collection: neighbors
List<Integer> neighbors = new ArrayList();
for (DefaultEdge e:edges) {
Integer neighbor = Graphs.getOppositeVertex(ingraph, e, currentVertex);
if (solution.reducedGraph.containsVertex(neighbor)) {
neighbors.add(neighbor);// if neighbor in G-F
}
}
// check if v is connected to the same component more than once using a treeset (duplicates)
TreeSet<Integer> neighborComponents = new TreeSet();
boolean hasDuplicates = false;
// check for multiple neighbors of currentVertex that are members of the same component
for ( Integer n:neighbors ) {
neighborComponents.add(union.find(n)); // adds identifier of component to treeset
hasDuplicates |= !neighborComponents.add(union.find(n));
if (hasDuplicates) break; // we found a loop
}
// in case we didn't find a loop, currentVertex is redundant
if(!hasDuplicates){
union.addElement(currentVertex); // add currentVertex back into G-F
for ( Integer n:neighbors ) {
if (solution.reducedGraph.containsVertex(n)) {
union.union(currentVertex, n); // connect the vertex to its neighbors in G-F (UnionFind components)
}
}
solution.reducedGraph.addVertex(currentVertex); // add vertex back to G-F
} else { //if we found a loop, currentVertex is essential
solution.verticesToRemoved.add(currentVertex); // add currentVertex to solution F
}
STACK.pop();
}
// Next we update any vertex whose weight was artificially increased for certain reduction rules
int c = 0;
for (int v : solution.verticesToRemoved ) {
for (int w : weightedVertices){
if(v == w) c++;
}
}
solution.totalFVSweight = solution.verticesToRemoved.size() + c*(weight-1);
        if (solution.totalFVSweight > solution.verticesToRemoved.size()){
            System.out.println("Note: total FVS weight exceeds the number of removed vertices (artificially weighted vertices are present in F)");
        }
return solution;
}
}
| src/Alg/Kernelization/Approximation.java | package Alg.Kernelization;
import org.jgrapht.Graphs;
import org.jgrapht.graph.DefaultEdge;
import org.jgrapht.graph.Multigraph;
import org.jgrapht.alg.util.UnionFind;
import java.lang.reflect.Array;
import java.util.*;
/**
* Created by Christopher on 6/15/2016.
*
* 2-Approximation Algorithm (FEEDBACK) from: http://epubs.siam.org/doi/abs/10.1137/S0895480196305124
*
 * FEEDBACK outputs a feedback vertex superset, if an FVS is present in graph G
* Definitions:
* • A graph is called clean if it contains no vertex of degree less than 2 (This is done using reduction rules 0 and 1)
* • A cycle C is semidisjoint if, for every vertex u of C, d(u) = 2 with at most one exception.
*
* Summary of FEEDBACK:
* First, given a graph (G, w) with G = (V,E), any vertex of weight zero is removed from G and placed into the solution
* F at the outset. [However, we can skip this step in our code because we add the weights to the vertices ourselves
* with a default value of 1]
* After that, FEEDBACK initiates a While-loop until G becomes empty. The while-loop decomposes graph (G, w) into
* subgraphs (Gi, wi)’s by iteratively:
* • subtracting wi from w
* • removing vertices of weight reduced to zero
* • adding them into solution F
* • and cleaning up G (reduction rule 0 and 1)
*
* The subgraph Gi that is derived in the ith iteration is either a semidisjoint cycle C contained in G or,
* otherwise, G itself. Note that the first case has precedence over the second; that is, Gi is a semidisjoint cycle
* whenever G contains one.
*
* After creating F, the algorithm checks for redundant vertices in F and removes them, before returning F.
*/
public class Approximation {
/**
* Helper function to produce clean graphs (with degree >= 2)
*
* @param solution The solution from the current iteration (G-F)
* @param vertex Vertex from the current iteration
* @param degree Degree of the vertex in the current iteration
*/
public static ReductionSolution cleanUp(ReductionSolution solution, Integer vertex, int degree)
{
// Rule 0 & Rule 1
if (degree <= 1) {
Kernelization.removeVertex(solution, vertex, false);
}
return solution;
}
/**
* Helper function to find gamma value for the semidisjoint cycle case
*
* @param graph The graph G-F from the current iteration
* @param semiDisjointCycle The vertices from the semidisjoint cycle C of the current iteration
*/
public static float gammaCase1(Multigraph<Integer, DefaultEdge> graph, List<Integer> semiDisjointCycle)
{
        float gamma = -999; // sentinel meaning "no minimum found yet"
        for (Integer c : semiDisjointCycle) {
            int degree = graph.degreeOf(c);
            // NOTE: 'degree == -999' appears to be a typo for 'gamma == -999'; as written
            // neither condition can hold, so gamma is never updated and the sentinel is returned.
            if (degree == -999 || degree < gamma) {
gamma = degree;
}
}
return gamma;
}
/**
* Helper function to find gamma value for the case that no semidisjoint cycle was found
*
* @param graph The graph G-F from the current iteration
*/
public static float gammaCase2(Multigraph<Integer, DefaultEdge> graph, Integer[] vertices)
{
int initializeDegree = graph.degreeOf(vertices[0]);
WeightedVertex initializeVertex = new WeightedVertex(vertices[0]);
float gamma = initializeVertex.weight / (initializeDegree - 1); // initialize gamma value to compare with
for (Integer v : vertices) {
if (!graph.containsVertex(v)) {
continue;
}
int degree = graph.degreeOf(v);
WeightedVertex wv = new WeightedVertex(v);
if (gamma > wv.weight / (degree - 1)) { // set new min gamma value
gamma = (wv.weight / (degree - 1));
}
}
return gamma;
}
public static ReductionSolution determineFVS(Multigraph<Integer, DefaultEdge> ingraph, boolean cloneGraph, Integer[] weightedVertices, int weight) // changed from boolean to int
{
Multigraph<Integer, DefaultEdge> graph = cloneGraph ? (Multigraph<Integer, DefaultEdge>) ingraph.clone(): ingraph;
ArrayList<Integer> approxVerticesToRemoved = new ArrayList();
Integer[] vertices = (graph.vertexSet()).toArray(new Integer[graph.vertexSet().size()]);
return Approximation.determineFVS(ingraph, graph, vertices, approxVerticesToRemoved, weightedVertices, weight);
}
/**
* Determine the FVS (superset) of G (see FEEDBACK pseudo-code from paper)
*
* @param graph
* @param vertices
* @param weightedVertices
* @param weight
* @return
*/
public static ReductionSolution determineFVS(Multigraph<Integer, DefaultEdge> ingraph, Multigraph<Integer, DefaultEdge> graph, Integer[] vertices, ArrayList<Integer> approxVerticesToRemoved, Integer[] weightedVertices, int weight){
float gammaCase1, gammaCase2;
Multigraph<Integer, DefaultEdge> tempgraph = (Multigraph<Integer, DefaultEdge>) ingraph.clone();
ReductionSolution solution = new ReductionSolution();
solution.reducedGraph = graph;
/**
* Iterative reduction of G to G-F.
*
* We fill the approximated solution set F with all vertices with weight reduced to 0, being at all vertices from
* all the semidisjoint cycles (SDC) that are detected. After that, we remove the vertices from this solution that
* turn out to be redundant.
*
* Note 1: we cannot merge both for-loops on vertices v, because we need to process each vertex once in order to
* find the minimum value for gamma which is used to decrease vertex weights in the following for-loop.
* Note 2: Since our default weight is 1, gamma equals 1 for vertices from an SDC.
*/
for (Integer v : vertices) {
if (!solution.reducedGraph.containsVertex(v)) {
continue;
}
WeightedVertex u = new WeightedVertex(v);
int degree = solution.reducedGraph.degreeOf(u.id);
            // cleanup G (again) until no more vertices with degrees <=1 exist
cleanUp(solution, v, degree);
if (degree <= 1) { // safety check
continue;
}
// we now check if G contains semidisjoint cycles [SDC] (plural)
            // • This includes steps that resemble kernelization rule 2, but rule 2 is executed slightly differently
            // • If a vertex isn't a member of an SDC, we reduce its weight by gamma := min{w(u)/(d(u) − 1) : u ∈ V }, for vertex u with weight w(u) and degree d(u)
            // • gamma reduction creates an ordering of vertices for STACK, which is used to check for redundancy
if (degree == 2) {
List<Integer> semiDisjointCycle = new ArrayList();
List<Integer> leftNeighbors;
List<Integer> rightNeighbors;
semiDisjointCycle.add(v);
List<Integer> neighbors = Graphs.neighborListOf(solution.reducedGraph, v);
Integer left = neighbors.get(0);
Integer right = neighbors.get(1);
Integer departureVertex = v;
Integer terminal = -1;
                // these entries are cleared at the end if it turns out v is not contained in an SDC
semiDisjointCycle.add(v);
semiDisjointCycle.add(left); // clear this if it turns out v is not contained in an SDC
semiDisjointCycle.add(right); // clear this if it turns out v is not contained in an SDC
if (left == right) {
// would removing v create a self-loop? if yes, put neighbor of v in solution and remove both
Kernelization.removeVertex(solution, v, false);
Kernelization.removeVertex(solution, left, true);
} else { // a != b, check cycle for matching exception vertex
int degreeLeft = solution.reducedGraph.degreeOf(left);
Integer l1;
Integer l2;
Integer leftException;
int degreeRight = solution.reducedGraph.degreeOf(right);
Integer r1;
Integer r2;
Integer rightException;
while (degreeLeft == 2) { // still potential vertex contained SDC?
leftNeighbors = Graphs.neighborListOf(solution.reducedGraph, left);
terminal = left;
l1 = leftNeighbors.get(0);
l2 = leftNeighbors.get(1);
if (l1 != departureVertex) {
degreeLeft = ingraph.degreeOf(l1); // use degree of original graph! maybe degree got lowered cause of other SDC searches
semiDisjointCycle.add(l1);
left = l1;
} else {
degreeLeft = ingraph.degreeOf(l2);
semiDisjointCycle.add(l2);
left = l2;
}
departureVertex = terminal;
}
leftException = left; // semidisjoint cycle exception found
while (degreeRight == 2) {
rightNeighbors = Graphs.neighborListOf(solution.reducedGraph, right);
terminal = right;
r1 = rightNeighbors.get(0);
r2 = rightNeighbors.get(1);
if (r1 != departureVertex) {
degreeRight = ingraph.degreeOf(r1);
semiDisjointCycle.add(r1);
right = r1;
} else {
degreeRight = ingraph.degreeOf(r2);
semiDisjointCycle.add(r2);
right = r2;
}
departureVertex = terminal;
}
rightException = right; // another semidisjoint cycle exception found
/**
* We now set gamma value for current iteration and subtract from the appropriate vertices
* • Case 1: SDC was found; gamma = min{w(u) : u ∈ V (C)}, for vertex-weights w(u) and semidisjoint cycle C
* • Case 2: no SDC was found; gamma = min{w(u)/(d(u) − 1) : u ∈ V }, for degree d(u)
*
* Note 1: We skip over vertices with degree <= 1 because these are technically still contained within our graph,
* even though we already performed a cleanUp on it.
* Note 2: gamma changes for every iteration of the for-loop following this one, depending on the case of either
* finding a semidisjoint cycle (SDC) or not.
*/
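                    /*
                     * Example with the default unit weights: an SDC gives gamma = 1 (Case 1),
                     * while a degree-3 vertex outside any SDC gives gamma = 1/(3-1) = 0.5 (Case 2).
                     */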
// An SDC may contain at most 1 exception, so we must have that (leftException == rightException)
if (leftException == rightException) { // Case 1
gammaCase1 = gammaCase1(solution.reducedGraph, semiDisjointCycle);
for (Integer c : semiDisjointCycle) {
for (Integer w : vertices) {
if (!solution.reducedGraph.containsVertex(w)) {
continue;
}
if (w == c) {
WeightedVertex wv = new WeightedVertex(w);
wv.weight = wv.weight - gammaCase1;
if (wv.weight <= 0) {
if (!approxVerticesToRemoved.contains(w)) {
approxVerticesToRemoved.add(w); // add to approx solution
}
solution.reducedGraph.removeVertex(w); // update G-F
}
}
}
}
} else { // Case 2
gammaCase2 = gammaCase2(solution.reducedGraph, vertices);
for (Integer w : vertices) {
if (!solution.reducedGraph.containsVertex(w)) {
continue;
}
WeightedVertex wv = new WeightedVertex(w);
wv.weight = wv.weight - gammaCase2 * (degree - 1);
if (wv.weight <= 0) {
if (!approxVerticesToRemoved.contains(w)) {
approxVerticesToRemoved.add(w); // add to approx solution
}
solution.reducedGraph.removeVertex(w); // update G-F
}
}
}
semiDisjointCycle.clear(); // prep for next iteration
} // endif (left != right)
} else { // endif (degree == 2)
// gamma value Case 2 again
gammaCase2 = gammaCase2(solution.reducedGraph, vertices);
for (Integer w : vertices) {
if (!solution.reducedGraph.containsVertex(w)) {
continue;
}
WeightedVertex wv = new WeightedVertex(w);
wv.weight = wv.weight - gammaCase2 * (degree - 1);
if (wv.weight <= 0) {
if (!approxVerticesToRemoved.contains(w)) {
approxVerticesToRemoved.add(w); // add to approx solution
}
solution.reducedGraph.removeVertex(w); // update G-F
}
}
}
}// endfor (v:vertices)
// The previous steps empties G completely, so now we create forest G-F from scratch
boolean GminusF = tempgraph.removeAllVertices(approxVerticesToRemoved);
if(GminusF){
solution.reducedGraph = tempgraph;
} else {
System.out.println("ERROR: Something wen't wrong when creating G - F.");
}
// Current status: Any semidisjoint cycle (SDC) contained within G is now in F. G-F contains no (more) SDC
// We now filter out redundant vertices from F, since we added entire cycles instead of singular vertices
UnionFind<Integer> union = new UnionFind(solution.reducedGraph.vertexSet());
ArrayList<Integer> toBeRemoved = new ArrayList();
for (Integer v : approxVerticesToRemoved ) {
if (solution.verticesToRemoved.contains(v)) continue; // if already added to definitive solution F
LinkedList<DefaultEdge> edges = new LinkedList(ingraph.edgesOf(v));// all edges from v in the original graph G
List<Integer> neighbors = new ArrayList();
for (DefaultEdge e:edges) {
Integer neighbor = Graphs.getOppositeVertex(ingraph, e, v);
if (solution.reducedGraph.containsVertex(neighbor)) { // if neighbor in G-F
if (!neighbors.contains(neighbor)) neighbors.add(neighbor);
}
}
TreeSet<Integer> neighborComponents = new TreeSet();
boolean hasDuplicates = false; // check if v is connected to the same component more than once == duplicate
for ( Integer n:neighbors ) {
if (solution.reducedGraph.containsVertex(n)) { // if n in G - F
neighborComponents.add(union.find(n));
hasDuplicates |= !neighborComponents.add(union.find(n));
}
if (hasDuplicates) break; // v is essential
}
if(!hasDuplicates){ // v is redundant
union.addElement(v);
for ( Integer n:neighbors ) {
if (solution.reducedGraph.containsVertex(n)) { // if n in G - F
union.union(v, n);
}
}
solution.reducedGraph.addVertex(v); // add vertex back to G-F
toBeRemoved.add(v); // equivalent for "STACK" from FEEDBACK (paper ALG)
} else {
solution.verticesToRemoved.add(v); // add v to definitive solution F
solution.reducedK -= 1; // reduce k by one
}
}
approxVerticesToRemoved.removeAll(toBeRemoved); // remove (e.g. "pop STACK") after loop
// now, to account for any vertex whose weight was artificially increased:
int c = 0;
for (int v : solution.verticesToRemoved ) {
for (int w : weightedVertices){
if(v == w) c++;
}
}
int total_FVS_weight = solution.verticesToRemoved.size() + c*(weight-1);
solution.totalFVSweight = total_FVS_weight;
return solution; // where: solution.verticesToRemoved = F
}
}
| Approximation refactored to look like FEEDBACK algorithm
| src/Alg/Kernelization/Approximation.java | Approximation refactored to look like FEEDBACK algorithm |
|
Java | apache-2.0 | 8ad4387effbdb84556245d54cb7285be38892bef | 0 | jdeppe-pivotal/geode,masaki-yamakawa/geode,jdeppe-pivotal/geode,jdeppe-pivotal/geode,smgoller/geode,jdeppe-pivotal/geode,jdeppe-pivotal/geode,jdeppe-pivotal/geode,masaki-yamakawa/geode,jdeppe-pivotal/geode,smgoller/geode,masaki-yamakawa/geode,smgoller/geode,masaki-yamakawa/geode,smgoller/geode,masaki-yamakawa/geode,smgoller/geode,masaki-yamakawa/geode,masaki-yamakawa/geode,smgoller/geode,smgoller/geode | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.redis.internal.executor.hash;
import static org.apache.geode.distributed.ConfigurationProperties.MAX_WAIT_TIME_RECONNECT;
import static org.assertj.core.api.Assertions.assertThat;
import java.time.Duration;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import redis.clients.jedis.Jedis;
import org.apache.geode.redis.ConcurrentLoopingThreads;
import org.apache.geode.test.awaitility.GeodeAwaitility;
import org.apache.geode.test.dunit.rules.MemberVM;
import org.apache.geode.test.dunit.rules.RedisClusterStartupRule;
public class HGetDUnitTest {
@ClassRule
public static RedisClusterStartupRule clusterStartUp = new RedisClusterStartupRule(3);
private static final String LOCAL_HOST = "127.0.0.1";
private static final int ITERATION_COUNT = 500;
private static final int JEDIS_TIMEOUT = Math.toIntExact(GeodeAwaitility.getTimeout().toMillis());
private static Jedis jedis1;
private static Jedis jedis2;
private static Properties locatorProperties;
private static MemberVM locator;
private static MemberVM server1;
private static MemberVM server2;
private static int redisServerPort1;
private static int redisServerPort2;
@BeforeClass
public static void classSetup() {
locatorProperties = new Properties();
locatorProperties.setProperty(MAX_WAIT_TIME_RECONNECT, "15000");
locator = clusterStartUp.startLocatorVM(0, locatorProperties);
server1 = clusterStartUp.startRedisVM(1, locator.getPort());
server2 = clusterStartUp.startRedisVM(2, locator.getPort());
redisServerPort1 = clusterStartUp.getRedisPort(1);
redisServerPort2 = clusterStartUp.getRedisPort(2);
jedis1 = new Jedis(LOCAL_HOST, redisServerPort1, JEDIS_TIMEOUT);
jedis2 = new Jedis(LOCAL_HOST, redisServerPort2, JEDIS_TIMEOUT);
}
@Before
public void testSetup() {
jedis1.flushAll();
}
@AfterClass
public static void tearDown() {
jedis1.disconnect();
jedis2.disconnect();
server1.stop();
server2.stop();
}
@Test
public void hgetReturnsNewValues_whenPuttingValues() {
String key = "key";
Map<String, String> expectedMap = new HashMap<>();
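    // Two threads run in lockstep: one writes field-i/value-i through server1 while
    // the other polls server2 until the same field becomes visible, exercising
    // cross-member consistency of the hash data between the two redis servers.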
new ConcurrentLoopingThreads(ITERATION_COUNT,
(i) -> {
String field = "field-" + i;
String value = "value-" + i;
jedis1.hset(key, field, value);
expectedMap.put(field, value);
},
(i) -> GeodeAwaitility.await().atMost(Duration.ofSeconds(60)).untilAsserted(
() -> assertThat(jedis2.hget(key, "field-" + i)).isEqualTo("value-" + (i))))
.runInLockstep();
assertThat(jedis1.hgetAll(key)).containsExactlyInAnyOrderEntriesOf(expectedMap);
}
}
| geode-apis-compatible-with-redis/src/distributedTest/java/org/apache/geode/redis/internal/executor/hash/HGetDUnitTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.redis.internal.executor.hash;
import static org.apache.geode.distributed.ConfigurationProperties.MAX_WAIT_TIME_RECONNECT;
import static org.assertj.core.api.Assertions.assertThat;
import java.time.Duration;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import redis.clients.jedis.Jedis;
import org.apache.geode.redis.ConcurrentLoopingThreads;
import org.apache.geode.test.awaitility.GeodeAwaitility;
import org.apache.geode.test.dunit.rules.MemberVM;
import org.apache.geode.test.dunit.rules.RedisClusterStartupRule;
public class HGetDUnitTest {
@ClassRule
public static RedisClusterStartupRule clusterStartUp = new RedisClusterStartupRule(3);
private static final String LOCAL_HOST = "127.0.0.1";
private static final int ITERATION_COUNT = 500;
private static final int JEDIS_TIMEOUT = Math.toIntExact(GeodeAwaitility.getTimeout().toMillis());
private static Jedis jedis1;
private static Jedis jedis2;
private static Properties locatorProperties;
private static MemberVM locator;
private static MemberVM server1;
private static MemberVM server2;
private static int redisServerPort1;
private static int redisServerPort2;
@BeforeClass
public static void classSetup() {
locatorProperties = new Properties();
locatorProperties.setProperty(MAX_WAIT_TIME_RECONNECT, "15000");
locator = clusterStartUp.startLocatorVM(0, locatorProperties);
server1 = clusterStartUp.startRedisVM(1, locator.getPort());
server2 = clusterStartUp.startRedisVM(2, locator.getPort());
redisServerPort1 = clusterStartUp.getRedisPort(1);
redisServerPort2 = clusterStartUp.getRedisPort(2);
jedis1 = new Jedis(LOCAL_HOST, redisServerPort1, JEDIS_TIMEOUT);
jedis2 = new Jedis(LOCAL_HOST, redisServerPort2, JEDIS_TIMEOUT);
}
@Before
public void testSetup() {
jedis1.flushAll();
}
@AfterClass
public static void tearDown() {
jedis1.disconnect();
jedis2.disconnect();
server1.stop();
server2.stop();
}
@Test
public void hgetReturnsNewValues_whenPuttingValues() {
String key = "key";
Map<String, String> expectedMap = new HashMap<>();
new ConcurrentLoopingThreads(ITERATION_COUNT,
(i) -> {
String field = "field-" + i;
String value = "value-" + i;
jedis1.hset(key, field, value);
expectedMap.put(field, value);
},
(i) -> GeodeAwaitility.await().atMost(Duration.ofSeconds(1)).untilAsserted(
() -> assertThat(jedis2.hget(key, "field-" + i)).isEqualTo("value-" + (i))))
.runInLockstep();
assertThat(jedis1.hgetAll(key)).containsExactlyInAnyOrderEntriesOf(expectedMap);
}
}
| GEODE-9069: Increasing the timeout in HGetDUnitTest
* Similar to the previous commit for GEODE-9069, the timeout is increased for HGetDUnitTest
| geode-apis-compatible-with-redis/src/distributedTest/java/org/apache/geode/redis/internal/executor/hash/HGetDUnitTest.java | GEODE-9069: Increasing the timeout in HGetDUnitTest |
|
Java | apache-2.0 | 428a82d59681a9ed1e3fc2e769e8725e37d65e92 | 0 | vanitasvitae/Smack,cjpx00008/Smack,u20024804/Smack,igorexax3mal/Smack,unisontech/Smack,ishan1604/Smack,vito-c/Smack,kkroid/OnechatSmack,esl/Smack,deeringc/Smack,deeringc/Smack,opg7371/Smack,igorexax3mal/Smack,lovely3x/Smack,unisontech/Smack,opg7371/Smack,andrey42/Smack,vito-c/Smack,kkroid/OnechatSmack,annovanvliet/Smack,Flowdalic/Smack,magnetsystems/message-smack,deeringc/Smack,lovely3x/Smack,xuIcream/Smack,ishan1604/Smack,hy9902/Smack,cjpx00008/Smack,ayne/Smack,igniterealtime/Smack,esl/Smack,Flowdalic/Smack,Tibo-lg/Smack,vanitasvitae/smack-omemo,dpr-odoo/Smack,TTalkIM/Smack,u20024804/Smack,vanitasvitae/Smack,vanitasvitae/smack-omemo,TTalkIM/Smack,opg7371/Smack,ishan1604/Smack,magnetsystems/message-smack,mar-v-in/Smack,u20024804/Smack,magnetsystems/message-smack,vanitasvitae/Smack,lovely3x/Smack,TTalkIM/Smack,igorexax3mal/Smack,chuangWu/Smack,mar-v-in/Smack,mar-v-in/Smack,chuangWu/Smack,ayne/Smack,hy9902/Smack,hy9902/Smack,Tibo-lg/Smack,qingsong-xu/Smack,igniterealtime/Smack,chuangWu/Smack,igniterealtime/Smack,xuIcream/Smack,ayne/Smack,cjpx00008/Smack,xuIcream/Smack,andrey42/Smack,qingsong-xu/Smack,esl/Smack,Flowdalic/Smack,dpr-odoo/Smack,vanitasvitae/smack-omemo,Tibo-lg/Smack,annovanvliet/Smack,qingsong-xu/Smack,unisontech/Smack,dpr-odoo/Smack,annovanvliet/Smack,andrey42/Smack,kkroid/OnechatSmack | /**
* $RCSfile$
* $Revision$
* $Date$
*
* Copyright 2003-2004 Jive Software.
*
* All rights reserved. Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smackx.packet;
import java.util.*;
import org.jivesoftware.smack.packet.PacketExtension;
/**
* Represents message events relating to the delivery, display, composition and cancellation of
* messages.<p>
*
* There are four message events currently defined in this namespace:
* <ol>
* <li>Offline<br>
* Indicates that the message has been stored offline by the intended recipient's server. This
* event is triggered only if the intended recipient's server supports offline storage, has that
* support enabled, and the recipient is offline when the server receives the message for delivery.</li>
*
* <li>Delivered<br>
* Indicates that the message has been delivered to the recipient. This signifies that the message
* has reached the recipient's XMPP client, but does not necessarily mean that the message has
* been displayed. This event is to be raised by the XMPP client.</li>
*
* <li>Displayed<br>
* Once the message has been received by the recipient's XMPP client, it may be displayed to the
* user. This event indicates that the message has been displayed, and is to be raised by the
* XMPP client. Even if a message is displayed multiple times, this event should be raised only
* once.</li>
*
* <li>Composing<br>
* In threaded chat conversations, this indicates that the recipient is composing a reply to a
* message. The event is to be raised by the recipient's XMPP client. A XMPP client is allowed
* to raise this event multiple times in response to the same request, providing the original
* event is cancelled first.</li>
* </ol>
*
* @author Gaston Dombiak
*/
public class MessageEvent implements PacketExtension {
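    /*
     * Typical use (illustrative sketch; assumes the standard Smack packet API, where
     * Message inherits addExtension(PacketExtension) from Packet): create the extension,
     * flag the notifications you want, and attach it to an outgoing message:
     *
     *   MessageEvent event = new MessageEvent();
     *   event.setComposing(true);       // request composing notifications
     *   message.addExtension(event);    // message is an org.jivesoftware.smack.packet.Message
     */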
public static final String OFFLINE = "offline";
public static final String COMPOSING = "composing";
public static final String DISPLAYED = "displayed";
public static final String DELIVERED = "delivered";
public static final String CANCELLED = "cancelled";
private boolean offline = false;
private boolean delivered = false;
private boolean displayed = false;
private boolean composing = false;
private boolean cancelled = true;
private String packetID = null;
/**
* Returns the XML element name of the extension sub-packet root element.
* Always returns "x"
*
* @return the XML element name of the packet extension.
*/
public String getElementName() {
return "x";
}
/**
* Returns the XML namespace of the extension sub-packet root element.
     * According to the specification the namespace is always "jabber:x:event"
*
* @return the XML namespace of the packet extension.
*/
public String getNamespace() {
return "jabber:x:event";
}
/**
* When the message is a request returns if the sender of the message requests to be notified
* when the receiver is composing a reply.
* When the message is a notification returns if the receiver of the message is composing a
* reply.
*
* @return true if the sender is requesting to be notified when composing or when notifying
* that the receiver of the message is composing a reply
*/
public boolean isComposing() {
return composing;
}
/**
* When the message is a request returns if the sender of the message requests to be notified
* when the message is delivered.
* When the message is a notification returns if the message was delivered or not.
*
* @return true if the sender is requesting to be notified when delivered or when notifying
* that the message was delivered
*/
public boolean isDelivered() {
return delivered;
}
/**
* When the message is a request returns if the sender of the message requests to be notified
* when the message is displayed.
* When the message is a notification returns if the message was displayed or not.
*
* @return true if the sender is requesting to be notified when displayed or when notifying
* that the message was displayed
*/
public boolean isDisplayed() {
return displayed;
}
/**
* When the message is a request returns if the sender of the message requests to be notified
* when the receiver of the message is offline.
* When the message is a notification returns if the receiver of the message was offline.
*
* @return true if the sender is requesting to be notified when offline or when notifying
* that the receiver of the message is offline
*/
public boolean isOffline() {
return offline;
}
/**
* When the message is a notification returns if the receiver of the message cancelled
* composing a reply.
*
* @return true if the receiver of the message cancelled composing a reply
*/
public boolean isCancelled() {
return cancelled;
}
/**
* Returns the unique ID of the message that requested to be notified of the event.
* The packet id is not used when the message is a request for notifications
*
* @return the message id that requested to be notified of the event.
*/
public String getPacketID() {
return packetID;
}
/**
* Returns the types of events. The type of event could be:
* "offline", "composing","delivered","displayed", "offline"
*
* @return an iterator over all the types of events of the MessageEvent.
*/
public Iterator getEventTypes() {
ArrayList allEvents = new ArrayList();
if (isDelivered()) {
allEvents.add(MessageEvent.DELIVERED);
}
if (!isMessageEventRequest() && isCancelled()) {
allEvents.add(MessageEvent.CANCELLED);
}
if (isComposing()) {
allEvents.add(MessageEvent.COMPOSING);
}
if (isDisplayed()) {
allEvents.add(MessageEvent.DISPLAYED);
}
if (isOffline()) {
allEvents.add(MessageEvent.OFFLINE);
}
return allEvents.iterator();
}
/**
* When the message is a request sets if the sender of the message requests to be notified
* when the receiver is composing a reply.
* When the message is a notification sets if the receiver of the message is composing a
* reply.
*
* @param composing sets if the sender is requesting to be notified when composing or when
* notifying that the receiver of the message is composing a reply
*/
public void setComposing(boolean composing) {
this.composing = composing;
setCancelled(false);
}
/**
* When the message is a request sets if the sender of the message requests to be notified
* when the message is delivered.
* When the message is a notification sets if the message was delivered or not.
*
* @param delivered sets if the sender is requesting to be notified when delivered or when
* notifying that the message was delivered
*/
public void setDelivered(boolean delivered) {
this.delivered = delivered;
setCancelled(false);
}
/**
* When the message is a request sets if the sender of the message requests to be notified
* when the message is displayed.
* When the message is a notification sets if the message was displayed or not.
*
* @param displayed sets if the sender is requesting to be notified when displayed or when
* notifying that the message was displayed
*/
public void setDisplayed(boolean displayed) {
this.displayed = displayed;
setCancelled(false);
}
/**
* When the message is a request sets if the sender of the message requests to be notified
* when the receiver of the message is offline.
* When the message is a notification sets if the receiver of the message was offline.
*
* @param offline sets if the sender is requesting to be notified when offline or when
* notifying that the receiver of the message is offline
*/
public void setOffline(boolean offline) {
this.offline = offline;
setCancelled(false);
}
/**
* When the message is a notification sets if the receiver of the message cancelled
* composing a reply.
* The Cancelled event is never requested explicitly. It is requested implicitly when
* requesting to be notified of the Composing event.
*
* @param cancelled sets if the receiver of the message cancelled composing a reply
*/
public void setCancelled(boolean cancelled) {
this.cancelled = cancelled;
}
/**
* Sets the unique ID of the message that requested to be notified of the event.
* The packet id is not used when the message is a request for notifications
*
* @param packetID the message id that requested to be notified of the event.
*/
public void setPacketID(String packetID) {
this.packetID = packetID;
}
/**
* Returns true if this MessageEvent is a request for notifications.
* Returns false if this MessageEvent is a notification of an event.
*
* @return true if this message is a request for notifications.
*/
public boolean isMessageEventRequest() {
return this.packetID == null;
}
/**
* Returns the XML representation of a Message Event according the specification.
*
* Usually the XML representation will be inside of a Message XML representation like
* in the following examples:<p>
*
* Request to be notified when displayed:
* <pre>
* <message
* to='[email protected]/orchard'
* from='[email protected]/balcony'
* id='message22'>
* <x xmlns='jabber:x:event'>
* <displayed/>
* </x>
* </message>
* </pre>
*
* Notification of displayed:
* <pre>
* <message
* from='[email protected]/orchard'
* to='[email protected]/balcony'>
* <x xmlns='jabber:x:event'>
* <displayed/>
* <id>message22</id>
* </x>
* </message>
* </pre>
*
*/
public String toXML() {
StringBuffer buf = new StringBuffer();
buf.append("<").append(getElementName()).append(" xmlns=\"").append(getNamespace()).append(
"\">");
// Note: Cancellation events don't specify any tag. They just send the packetID
// Add the offline tag if the sender requests to be notified of offline events or if
// the target is offline
if (isOffline())
buf.append("<").append(MessageEvent.OFFLINE).append("/>");
// Add the delivered tag if the sender requests to be notified when the message is
// delivered or if the target notifies that the message has been delivered
if (isDelivered())
buf.append("<").append(MessageEvent.DELIVERED).append("/>");
// Add the displayed tag if the sender requests to be notified when the message is
// displayed or if the target notifies that the message has been displayed
if (isDisplayed())
buf.append("<").append(MessageEvent.DISPLAYED).append("/>");
// Add the composing tag if the sender requests to be notified when the target is
// composing a reply or if the target notifies that he/she is composing a reply
if (isComposing())
buf.append("<").append(MessageEvent.COMPOSING).append("/>");
// Add the id tag only if the MessageEvent is a notification message (not a request)
if (getPacketID() != null)
buf.append("<id>").append(getPacketID()).append("</id>");
buf.append("</").append(getElementName()).append(">");
return buf.toString();
}
}
| source/org/jivesoftware/smackx/packet/MessageEvent.java | /**
* $RCSfile$
* $Revision$
* $Date$
*
* Copyright 2003-2004 Jive Software.
*
* All rights reserved. Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jivesoftware.smackx.packet;
import java.util.*;
import org.jivesoftware.smack.packet.PacketExtension;
/**
* Represents message events relating to the delivery, display, composition and cancellation of
* messages.<p>
*
* There are four message events currently defined in this namespace:
* <ol>
* <li>Offline<br>
* Indicates that the message has been stored offline by the intended recipient's server. This
* event is triggered only if the intended recipient's server supports offline storage, has that
* support enabled, and the recipient is offline when the server receives the message for delivery.</li>
*
* <li>Delivered<br>
* Indicates that the message has been delivered to the recipient. This signifies that the message
* has reached the recipient's XMPP client, but does not necessarily mean that the message has
* been displayed. This event is to be raised by the XMPP client.</li>
*
* <li>Displayed<br>
* Once the message has been received by the recipient's XMPP client, it may be displayed to the
* user. This event indicates that the message has been displayed, and is to be raised by the
* XMPP client. Even if a message is displayed multiple times, this event should be raised only
* once.</li>
*
* <li>Composing<br>
* In threaded chat conversations, this indicates that the recipient is composing a reply to a
* message. The event is to be raised by the recipient's XMPP client. A XMPP client is allowed
* to raise this event multiple times in response to the same request, providing the original
* event is cancelled first.</li>
* </ol>
*
* @author Gaston Dombiak
*/
public class MessageEvent implements PacketExtension {
public static final String OFFLINE = "offline";
public static final String COMPOSING = "composing";
public static final String DISPLAYED = "displayed";
public static final String DELIVERED = "delivered";
public static final String CANCELLED = "cancelled";
private boolean offline = false;
private boolean delivered = false;
private boolean displayed = false;
private boolean composing = false;
private boolean cancelled = true;
private String packetID = null;
/**
* Returns the XML element name of the extension sub-packet root element.
* Always returns "x"
*
* @return the XML element name of the packet extension.
*/
public String getElementName() {
return "x";
}
/**
* Returns the XML namespace of the extension sub-packet root element.
     * According to the specification the namespace is always "jabber:x:event"
*
* @return the XML namespace of the packet extension.
*/
public String getNamespace() {
return "jabber:x:event";
}
/**
* When the message is a request returns if the sender of the message requests to be notified
* when the receiver is composing a reply.
* When the message is a notification returns if the receiver of the message is composing a
* reply.
*
* @return true if the sender is requesting to be notified when composing or when notifying
* that the receiver of the message is composing a reply
*/
public boolean isComposing() {
return composing;
}
/**
* When the message is a request returns if the sender of the message requests to be notified
* when the message is delivered.
* When the message is a notification returns if the message was delivered or not.
*
* @return true if the sender is requesting to be notified when delivered or when notifying
* that the message was delivered
*/
public boolean isDelivered() {
return delivered;
}
/**
* When the message is a request returns if the sender of the message requests to be notified
* when the message is displayed.
* When the message is a notification returns if the message was displayed or not.
*
* @return true if the sender is requesting to be notified when displayed or when notifying
* that the message was displayed
*/
public boolean isDisplayed() {
return displayed;
}
/**
* When the message is a request returns if the sender of the message requests to be notified
* when the receiver of the message is offline.
* When the message is a notification returns if the receiver of the message was offline.
*
* @return true if the sender is requesting to be notified when offline or when notifying
* that the receiver of the message is offline
*/
public boolean isOffline() {
return offline;
}
/**
* When the message is a notification returns if the receiver of the message cancelled
* composing a reply.
*
* @return true if the receiver of the message cancelled composing a reply
*/
public boolean isCancelled() {
return cancelled;
}
/**
* Returns the unique ID of the message that requested to be notified of the event.
* The packet id is not used when the message is a request for notifications
*
* @return the message id that requested to be notified of the event.
*/
public String getPacketID() {
return packetID;
}
/**
* Returns the types of events. The type of event could be:
* "offline", "composing","delivered","displayed", "offline"
*
* @return an iterator over all the types of events of the MessageEvent.
*/
public Iterator getEventTypes() {
ArrayList allEvents = new ArrayList();
if (isDelivered()) {
allEvents.add(MessageEvent.DELIVERED);
}
if (isCancelled()) {
allEvents.add(MessageEvent.CANCELLED);
}
if (isComposing()) {
allEvents.add(MessageEvent.COMPOSING);
}
if (isDisplayed()) {
allEvents.add(MessageEvent.DISPLAYED);
}
if (isOffline()) {
allEvents.add(MessageEvent.OFFLINE);
}
return allEvents.iterator();
}
/**
* When the message is a request sets if the sender of the message requests to be notified
* when the receiver is composing a reply.
* When the message is a notification sets if the receiver of the message is composing a
* reply.
*
* @param composing sets if the sender is requesting to be notified when composing or when
* notifying that the receiver of the message is composing a reply
*/
public void setComposing(boolean composing) {
this.composing = composing;
setCancelled(false);
}
/**
* When the message is a request sets if the sender of the message requests to be notified
* when the message is delivered.
* When the message is a notification sets if the message was delivered or not.
*
* @param delivered sets if the sender is requesting to be notified when delivered or when
* notifying that the message was delivered
*/
public void setDelivered(boolean delivered) {
this.delivered = delivered;
setCancelled(false);
}
/**
* When the message is a request sets if the sender of the message requests to be notified
* when the message is displayed.
* When the message is a notification sets if the message was displayed or not.
*
* @param displayed sets if the sender is requesting to be notified when displayed or when
* notifying that the message was displayed
*/
public void setDisplayed(boolean displayed) {
this.displayed = displayed;
setCancelled(false);
}
/**
* When the message is a request sets if the sender of the message requests to be notified
* when the receiver of the message is offline.
* When the message is a notification sets if the receiver of the message was offline.
*
* @param offline sets if the sender is requesting to be notified when offline or when
* notifying that the receiver of the message is offline
*/
public void setOffline(boolean offline) {
this.offline = offline;
setCancelled(false);
}
/**
* When the message is a notification sets if the receiver of the message cancelled
* composing a reply.
* The Cancelled event is never requested explicitly. It is requested implicitly when
* requesting to be notified of the Composing event.
*
* @param cancelled sets if the receiver of the message cancelled composing a reply
*/
public void setCancelled(boolean cancelled) {
this.cancelled = cancelled;
}
/**
* Sets the unique ID of the message that requested to be notified of the event.
* The packet id is not used when the message is a request for notifications
*
* @param packetID the message id that requested to be notified of the event.
*/
public void setPacketID(String packetID) {
this.packetID = packetID;
}
/**
* Returns true if this MessageEvent is a request for notifications.
* Returns false if this MessageEvent is a notification of an event.
*
* @return true if this message is a request for notifications.
*/
public boolean isMessageEventRequest() {
return this.packetID == null;
}
/**
* Returns the XML representation of a Message Event according the specification.
*
* Usually the XML representation will be inside of a Message XML representation like
* in the following examples:<p>
*
* Request to be notified when displayed:
* <pre>
* <message
* to='[email protected]/orchard'
* from='[email protected]/balcony'
* id='message22'>
* <x xmlns='jabber:x:event'>
* <displayed/>
* </x>
* </message>
* </pre>
*
* Notification of displayed:
* <pre>
* <message
* from='[email protected]/orchard'
* to='[email protected]/balcony'>
* <x xmlns='jabber:x:event'>
* <displayed/>
* <id>message22</id>
* </x>
* </message>
* </pre>
*
*/
public String toXML() {
StringBuffer buf = new StringBuffer();
buf.append("<").append(getElementName()).append(" xmlns=\"").append(getNamespace()).append(
"\">");
// Note: Cancellation events don't specify any tag. They just send the packetID
// Add the offline tag if the sender requests to be notified of offline events or if
// the target is offline
if (isOffline())
buf.append("<").append(MessageEvent.OFFLINE).append("/>");
// Add the delivered tag if the sender requests to be notified when the message is
// delivered or if the target notifies that the message has been delivered
if (isDelivered())
buf.append("<").append(MessageEvent.DELIVERED).append("/>");
// Add the displayed tag if the sender requests to be notified when the message is
// displayed or if the target notifies that the message has been displayed
if (isDisplayed())
buf.append("<").append(MessageEvent.DISPLAYED).append("/>");
// Add the composing tag if the sender requests to be notified when the target is
// composing a reply or if the target notifies that he/she is composing a reply
if (isComposing())
buf.append("<").append(MessageEvent.COMPOSING).append("/>");
// Add the id tag only if the MessageEvent is a notification message (not a request)
if (getPacketID() != null)
buf.append("<id>").append(getPacketID()).append("</id>");
buf.append("</").append(getElementName()).append(">");
return buf.toString();
}
}
| Make sure that CANCELLED event type is not presence when message event is a request.
git-svn-id: 0d4d1bf2e47502aa0f0957c230a0ec7fa56198fd@3043 b35dd754-fafc-0310-a699-88a17e54d16e
| source/org/jivesoftware/smackx/packet/MessageEvent.java | Make sure that CANCELLED event type is not presence when message event is a request. |
|
Java | apache-2.0 | e651e489c81b917be6ddaf8f8ec39cf07696374d | 0 | baszero/yanel,wyona/yanel,wyona/yanel,wyona/yanel,wyona/yanel,baszero/yanel,baszero/yanel,baszero/yanel,wyona/yanel,baszero/yanel,baszero/yanel,wyona/yanel | package org.wyona.yanel.servlet;
import java.io.File;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.Writer;
import java.net.URL;
import java.util.Enumeration;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamSource;
import org.wyona.yanel.core.Path;
import org.wyona.yanel.core.Resource;
import org.wyona.yanel.core.ResourceTypeDefinition;
import org.wyona.yanel.core.ResourceTypeIdentifier;
import org.wyona.yanel.core.ResourceTypeRegistry;
import org.wyona.yanel.core.Yanel;
import org.wyona.yanel.core.api.attributes.ModifiableV1;
import org.wyona.yanel.core.api.attributes.ModifiableV2;
import org.wyona.yanel.core.api.attributes.VersionableV2;
import org.wyona.yanel.core.api.attributes.ViewableV1;
import org.wyona.yanel.core.api.attributes.ViewableV2;
import org.wyona.yanel.core.attributes.viewable.View;
import org.wyona.yanel.core.navigation.Node;
import org.wyona.yanel.core.navigation.Sitetree;
import org.wyona.yanel.core.map.Map;
import org.wyona.yanel.core.map.Realm;
import org.wyona.yanel.servlet.CreateUsecaseHelper;
import org.wyona.yanel.servlet.communication.HttpRequest;
import org.wyona.yanel.servlet.communication.HttpResponse;
import org.wyona.yanel.util.ResourceAttributeHelper;
import org.wyona.security.core.AuthenticationException;
import org.wyona.security.core.api.Identity;
import org.wyona.security.core.api.IdentityManager;
import org.wyona.security.core.api.PolicyManager;
import org.wyona.security.core.api.Role;
import org.apache.log4j.Category;
import org.apache.avalon.framework.configuration.Configuration;
import org.apache.avalon.framework.configuration.DefaultConfigurationBuilder;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/**
* Main Yanel servlet: entry point for all requests. Handles logout, authentication
* and authorization and delegates the actual work to resource implementations.
*/
public class YanelServlet extends HttpServlet {
private static Category log = Category.getInstance(YanelServlet.class);
private ServletConfig config;
ResourceTypeRegistry rtr;
//PolicyManager pm;
//IdentityManager im;
Map map;
Yanel yanel;
Sitetree sitetree;
File xsltInfoAndException;
File xsltLoginScreen;
private static String IDENTITY_KEY = "identity";
private static String NAMESPACE = "http://www.wyona.org/yanel/1.0";
private static final String METHOD_PROPFIND = "PROPFIND";
private static final String METHOD_OPTIONS = "OPTIONS";
private static final String METHOD_GET = "GET";
private static final String METHOD_POST = "POST";
private static final String METHOD_PUT = "PUT";
private static final String METHOD_DELETE = "DELETE";
private String sslPort = null;
/**
* Initializes the servlet: resolves the XSLT screens from the servlet configuration
* and sets up the Yanel core, resource type registry, map and sitetree beans.
*/
public void init(ServletConfig config) throws ServletException {
this.config = config;
xsltInfoAndException = org.wyona.commons.io.FileUtil.file(config.getServletContext().getRealPath("/"), config.getInitParameter("exception-and-info-screen-xslt"));
xsltLoginScreen = org.wyona.commons.io.FileUtil.file(config.getServletContext().getRealPath("/"), config.getInitParameter("login-screen-xslt"));
try {
yanel = Yanel.getInstance();
yanel.init();
rtr = yanel.getResourceTypeRegistry();
map = (Map) yanel.getBeanFactory().getBean("map");
sitetree = (Sitetree) yanel.getBeanFactory().getBean("nav-sitetree");
sslPort = config.getInitParameter("ssl-port");
} catch (Exception e) {
log.error(e);
throw new ServletException(e.getMessage(), e);
}
}
/**
* Dispatches all requests: handles logout, authentication and authorization first
* and then delegates to the method specific handlers (GET, POST, PUT, DELETE, PROPFIND, OPTIONS).
*/
public void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
String httpAcceptMediaTypes = request.getHeader("Accept");
log.debug("HTTP Accept Media Types: " + httpAcceptMediaTypes);
String httpUserAgent = request.getHeader("User-Agent");
log.debug("HTTP User Agent: " + httpUserAgent);
String httpAcceptLanguage = request.getHeader("Accept-Language");
log.debug("HTTP Accept Language: " + httpAcceptLanguage);
// Logout from Yanel
String yanelUsecase = request.getParameter("yanel.usecase");
if(yanelUsecase != null && yanelUsecase.equals("logout")) {
if(doLogout(request, response) != null) return;
}
// Authentication
if(doAuthenticate(request, response) != null) return;
// Check authorization
if(doAuthorize(request, response) != null) return;
// Delegate ...
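// Dispatch on the HTTP method; PROPFIND and OPTIONS provide the minimal WebDAV support implemented below.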
String method = request.getMethod();
if (method.equals(METHOD_PROPFIND)) {
doPropfind(request, response);
} else if (method.equals(METHOD_GET)) {
doGet(request, response);
} else if (method.equals(METHOD_POST)) {
doPost(request, response);
} else if (method.equals(METHOD_PUT)) {
doPut(request, response);
} else if (method.equals(METHOD_DELETE)) {
doDelete(request, response);
} else if (method.equals(METHOD_OPTIONS)) {
doOptions(request, response);
} else {
log.error("No such method implemented: " + method);
response.sendError(response.SC_NOT_IMPLEMENTED);
}
}
/**
* HTTP GET implementation (also handles the yanel.usecase=create usecase).
*/
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// Check if a new resource shall be created ...
String yanelUsecase = request.getParameter("yanel.usecase");
if(yanelUsecase != null && yanelUsecase.equals("create")) {
CreateUsecaseHelper creator = new CreateUsecaseHelper();
creator.create(request, response, yanel);
return;
}
getContent(request, response);
}
/**
* Generates the actual response: streams the resource view if one is available,
* otherwise an info/exception screen based on the internal "yanel" document.
*/
private void getContent(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
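// Build an internal "yanel" status document (DOM). It is only serialized to the client
// for meta requests and error screens, otherwise the resource view is streamed directly.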
View view = null;
org.w3c.dom.Document doc = null;
javax.xml.parsers.DocumentBuilderFactory dbf= javax.xml.parsers.DocumentBuilderFactory.newInstance();
dbf.setNamespaceAware(true);
try {
javax.xml.parsers.DocumentBuilder parser = dbf.newDocumentBuilder();
org.w3c.dom.DOMImplementation impl = parser.getDOMImplementation();
org.w3c.dom.DocumentType doctype = null;
doc = impl.createDocument(NAMESPACE, "yanel", doctype);
} catch(Exception e) {
log.error(e.getMessage(), e);
throw new ServletException(e.getMessage());
}
Element rootElement = doc.getDocumentElement();
String servletContextRealPath = config.getServletContext().getRealPath("/");
rootElement.setAttribute("servlet-context-real-path", servletContextRealPath);
//log.debug("servletContextRealPath: " + servletContextRealPath);
//log.debug("contextPath: " + request.getContextPath());
//log.debug("servletPath: " + request.getServletPath());
Element requestElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "request"));
requestElement.setAttributeNS(NAMESPACE, "uri", request.getRequestURI());
requestElement.setAttributeNS(NAMESPACE, "servlet-path", request.getServletPath());
HttpSession session = request.getSession(true);
Element sessionElement = (Element) rootElement.appendChild(doc.createElement("session"));
sessionElement.setAttribute("id", session.getId());
Enumeration attrNames = session.getAttributeNames();
if (!attrNames.hasMoreElements()) {
Element sessionNoAttributesElement = (Element) sessionElement.appendChild(doc.createElement("no-attributes"));
}
while (attrNames.hasMoreElements()) {
String name = (String)attrNames.nextElement();
String value = session.getAttribute(name).toString();
Element sessionAttributeElement = (Element) sessionElement.appendChild(doc.createElement("attribute"));
sessionAttributeElement.setAttribute("name", name);
sessionAttributeElement.appendChild(doc.createTextNode(value));
}
Realm realm;
Path path;
ResourceTypeIdentifier rti;
try {
realm = map.getRealm(request.getServletPath());
path = map.getPath(realm, request.getServletPath());
rti = yanel.getResourceManager().getResourceTypeIdentifier(realm, path);
} catch (Exception e) {
String message = "URL could not be mapped to realm/path " + e.getMessage();
log.error(message, e);
Element exceptionElement = (Element) rootElement.appendChild(doc.createElement("exception"));
exceptionElement.appendChild(doc.createTextNode(message));
setYanelOutput(request, response, doc);
response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
return;
}
//String rti = map.getResourceTypeIdentifier(new Path(request.getServletPath()));
Resource res = null;
long lastModified = -1;
long size = -1;
if (rti != null) {
ResourceTypeDefinition rtd = rtr.getResourceTypeDefinition(rti.getUniversalName());
if (rtd == null) {
String message = "No such resource type registered: " + rti.getUniversalName() + ", check " + rtr.getConfigurationFile();
log.error(message);
Element exceptionElement = (Element) rootElement.appendChild(doc.createElement("exception"));
exceptionElement.appendChild(doc.createTextNode(message));
setYanelOutput(request, response, doc);
response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
return;
}
Element rtiElement = (Element) rootElement.appendChild(doc.createElement("resource-type-identifier"));
rtiElement.setAttribute("namespace", rtd.getResourceTypeNamespace());
rtiElement.setAttribute("local-name", rtd.getResourceTypeLocalName());
try {
HttpRequest httpRequest = new HttpRequest(request);
HttpResponse httpResponse = new HttpResponse(response);
res = yanel.getResourceManager().getResource(httpRequest, httpResponse, realm, path, rtd, rti);
if (res != null) {
Element resourceElement = (Element) rootElement.appendChild(doc.createElement("resource"));
if (ResourceAttributeHelper.hasAttributeImplemented(res, "Viewable", "1")) {
log.info("Resource is viewable V1");
Element viewElement = (Element) resourceElement.appendChild(doc.createElement("view"));
viewElement.appendChild(doc.createTextNode("View Descriptors: " + ((ViewableV1) res).getViewDescriptors()));
String viewId = request.getParameter("yanel.resource.viewid");
try {
view = ((ViewableV1) res).getView(request, viewId);
} catch(org.wyona.yarep.core.NoSuchNodeException e) {
// TODO: Log all 404 within a dedicated file (with client info attached) such that an admin can react to it ...
String message = "No such node exception: " + e;
log.warn(e);
//log.error(e.getMessage(), e);
Element exceptionElement = (Element) rootElement.appendChild(doc.createElement("exception"));
exceptionElement.appendChild(doc.createTextNode(message));
exceptionElement.setAttribute("status", "404");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND);
setYanelOutput(request, response, doc);
return;
} catch(Exception e) {
log.error(e.getMessage(), e);
String message = e.toString();
log.error(e.getMessage(), e);
Element exceptionElement = (Element) rootElement.appendChild(doc.createElement("exception"));
exceptionElement.appendChild(doc.createTextNode(message));
exceptionElement.setAttribute("status", "500");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
setYanelOutput(request, response, doc);
return;
}
} else if (ResourceAttributeHelper.hasAttributeImplemented(res, "Viewable", "2")) {
log.info("Resource is viewable V2");
String viewId = request.getParameter("yanel.resource.viewid");
Element viewElement = (Element) resourceElement.appendChild(doc.createElement("view"));
viewElement.appendChild(doc.createTextNode("View Descriptors: " + ((ViewableV2) res).getViewDescriptors()));
size = ((ViewableV2) res).getSize();
Element sizeElement = (Element) resourceElement.appendChild(doc.createElement("size"));
sizeElement.appendChild(doc.createTextNode(String.valueOf(size)));
try {
view = ((ViewableV2) res).getView(viewId);
} catch(org.wyona.yarep.core.NoSuchNodeException e) {
// TODO: Log all 404 within a dedicated file (with client info attached) such that an admin can react to it ...
String message = "No such node exception: " + e;
log.warn(e);
//log.error(e.getMessage(), e);
Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception"));
exceptionElement.appendChild(doc.createTextNode(message));
exceptionElement.setAttributeNS(NAMESPACE, "status", "404");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND);
setYanelOutput(request, response, doc);
return;
}
} else {
Element noViewElement = (Element) resourceElement.appendChild(doc.createElement("not-viewable"));
noViewElement.appendChild(doc.createTextNode(res.getClass().getName() + " is not viewable!"));
}
if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "2")) {
lastModified = ((ModifiableV2) res).getLastModified();
Element lastModifiedElement = (Element) resourceElement.appendChild(doc.createElement("last-modified"));
lastModifiedElement.appendChild(doc.createTextNode(new java.util.Date(lastModified).toString()));
} else {
Element noLastModifiedElement = (Element) resourceElement.appendChild(doc.createElement("no-last-modified"));
}
if (ResourceAttributeHelper.hasAttributeImplemented(res, "Versionable", "2")) {
// retrieve the revisions, but only in the meta usecase (for performance reasons):
if (request.getParameter("yanel.resource.meta") != null) {
String[] revisions = ((VersionableV2)res).getRevisions();
Element revisionsElement = (Element) resourceElement.appendChild(doc.createElement("revisions"));
if (revisions != null) {
for (int i=0; i<revisions.length; i++) {
Element revisionElement = (Element) revisionsElement.appendChild(doc.createElement("revision"));
revisionElement.appendChild(doc.createTextNode(revisions[i]));
}
} else {
Element noRevisionsYetElement = (Element) resourceElement.appendChild(doc.createElement("no-revisions-yet"));
}
}
} else {
Element notVersionableElement = (Element) resourceElement.appendChild(doc.createElement("not-versionable"));
}
} else {
Element resourceIsNullElement = (Element) rootElement.appendChild(doc.createElement("resource-is-null"));
}
} catch(Exception e) {
log.error(e.getMessage(), e);
String message = e.toString();
Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception"));
exceptionElement.appendChild(doc.createTextNode(message));
setYanelOutput(request, response, doc);
response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
return;
}
} else {
Element noRTIFoundElement = (Element) rootElement.appendChild(doc.createElement("no-resource-type-identifier-found"));
noRTIFoundElement.setAttribute("servlet-path", request.getServletPath());
}
String usecase = request.getParameter("yanel.resource.usecase");
if (usecase != null && usecase.equals("checkout")) {
log.debug("Checkout data ...");
// TODO: Implement checkout ...
log.warn("Acquire lock has not been implemented yet ...!");
// acquireLock();
}
String meta = request.getParameter("yanel.resource.meta");
if (meta != null) {
if (meta.length() > 0) {
log.error("DEBUG: meta length: " + meta.length());
} else {
log.error("DEBUG: Show all meta");
}
response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK);
setYanelOutput(request, response, doc);
return;
}
if (view != null) {
// check if the view contains the response (otherwise assume that the resource
// wrote the response, and just return).
if (!view.isResponse()) return;
response.setContentType(patchContentType(view.getMimeType(), request));
InputStream is = view.getInputStream();
//BufferedReader reader = new BufferedReader(new InputStreamReader(is));
//String line;
//System.out.println("getContentXML: "+path);
//while ((line = reader.readLine()) != null) System.out.println(line);
byte buffer[] = new byte[8192];
int bytesRead;
if (is != null) {
// TODO: Yarep does not set the returned stream to null resp. is missing exception handling for the constructor. Exceptions should not be handled here, but rather within Yarep or whatever repository layer is being used ...
bytesRead = is.read(buffer);
if (bytesRead == -1) {
String message = "InputStream of view does not seem to contain any data!";
Element exceptionElement = (Element) rootElement.appendChild(doc.createElement("exception"));
exceptionElement.appendChild(doc.createTextNode(message));
setYanelOutput(request, response, doc);
response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
return;
}
// TODO: Compare If-Modified-Since with lastModified and return 304 without content resp. check on ETag
String ifModifiedSince = request.getHeader("If-Modified-Since");
if (ifModifiedSince != null) {
log.warn("TODO: Implement 304 ...");
}
java.io.OutputStream os = response.getOutputStream();
os.write(buffer, 0, bytesRead);
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
if(lastModified >= 0) response.setDateHeader("Last-Modified", lastModified);
return;
} else {
String message = "InputStream of view is null!";
Element exceptionElement = (Element) rootElement.appendChild(doc.createElement("exception"));
exceptionElement.appendChild(doc.createTextNode(message));
}
} else {
String message = "View is null!";
Element exceptionElement = (Element) rootElement.appendChild(doc.createElement("exception"));
exceptionElement.appendChild(doc.createTextNode(message));
}
setYanelOutput(request, response, doc);
response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
return;
}
/**
*
*/
public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
String value = request.getParameter("yanel.resource.usecase");
if (value != null && value.equals("save")) {
log.debug("Save data ...");
save(request, response);
return;
} else if (value != null && value.equals("checkin")) {
log.debug("Checkin data ...");
save(request, response);
// TODO: Implement checkin ...
log.warn("Release lock has not been implemented yet ...");
// releaseLock();
return;
} else {
log.info("No parameter yanel.resource.usecase!");
String contentType = request.getContentType();
if (contentType.indexOf("application/atom+xml") >= 0) {
InputStream in = intercept(request.getInputStream());
// Create new Atom entry
try {
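// Universal name (namespace plus local name) of the resource type which handles Atom entries.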
String atomEntryUniversalName = "<{http://www.wyona.org/yanel/resource/1.0}atom-entry/>";
org.wyona.yanel.core.map.Realm realm = yanel.getMap().getRealm(request.getServletPath());
Path newEntryPath = yanel.getMap().getPath(realm, request.getServletPath() + "/" + new java.util.Date().getTime() + ".xml");
log.error("DEBUG: Realm and Path of new Atom entry: " + realm + " " + newEntryPath);
Resource atomEntryResource = yanel.getResourceManager().getResource(request, response, realm, newEntryPath, new ResourceTypeRegistry().getResourceTypeDefinition(atomEntryUniversalName), new org.wyona.yanel.core.ResourceTypeIdentifier(atomEntryUniversalName, null));
((ModifiableV2)atomEntryResource).write(in);
byte buffer[] = new byte[8192];
int bytesRead;
InputStream resourceIn = ((ModifiableV2)atomEntryResource).getInputStream();
OutputStream responseOut = response.getOutputStream();
while ((bytesRead = resourceIn.read(buffer)) != -1) {
responseOut.write(buffer, 0, bytesRead);
}
// TODO: Fix Location ...
response.setHeader("Location", "http://ulysses.wyona.org" + newEntryPath);
response.setStatus(javax.servlet.http.HttpServletResponse.SC_CREATED);
return;
} catch (Exception e) {
log.error(e.getMessage(), e);
throw new IOException(e.getMessage());
}
}
getContent(request, response);
}
}
/**
* HTTP PUT implementation
*/
public void doPut(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// TODO: Reuse code doPost resp. share code with doPut
String value = request.getParameter("yanel.resource.usecase");
if (value != null && value.equals("save")) {
log.debug("Save data ...");
save(request, response);
return;
} else if (value != null && value.equals("checkin")) {
log.debug("Checkin data ...");
save(request, response);
// TODO: Implement checkin ...
log.warn("Release lock has not been implemented yet ...!");
// releaseLock();
return;
} else {
log.warn("No parameter yanel.resource.usecase!");
String contentType = request.getContentType();
if (contentType.indexOf("application/atom+xml") >= 0) {
InputStream in = intercept(request.getInputStream());
// Overwrite existing atom entry
try {
String atomEntryUniversalName = "<{http://www.wyona.org/yanel/resource/1.0}atom-entry/>";
org.wyona.yanel.core.map.Realm realm = yanel.getMap().getRealm(request.getServletPath());
Path entryPath = yanel.getMap().getPath(realm, request.getServletPath());
log.error("DEBUG: Realm and Path of new Atom entry: " + realm + " " + entryPath);
Resource atomEntryResource = yanel.getResourceManager().getResource(request, response, realm, entryPath, new ResourceTypeRegistry().getResourceTypeDefinition(atomEntryUniversalName), new org.wyona.yanel.core.ResourceTypeIdentifier(atomEntryUniversalName, null));
// TODO: There seems to be a problem ...
((ModifiableV2)atomEntryResource).write(in);
// NOTE: This method does not update updated date
/*
OutputStream out = ((ModifiableV2)atomEntry).getOutputStream(entryPath);
byte buffer[] = new byte[8192];
int bytesRead;
while ((bytesRead = in.read(buffer)) != -1) {
out.write(buffer, 0, bytesRead);
}
*/
log.info("Atom entry has been saved: " + entryPath);
response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK);
return;
} catch (Exception e) {
log.error(e.getMessage(), e);
throw new IOException(e.getMessage());
}
} else {
save(request, response);
/*
log.warn("TODO: WebDAV PUT ...");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_IMPLEMENTED);
return;
*/
}
}
}
/**
* HTTP DELETE implementation
*/
public void doDelete(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
try {
Resource res = getResource(request, response);
if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "2")) {
if (((ModifiableV2) res).delete()) {
log.debug("Resource has been deleted: " + res);
response.setStatus(response.SC_OK);
return;
} else {
log.warn("Resource could not be deleted: " + res);
response.setStatus(response.SC_FORBIDDEN);
return;
}
} else {
log.error("Resource '" + res + "' has interface ModifiableV2 not implemented." );
response.sendError(response.SC_NOT_IMPLEMENTED);
return;
}
} catch (Exception e) {
log.error("Could not delete resource with URL " + request.getRequestURL() + " " + e.getMessage(), e);
throw new ServletException(e.getMessage(), e);
}
}
/**
* Resolves the resource which corresponds to the servlet path of the given request.
*/
private Resource getResource(HttpServletRequest request, HttpServletResponse response) {
try {
Realm realm = map.getRealm(request.getServletPath());
Path path = map.getPath(realm, request.getServletPath());
HttpRequest httpRequest = new HttpRequest(request);
HttpResponse httpResponse = new HttpResponse(response);
Resource res = yanel.getResourceManager().getResource(httpRequest, httpResponse, realm, path);
return res;
} catch(Exception e) {
log.error(e.getMessage(), e);
return null;
}
}
/**
* Save data
*/
private void save(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
log.debug("Save data ...");
InputStream in = request.getInputStream();
java.io.ByteArrayOutputStream baos = new java.io.ByteArrayOutputStream();
byte[] buf = new byte[8192];
int bytesR;
while ((bytesR = in.read(buf)) != -1) {
baos.write(buf, 0, bytesR);
}
// Buffer within memory (TODO: Maybe replace with File-buffering ...)
// http://www-128.ibm.com/developerworks/java/library/j-io1/
byte[] memBuffer = baos.toByteArray();
// TODO: Should be delegated to resource type, e.g. <{http://...}xml/>!
// Check on well-formedness ...
String contentType = request.getContentType();
log.debug("Content-Type: " + contentType);
if (contentType.indexOf("application/xml") >= 0 || contentType.indexOf("application/xhtml+xml") >= 0) {
log.info("Check well-formedness ...");
javax.xml.parsers.DocumentBuilderFactory dbf= javax.xml.parsers.DocumentBuilderFactory.newInstance();
try {
javax.xml.parsers.DocumentBuilder parser = dbf.newDocumentBuilder();
// TODO: Get log messages into log4j ...
//parser.setErrorHandler(...);
// NOTE: DOCTYPE is being resolved/retrieved (e.g. xhtml schema from w3.org) also
// if isValidating is set to false.
// Hence, for performance and network reasons we use a local catalog ...
// Also see http://www.xml.com/pub/a/2004/03/03/catalogs.html
// resp. http://xml.apache.org/commons/components/resolver/
// TODO: What about a resolver factory?
parser.setEntityResolver(new org.apache.xml.resolver.tools.CatalogResolver());
parser.parse(new java.io.ByteArrayInputStream(memBuffer));
//org.w3c.dom.Document document = parser.parse(new ByteArrayInputStream(memBuffer));
} catch (org.xml.sax.SAXException e) {
log.warn("Data is not well-formed: "+e.getMessage());
StringBuffer sb = new StringBuffer();
sb.append("<?xml version=\"1.0\"?>");
sb.append("<exception xmlns=\"http://www.wyona.org/neutron/1.0\" type=\"data-not-well-formed\">");
sb.append("<message>Data is not well-formed: "+e.getMessage()+"</message>");
sb.append("</exception>");
response.setContentType("application/xml");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
PrintWriter w = response.getWriter();
w.print(sb);
return;
} catch (Exception e) {
log.error(e.getMessage(), e);
StringBuffer sb = new StringBuffer();
sb.append("<?xml version=\"1.0\"?>");
sb.append("<exception xmlns=\"http://www.wyona.org/neutron/1.0\" type=\"neutron\">");
//sb.append("<message>" + e.getStackTrace() + "</message>");
//sb.append("<message>" + e.getMessage() + "</message>");
sb.append("<message>" + e + "</message>");
sb.append("</exception>");
response.setContentType("application/xml");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
PrintWriter w = response.getWriter();
w.print(sb);
return;
}
log.info("Data seems to be well-formed :-)");
} else {
log.info("No well-formedness check required for content type: " + contentType);
}
java.io.ByteArrayInputStream memIn = new java.io.ByteArrayInputStream(memBuffer);
// IMPORTANT TODO: Use ModifiableV2.write(InputStream in) such that resource can modify data during saving resp. check if getOutputStream is equals null and then use write ....
OutputStream out = null;
Resource res = getResource(request, response);
if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "1")) {
out = ((ModifiableV1) res).getOutputStream(new Path(request.getServletPath()));
write(memIn, out, request, response);
return;
} else if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "2")) {
try {
out = ((ModifiableV2) res).getOutputStream();
if (out != null) {
write(memIn, out, request, response);
} else {
((ModifiableV2) res).write(memIn);
}
return;
} catch (Exception e) {
log.error(e.getMessage(), e);
throw new ServletException(e.getMessage(), e);
}
} else {
String message = res.getClass().getName() + " is not modifiable (neither V1 nor V2)!";
log.warn(message);
StringBuffer sb = new StringBuffer();
// TODO: Differentiate between Neutron based and other clients ...
sb.append("<?xml version=\"1.0\"?>");
sb.append("<exception xmlns=\"http://www.wyona.org/neutron/1.0\" type=\"neutron\">");
sb.append("<message>" + message + "</message>");
sb.append("</exception>");
response.setContentType("application/xml");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
PrintWriter w = response.getWriter();
w.print(sb);
return;
}
}
/**
* Authorize request (and also authenticate for HTTP BASIC)
*/
private HttpServletResponse doAuthorize(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
Role role = null;
// TODO: Replace hardcoded roles by mapping between roles and query strings ...
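// Current mapping: save/checkin -> write, checkout -> open, PUT/POST -> write, DELETE -> delete, yanel.usecase=create -> create, everything else -> view.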
String value = request.getParameter("yanel.resource.usecase");
String contentType = request.getContentType();
String method = request.getMethod();
if (value != null && value.equals("save")) {
log.debug("Save data ...");
role = new Role("write");
} else if (value != null && value.equals("checkin")) {
log.debug("Checkin data ...");
role = new Role("write");
} else if (value != null && value.equals("checkout")) {
log.debug("Checkout data ...");
role = new Role("open");
} else if (contentType != null && contentType.indexOf("application/atom+xml") >= 0 && (method.equals(METHOD_PUT) || method.equals(METHOD_POST))) {
// TODO: Is posting atom entries different from a general post (see below)?!
log.error("DEBUG: Write/Checkin Atom entry ...");
role = new Role("write");
} else if (method.equals(METHOD_PUT) || method.equals(METHOD_POST)) {
log.error("DEBUG: Upload data ...");
role = new Role("write");
} else if (method.equals(METHOD_DELETE)) {
log.error("DEBUG: Delete resource ...");
role = new Role("delete");
} else {
log.debug("Role will be 'view'!");
role = new Role("view");
}
value = request.getParameter("yanel.usecase");
if (value != null && value.equals("create")) {
log.debug("Create new resource ...");
role = new Role("create");
}
boolean authorized = false;
Realm realm;
Path path;
try {
realm = map.getRealm(request.getServletPath());
path = map.getPath(realm, request.getServletPath());
} catch (Exception e) {
log.error(e.getMessage(), e);
throw new ServletException(e.getMessage(), e);
}
// HTTP BASIC Authorization (for clients such as Sunbird, OpenOffice or cadaver)
// IMPORTANT NOTE: BASIC Authentication needs to be checked on every request, because clients often do not support session handling
String authorization = request.getHeader("Authorization");
log.debug("Checking for Authorization Header: " + authorization);
if (authorization != null) {
if (authorization.toUpperCase().startsWith("BASIC")) {
log.debug("Using BASIC authorization ...");
// Get encoded user and password, comes after "BASIC "
String userpassEncoded = authorization.substring(6);
// Decode it, using any base 64 decoder
sun.misc.BASE64Decoder dec = new sun.misc.BASE64Decoder();
String userpassDecoded = new String(dec.decodeBuffer(userpassEncoded));
log.debug("Username and Password Decoded: " + userpassDecoded);
String[] up = userpassDecoded.split(":");
String username = up[0];
String password = up[1];
log.debug("username: " + username + ", password: " + password);
try {
if (realm.getIdentityManager().authenticate(username, password)) {
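// NOTE: The role computed above is not used here; BASIC requests are currently authorized against the hardcoded "view" role.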
authorized = realm.getPolicyManager().authorize(path, new Identity(username, null), new Role("view"));
if(authorized) {
return null;
} else {
log.warn("HTTP BASIC Authorization failed for " + username + "!");
response.setHeader("WWW-Authenticate", "BASIC realm=\"" + realm.getName() + "\"");
response.sendError(response.SC_UNAUTHORIZED);
PrintWriter writer = response.getWriter();
writer.print("BASIC Authorization Failed!");
return response;
}
} else {
log.warn("HTTP BASIC Authentication failed for " + username + "!");
response.setHeader("WWW-Authenticate", "BASIC realm=\"" + realm.getName() + "\"");
response.sendError(response.SC_UNAUTHORIZED);
PrintWriter writer = response.getWriter();
writer.print("BASIC Authentication Failed!");
return response;
}
} catch (Exception e) {
log.error(e.getMessage(), e);
throw new ServletException(e.getMessage(), e);
}
} else if (authorization.toUpperCase().startsWith("DIGEST")) {
log.error("DIGEST is not implemented");
authorized = false;
response.sendError(response.SC_UNAUTHORIZED);
response.setHeader("WWW-Authenticate", "DIGEST realm=\"" + realm.getName() + "\"");
PrintWriter writer = response.getWriter();
writer.print("DIGEST is not implemented!");
return response;
} else {
log.warn("No such authorization implemented resp. handled by session based authorization: " + authorization);
authorized = false;
}
}
// Custom Authorization
log.debug("Do session based custom authorization");
//String[] groupnames = {"null", "null"};
HttpSession session = request.getSession(true);
Identity identity = (Identity) session.getAttribute(IDENTITY_KEY);
if (identity == null) {
log.debug("Identity is WORLD");
identity = new Identity();
}
//authorized = pm.authorize(new org.wyona.commons.io.Path(request.getServletPath()), identity, role);
try {
log.debug("Check authorization: realm: " + realm + ", path: " + path + ", identity: " + identity.getUsername() + ", role: " + role.getName());
authorized = realm.getPolicyManager().authorize(path, identity, role);
log.debug("Check authorization result: " + authorized);
} catch (Exception e) {
log.error(e.getMessage(), e);
throw new ServletException(e.getMessage(), e);
}
if(!authorized) {
log.warn("Access denied: " + getRequestURLQS(request, null, false));
if(!request.isSecure()) {
if(sslPort != null) {
log.info("Redirect to SSL ...");
try {
URL url = new URL(getRequestURLQS(request, null, false).toString());
url = new URL("https", url.getHost(), new Integer(sslPort).intValue(), url.getFile());
response.setHeader("Location", url.toString());
// TODO: Yulup has a bug re TEMPORARY_REDIRECT
//response.setStatus(javax.servlet.http.HttpServletResponse.SC_TEMPORARY_REDIRECT);
response.setStatus(javax.servlet.http.HttpServletResponse.SC_MOVED_PERMANENTLY);
return response;
} catch (Exception e) {
log.error(e);
}
} else {
log.warn("SSL does not seem to be configured!");
}
}
// TODO: Shouldn't this be here instead of at the beginning of service() ...?
//if(doAuthenticate(request, response) != null) return response;
// Check if this is a neutron request, a Sunbird/Calendar request or just a common GET request
// Also see e-mail about recognizing a WebDAV request: http://lists.w3.org/Archives/Public/w3c-dist-auth/2006AprJun/0064.html
StringBuffer sb = new StringBuffer("");
String neutronVersions = request.getHeader("Neutron");
String clientSupportedAuthScheme = request.getHeader("WWW-Authenticate");
if (clientSupportedAuthScheme != null && clientSupportedAuthScheme.equals("Neutron-Auth")) {
log.debug("Neutron Versions supported by client: " + neutronVersions);
log.debug("Authentication Scheme supported by client: " + clientSupportedAuthScheme);
sb.append("<?xml version=\"1.0\"?>");
sb.append("<exception xmlns=\"http://www.wyona.org/neutron/1.0\" type=\"authorization\">");
sb.append("<message>Authorization denied: " + getRequestURLQS(request, null, true) + "</message>");
sb.append("<authentication>");
sb.append("<original-request url=\"" + getRequestURLQS(request, null, true) + "\"/>");
//TODO: Also support https ...
sb.append("<login url=\"" + getRequestURLQS(request, "yanel.usecase=neutron-auth", true) + "\" method=\"POST\">");
sb.append("<form>");
sb.append("<message>Enter username and password for \"" + realm.getName() + "\" at \"" + realm.getMountPoint() + "\"</message>");
sb.append("<param description=\"Username\" name=\"username\"/>");
sb.append("<param description=\"Password\" name=\"password\"/>");
sb.append("</form>");
sb.append("</login>");
// NOTE: Needs to be a full URL, because user might switch the server ...
sb.append("<logout url=\"" + getRequestURLQS(request, "yanel.usecase=logout", true) + "\" realm=\"" + realm.getName() + "\"/>");
sb.append("</authentication>");
sb.append("</exception>");
log.debug("Neutron-Auth response: " + sb);
response.setContentType("application/xml");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_UNAUTHORIZED);
response.setHeader("WWW-Authenticate", "NEUTRON-AUTH");
PrintWriter w = response.getWriter();
w.print(sb);
} else if (request.getRequestURI().endsWith(".ics")) {
log.warn("Somebody seems to ask for a Calendar (ICS) ...");
response.setHeader("WWW-Authenticate", "BASIC realm=\"" + realm.getName() + "\"");
response.sendError(response.SC_UNAUTHORIZED);
} else {
getXHTMLAuthenticationForm(request, response, realm, null);
}
return response;
} else {
log.info("Access granted: " + getRequestURLQS(request, null, false));
return null;
}
}
/**
* Rebuilds the request URL, applying the realm's proxy host name, port and prefix
* settings if configured, and appends the given query string. If xml is true,
* ampersands are escaped as &amp; so the URL can be embedded within XML.
*/
private String getRequestURLQS(HttpServletRequest request, String addQS, boolean xml) {
//Realm realm = map.getRealm(new Path(request.getServletPath()));
try {
Realm realm = map.getRealm(request.getServletPath());
// TODO: Handle this exception more gracefully!
if (realm == null) log.error("No realm found for path " + new Path(request.getServletPath()));
String proxyHostName = realm.getProxyHostName();
String proxyPort = realm.getProxyPort();
String proxyPrefix = realm.getProxyPrefix();
URL url = null;
url = new URL(request.getRequestURL().toString());
if (proxyHostName != null) {
url = new URL(url.getProtocol(), proxyHostName, url.getPort(), url.getFile());
}
if (proxyPort != null) {
if (proxyPort.length() > 0) {
url = new URL(url.getProtocol(), url.getHost(), new Integer(proxyPort).intValue(), url.getFile());
} else {
url = new URL(url.getProtocol(), url.getHost(), url.getDefaultPort(), url.getFile());
}
}
if (proxyPrefix != null) {
url = new URL(url.getProtocol(), url.getHost(), url.getPort(), url.getFile().substring(proxyPrefix.length()));
}
if(proxyHostName != null || proxyPort != null || proxyPrefix != null) {
log.debug("Proxy enabled request: " + url);
}
String urlQS = url.toString();
if (request.getQueryString() != null) {
urlQS = urlQS + "?" + request.getQueryString();
if (addQS != null) urlQS = urlQS + "&" + addQS;
} else {
if (addQS != null) urlQS = urlQS + "?" + addQS;
}
if (xml) urlQS = urlQS.replaceAll("&", "&");
log.debug("Request: " + urlQS);
return urlQS;
} catch (Exception e) {
log.error(e);
return null;
}
}
/**
* WebDAV PROPFIND implementation (currently Depth 0 and 1 only).
* Also see https://svn.apache.org/repos/asf/tomcat/container/branches/tc5.0.x/catalina/src/share/org/apache/catalina/servlets/WebdavServlet.java
* Also maybe interesting http://sourceforge.net/projects/openharmonise
*/
public void doPropfind(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
Resource resource = getResource(request, response);
//Node node = resource.getRealm().getSitetree().getNode(resource.getPath());
Node node = sitetree.getNode(resource.getRealm(),resource.getPath());
String depth = request.getHeader("Depth");
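// NOTE: RFC 2518 defines a missing Depth header as "infinity"; a null header would currently cause a NullPointerException below.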
StringBuffer sb = new StringBuffer("<?xml version=\"1.0\"?>");
sb.append("<multistatus xmlns=\"DAV:\">");
if (depth.equals("0")) {
if (node.isCollection()) {
sb.append(" <response>");
sb.append(" <href>"+request.getRequestURI()+"</href>");
sb.append(" <propstat>");
sb.append(" <prop>");
sb.append(" <resourcetype><collection/></resourcetype>");
sb.append(" <getcontenttype>http/unix-directory</getcontenttype>");
sb.append(" </prop>");
sb.append(" <status>HTTP/1.1 200 OK</status>");
sb.append(" </propstat>");
sb.append(" </response>");
} else if (node.isResource()) {
sb.append(" <response>");
sb.append(" <href>"+request.getRequestURI()+"</href>");
sb.append(" <propstat>");
sb.append(" <prop>");
sb.append(" <resourcetype/>");
// TODO: Should getcontenttype also be set for resources?
sb.append(" <getcontenttype>http/unix-directory</getcontenttype>");
sb.append(" </prop>");
sb.append(" <status>HTTP/1.1 200 OK</status>");
sb.append(" </propstat>");
sb.append(" </response>");
} else {
log.error("Neither collection nor resource!");
}
} else if (depth.equals("1")) {
Node[] children = node.getChildren();
if (children != null) {
for (int i = 0; i < children.length; i++) {
if (children[i].isCollection()) {
sb.append(" <response>\n");
sb.append(" <href>" + request.getRequestURI() + children[i].getPath() + "/</href>\n");
sb.append(" <propstat>\n");
sb.append(" <prop>\n");
sb.append(" <displayname>A Directory</displayname>\n");
sb.append(" <resourcetype><collection/></resourcetype>\n");
sb.append(" <getcontenttype>http/unix-directory</getcontenttype>\n");
sb.append(" </prop>\n");
sb.append(" <status>HTTP/1.1 200 OK</status>\n");
sb.append(" </propstat>\n");
sb.append(" </response>\n");
} else if(children[i].isResource()) {
sb.append(" <response>\n");
sb.append(" <href>"+request.getRequestURI()+children[i].getPath()+"</href>\n");
sb.append(" <propstat>\n");
sb.append(" <prop>\n");
sb.append(" <displayname>A File</displayname>\n");
sb.append(" <resourcetype/>\n");
sb.append(" <getcontenttype>http/unix-directory</getcontenttype>\n");
sb.append(" </prop>\n");
sb.append(" <status>HTTP/1.1 200 OK</status>\n");
sb.append(" </propstat>\n");
sb.append(" </response>\n");
} else {
log.error("Neither collection nor resource: " + children[i].getPath());
}
}
} else {
log.warn("No children!");
}
} else if (depth.equals("infinity")) {
log.warn("TODO: List children and their children and their children ...");
} else {
log.error("No such depth: " + depth);
}
sb.append("</multistatus>");
//response.setStatus(javax.servlet.http.HttpServletResponse.SC_MULTI_STATUS);
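// NOTE: The Servlet API offers no SC_MULTI_STATUS constant (207 is a WebDAV status code), hence the deprecated two-argument setStatus call.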
response.setStatus(207, "Multi-Status");
PrintWriter w = response.getWriter();
w.print(sb);
}
/**
* HTTP OPTIONS implementation (advertises minimal WebDAV class 1 support).
*/
public void doOptions(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
response.setHeader("DAV", "1");
// TODO: Is there anything else to do?!
}
/**
* Authentication
* @return null when authentication successful, otherwise return response
*/
public HttpServletResponse doAuthenticate(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
try {
Realm realm = map.getRealm(request.getServletPath());
Path path = map.getPath(realm, request.getServletPath());
//Realm realm = map.getRealm(new Path(request.getServletPath()));
// HTML Form based authentication
String loginUsername = request.getParameter("yanel.login.username");
if(loginUsername != null) {
HttpSession session = request.getSession(true);
try {
if (realm.getIdentityManager().authenticate(loginUsername, request.getParameter("yanel.login.password"))) {
log.debug("Realm: " + realm);
session.setAttribute(IDENTITY_KEY, new Identity(loginUsername, null));
return null;
} else {
log.warn("Login failed: " + loginUsername);
getXHTMLAuthenticationForm(request, response, realm, "Login failed!");
return response;
}
} catch (Exception e) {
log.warn("Login failed: " + loginUsername + " " + e);
getXHTMLAuthenticationForm(request, response, realm, "Login failed!");
return response;
}
}
// Neutron-Auth based authentication
String yanelUsecase = request.getParameter("yanel.usecase");
if(yanelUsecase != null && yanelUsecase.equals("neutron-auth")) {
log.debug("Neutron Authentication ...");
String username = null;
String password = null;
String originalRequest = null;
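// The request body is expected to look roughly as follows (inferred from the parsing
// code below; only the original-request attribute and the param children are read,
// the root element name and the example values are illustrative):
//
// <neutron-auth>
//   <original-request url="http://127.0.0.1:8080/yanel/hello.html"/>
//   <param name="username">alice</param>
//   <param name="password">secret</param>
// </neutron-auth>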
DefaultConfigurationBuilder builder = new DefaultConfigurationBuilder();
try {
Configuration config = builder.build(request.getInputStream());
Configuration originalRequestConfig = config.getChild("original-request");
originalRequest = originalRequestConfig.getAttribute("url", null);
Configuration[] paramConfig = config.getChildren("param");
for (int i = 0; i < paramConfig.length; i++) {
String paramName = paramConfig[i].getAttribute("name", null);
if (paramName != null) {
if (paramName.equals("username")) {
username = paramConfig[i].getValue();
} else if (paramName.equals("password")) {
password = paramConfig[i].getValue();
}
}
}
} catch(Exception e) {
log.warn(e);
}
log.debug("Username: " + username);
if (username != null) {
HttpSession session = request.getSession(true);
log.debug("Realm ID: " + realm.getID());
if (realm.getIdentityManager().authenticate(username, password)) {
log.info("Authentication successful: " + username);
session.setAttribute(IDENTITY_KEY, new Identity(username, null));
// TODO: send some XML content, e.g. <authentication-successful/>
response.setContentType("text/plain");
response.setStatus(response.SC_OK);
PrintWriter writer = response.getWriter();
writer.print("Neutron Authentication Successful!");
return response;
} else {
log.warn("Neutron Authentication failed: " + username);
// TODO: Refactor this code with the one from doAuthenticate ...
log.debug("Original Request: " + originalRequest);
StringBuffer sb = new StringBuffer("");
sb.append("<?xml version=\"1.0\"?>");
sb.append("<exception xmlns=\"http://www.wyona.org/neutron/1.0\" type=\"authentication\">");
sb.append("<message>Authentication failed!</message>");
sb.append("<authentication>");
// TODO: ...
sb.append("<original-request url=\"" + originalRequest + "\"/>");
//sb.append("<original-request url=\"" + getRequestURLQS(request, null, true) + "\"/>");
//TODO: Also support https ...
// TODO: ...
sb.append("<login url=\"" + originalRequest + "&yanel.usecase=neutron-auth" + "\" method=\"POST\">");
//sb.append("<login url=\"" + getRequestURLQS(request, "yanel.usecase=neutron-auth", true) + "\" method=\"POST\">");
sb.append("<form>");
sb.append("<message>Enter username and password for \"" + realm.getName() + "\" at \"" + realm.getMountPoint() + "\"</message>");
sb.append("<param description=\"Username\" name=\"username\"/>");
sb.append("<param description=\"Password\" name=\"password\"/>");
sb.append("</form>");
sb.append("</login>");
// NOTE: Needs to be a full URL, because user might switch the server ...
// TODO: ...
sb.append("<logout url=\"" + originalRequest + "&yanel.usecase=logout" + "\" realm=\"" + realm.getName() + "\"/>");
sb.append("</authentication>");
sb.append("</exception>");
log.debug("Neutron-Auth response: " + sb);
response.setContentType("application/xml");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_UNAUTHORIZED);
response.setHeader("WWW-Authenticate", "NEUTRON-AUTH");
PrintWriter w = response.getWriter();
w.print(sb);
return response;
}
} else {
// TODO: Refactor resp. reuse response from above ...
log.warn("Neutron Authentication failed because username is NULL!");
StringBuffer sb = new StringBuffer("");
sb.append("<?xml version=\"1.0\"?>");
sb.append("<exception xmlns=\"http://www.wyona.org/neutron/1.0\" type=\"authentication\">");
sb.append("<message>Authentication failed because no username was sent!</message>");
sb.append("<authentication>");
// TODO: ...
sb.append("<original-request url=\"" + originalRequest + "\"/>");
//sb.append("<original-request url=\"" + getRequestURLQS(request, null, true) + "\"/>");
//TODO: Also support https ...
// TODO: ...
sb.append("<login url=\"" + originalRequest + "&yanel.usecase=neutron-auth" + "\" method=\"POST\">");
//sb.append("<login url=\"" + getRequestURLQS(request, "yanel.usecase=neutron-auth", true) + "\" method=\"POST\">");
sb.append("<form>");
sb.append("<message>Enter username and password for \"" + realm.getName() + "\" at \"" + realm.getMountPoint() + "\"</message>");
sb.append("<param description=\"Username\" name=\"username\"/>");
sb.append("<param description=\"Password\" name=\"password\"/>");
sb.append("</form>");
sb.append("</login>");
// NOTE: Needs to be a full URL, because user might switch the server ...
// TODO: ...
sb.append("<logout url=\"" + originalRequest + "&yanel.usecase=logout" + "\" realm=\"" + realm.getName() + "\"/>");
sb.append("</authentication>");
sb.append("</exception>");
response.setContentType("application/xml");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_UNAUTHORIZED);
response.setHeader("WWW-Authenticate", "NEUTRON-AUTH");
PrintWriter writer = response.getWriter();
writer.print(sb);
return response;
}
} else {
log.debug("Neutron Authentication successful.");
return null;
}
} catch (Exception e) {
log.error(e.getMessage(), e);
throw new ServletException(e.getMessage(), e);
}
}
/**
* Logout from Yanel: removes the identity from the session.
*/
public HttpServletResponse doLogout(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
log.info("Logout from Yanel ...");
HttpSession session = request.getSession(true);
session.setAttribute(IDENTITY_KEY, null);
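// Setting the session attribute to null removes the identity (equivalent to removeAttribute).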
String clientSupportedAuthScheme = request.getHeader("WWW-Authenticate");
if (clientSupportedAuthScheme != null && clientSupportedAuthScheme.equals("Neutron-Auth")) {
// TODO: send some XML content, e.g. <logout-successful/>
response.setContentType("text/plain");
response.setStatus(response.SC_OK);
PrintWriter writer = response.getWriter();
writer.print("Neutron Logout Successful!");
return response;
}
return null;
}
/**
* Microsoft Internet Explorer does not understand application/xhtml+xml
* See http://en.wikipedia.org/wiki/Criticisms_of_Internet_Explorer#XHTML
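* If the client does not announce application/xhtml+xml in its Accept header, the content type is downgraded to text/html.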
*/
public String patchContentType(String contentType, HttpServletRequest request) throws ServletException, IOException {
String httpAcceptMediaTypes = request.getHeader("Accept");
log.debug("HTTP Accept Media Types: " + httpAcceptMediaTypes);
if (contentType != null && contentType.equals("application/xhtml+xml") && httpAcceptMediaTypes != null && httpAcceptMediaTypes.indexOf("application/xhtml+xml") < 0) {
log.info("Patch contentType with text/html because client (" + request.getHeader("User-Agent") + ") does not seem to understand application/xhtml+xml");
return "text/html";
}
return contentType;
}
/**
* Intercept InputStream and log content ...
*/
public InputStream intercept(InputStream in) throws IOException {
java.io.ByteArrayOutputStream baos = new java.io.ByteArrayOutputStream();
byte[] buf = new byte[8192];
int bytesR;
while ((bytesR = in.read(buf)) != -1) {
baos.write(buf, 0, bytesR);
}
// Buffer within memory (TODO: Maybe replace with File-buffering ...)
// http://www-128.ibm.com/developerworks/java/library/j-io1/
byte[] memBuffer = baos.toByteArray();
log.error("DEBUG: InputStream: " + baos);
return new java.io.ByteArrayInputStream(memBuffer);
}
/**
* Serializes the internal "yanel" document, either as raw XML (yanel.format=xml)
* or transformed to XHTML via the info/exception screen XSLT.
*/
private void setYanelOutput(HttpServletRequest request, HttpServletResponse response, Document doc) throws ServletException {
try {
String yanelFormat = request.getParameter("yanel.format");
if(yanelFormat != null && yanelFormat.equals("xml")) {
response.setContentType("application/xml");
OutputStream out = response.getOutputStream();
javax.xml.transform.TransformerFactory.newInstance().newTransformer().transform(new javax.xml.transform.dom.DOMSource(doc), new javax.xml.transform.stream.StreamResult(out));
out.close();
} else {
response.setContentType("application/xhtml+xml");
Transformer transformer = TransformerFactory.newInstance().newTransformer(new StreamSource(xsltInfoAndException));
transformer.transform(new javax.xml.transform.dom.DOMSource(doc), new javax.xml.transform.stream.StreamResult(response.getWriter()));
}
} catch (Exception e) {
log.error(e.getMessage(), e);
throw new ServletException(e.getMessage());
}
}
/**
* Custom XHTML Form for authentication
*/
public void getXHTMLAuthenticationForm(HttpServletRequest request, HttpServletResponse response, Realm realm, String message) throws ServletException, IOException {
org.w3c.dom.Document doc = null;
javax.xml.parsers.DocumentBuilderFactory dbf= javax.xml.parsers.DocumentBuilderFactory.newInstance();
try {
javax.xml.parsers.DocumentBuilder parser = dbf.newDocumentBuilder();
org.w3c.dom.DOMImplementation impl = parser.getDOMImplementation();
org.w3c.dom.DocumentType doctype = null;
doc = impl.createDocument(NAMESPACE, "yanel", doctype);
Element rootElement = doc.getDocumentElement();
if (message != null) {
Element messageElement = (Element) rootElement.appendChild(doc.createElement("message"));
messageElement.appendChild(doc.createTextNode(message));
}
Element requestElement = (Element) rootElement.appendChild(doc.createElement("request"));
requestElement.setAttribute("urlqs", getRequestURLQS(request, null, true));
Element realmElement = (Element) rootElement.appendChild(doc.createElement("realm"));
realmElement.setAttribute("name", realm.getName());
realmElement.setAttribute("mount-point", realm.getMountPoint().toString());
response.setContentType("application/xhtml+xml; charset=UTF-8");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_UNAUTHORIZED);
Transformer transformer = TransformerFactory.newInstance().newTransformer(new StreamSource(xsltLoginScreen));
transformer.transform(new javax.xml.transform.dom.DOMSource(doc),
new javax.xml.transform.stream.StreamResult(response.getWriter()));
} catch (Exception e) {
log.error(e.getMessage(), e);
throw new ServletException(e.getMessage());
}
}
/**
* Write to output stream of modifiable resource
*/
private void write(InputStream in, OutputStream out, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
if (out != null) {
log.debug("Content-Type: " + request.getContentType());
// TODO: Compare mime-type from response with mime-type of resource
//if (contentType.equals("text/xml")) { ... }
byte[] buffer = new byte[8192];
int bytesRead;
while ((bytesRead = in.read(buffer)) != -1) {
out.write(buffer, 0, bytesRead);
}
out.flush();
out.close();
StringBuffer sb = new StringBuffer();
sb.append("<?xml version=\"1.0\"?>");
sb.append("<html>");
sb.append("<body>");
sb.append("<p>Data has been saved ...</p>");
sb.append("</body>");
sb.append("</html>");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK);
response.setContentType("application/xhtml+xml");
PrintWriter w = response.getWriter();
w.print(sb);
log.info("Data has been saved ...");
return;
} else {
log.error("OutputStream is null!");
StringBuffer sb = new StringBuffer();
sb.append("<?xml version=\"1.0\"?>");
sb.append("<html>");
sb.append("<body>");
sb.append("<p>Exception: OutputStream is null!</p>");
sb.append("</body>");
sb.append("</html>");
PrintWriter w = response.getWriter();
w.print(sb);
response.setContentType("application/xhtml+xml");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
return;
}
}
}
| src/core/java/org/wyona/yanel/servlet/YanelServlet.java | package org.wyona.yanel.servlet;
import java.io.File;
import java.io.BufferedReader;
import java.io.InputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.Writer;
import java.net.URL;
import java.util.Enumeration;
import javax.servlet.ServletConfig;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.stream.StreamSource;
import org.wyona.yanel.core.Path;
import org.wyona.yanel.core.Resource;
import org.wyona.yanel.core.ResourceTypeDefinition;
import org.wyona.yanel.core.ResourceTypeIdentifier;
import org.wyona.yanel.core.ResourceTypeRegistry;
import org.wyona.yanel.core.Yanel;
import org.wyona.yanel.core.api.attributes.ModifiableV1;
import org.wyona.yanel.core.api.attributes.ModifiableV2;
import org.wyona.yanel.core.api.attributes.VersionableV2;
import org.wyona.yanel.core.api.attributes.ViewableV1;
import org.wyona.yanel.core.api.attributes.ViewableV2;
import org.wyona.yanel.core.attributes.viewable.View;
import org.wyona.yanel.core.navigation.Node;
import org.wyona.yanel.core.navigation.Sitetree;
import org.wyona.yanel.core.map.Map;
import org.wyona.yanel.core.map.Realm;
import org.wyona.yanel.servlet.CreateUsecaseHelper;
import org.wyona.yanel.servlet.communication.HttpRequest;
import org.wyona.yanel.servlet.communication.HttpResponse;
import org.wyona.yanel.util.ResourceAttributeHelper;
import org.wyona.security.core.AuthenticationException;
import org.wyona.security.core.api.Identity;
import org.wyona.security.core.api.IdentityManager;
import org.wyona.security.core.api.PolicyManager;
import org.wyona.security.core.api.Role;
import org.apache.log4j.Category;
import org.apache.avalon.framework.configuration.Configuration;
import org.apache.avalon.framework.configuration.DefaultConfigurationBuilder;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
/**
*
*/
public class YanelServlet extends HttpServlet {
private static Category log = Category.getInstance(YanelServlet.class);
private ServletConfig config;
ResourceTypeRegistry rtr;
//PolicyManager pm;
//IdentityManager im;
Map map;
Yanel yanel;
Sitetree sitetree;
File xsltInfoAndException;
File xsltLoginScreen;
private static String IDENTITY_KEY = "identity";
private static String NAMESPACE = "http://www.wyona.org/yanel/1.0";
private static final String METHOD_PROPFIND = "PROPFIND";
private static final String METHOD_OPTIONS = "OPTIONS";
private static final String METHOD_GET = "GET";
private static final String METHOD_POST = "POST";
private static final String METHOD_PUT = "PUT";
private static final String METHOD_DELETE = "DELETE";
private String sslPort = null;
/**
*
*/
public void init(ServletConfig config) throws ServletException {
this.config = config;
xsltInfoAndException = org.wyona.commons.io.FileUtil.file(config.getServletContext().getRealPath("/"), config.getInitParameter("exception-and-info-screen-xslt"));
xsltLoginScreen = org.wyona.commons.io.FileUtil.file(config.getServletContext().getRealPath("/"), config.getInitParameter("login-screen-xslt"));
try {
yanel = Yanel.getInstance();
yanel.init();
rtr = yanel.getResourceTypeRegistry();
map = (Map) yanel.getBeanFactory().getBean("map");
sitetree = (Sitetree) yanel.getBeanFactory().getBean("nav-sitetree");
sslPort = config.getInitParameter("ssl-port");
} catch (Exception e) {
log.error(e);
throw new ServletException(e.getMessage(), e);
}
}
/**
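* Entry point for all requests: handles logout, authentication and authorization first and
* then delegates to the implementation of the respective HTTP method.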
*
*/
public void service(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
String httpAcceptMediaTypes = request.getHeader("Accept");
log.debug("HTTP Accept Media Types: " + httpAcceptMediaTypes);
String httpUserAgent = request.getHeader("User-Agent");
log.debug("HTTP User Agent: " + httpUserAgent);
String httpAcceptLanguage = request.getHeader("Accept-Language");
log.debug("HTTP Accept Language: " + httpAcceptLanguage);
// Logout from Yanel
String yanelUsecase = request.getParameter("yanel.usecase");
if(yanelUsecase != null && yanelUsecase.equals("logout")) {
if(doLogout(request, response) != null) return;
}
// Authentication
if(doAuthenticate(request, response) != null) return;
// Check authorization
if(doAuthorize(request, response) != null) return;
// Delegate ...
String method = request.getMethod();
if (method.equals(METHOD_PROPFIND)) {
doPropfind(request, response);
} else if (method.equals(METHOD_GET)) {
doGet(request, response);
} else if (method.equals(METHOD_POST)) {
doPost(request, response);
} else if (method.equals(METHOD_PUT)) {
doPut(request, response);
} else if (method.equals(METHOD_DELETE)) {
doDelete(request, response);
} else if (method.equals(METHOD_OPTIONS)) {
doOptions(request, response);
} else {
log.error("No such method implemented: " + method);
response.sendError(response.SC_NOT_IMPLEMENTED);
}
}
/**
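* HTTP GET implementation: creates a new resource if yanel.usecase=create is set,
* otherwise returns the content of the requested resource.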
*
*/
public void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// Check if a new resource shall be created ...
String yanelUsecase = request.getParameter("yanel.usecase");
if(yanelUsecase != null && yanelUsecase.equals("create")) {
CreateUsecaseHelper creator = new CreateUsecaseHelper();
creator.create(request, response, yanel);
return;
}
getContent(request, response);
}
/**
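* Resolves realm, path and resource type for the request, retrieves a view from the
* resource and streams it to the client; errors are reported via setYanelOutput().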
*
*/
private void getContent(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
View view = null;
org.w3c.dom.Document doc = null;
javax.xml.parsers.DocumentBuilderFactory dbf= javax.xml.parsers.DocumentBuilderFactory.newInstance();
dbf.setNamespaceAware(true);
try {
javax.xml.parsers.DocumentBuilder parser = dbf.newDocumentBuilder();
org.w3c.dom.DOMImplementation impl = parser.getDOMImplementation();
org.w3c.dom.DocumentType doctype = null;
doc = impl.createDocument(NAMESPACE, "yanel", doctype);
} catch(Exception e) {
log.error(e.getMessage(), e);
throw new ServletException(e.getMessage());
}
Element rootElement = doc.getDocumentElement();
String servletContextRealPath = config.getServletContext().getRealPath("/");
rootElement.setAttribute("servlet-context-real-path", servletContextRealPath);
//log.debug("servletContextRealPath: " + servletContextRealPath);
//log.debug("contextPath: " + request.getContextPath());
//log.debug("servletPath: " + request.getServletPath());
Element requestElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "request"));
requestElement.setAttributeNS(NAMESPACE, "uri", request.getRequestURI());
requestElement.setAttributeNS(NAMESPACE, "servlet-path", request.getServletPath());
HttpSession session = request.getSession(true);
Element sessionElement = (Element) rootElement.appendChild(doc.createElement("session"));
sessionElement.setAttribute("id", session.getId());
Enumeration attrNames = session.getAttributeNames();
if (!attrNames.hasMoreElements()) {
Element sessionNoAttributesElement = (Element) sessionElement.appendChild(doc.createElement("no-attributes"));
}
while (attrNames.hasMoreElements()) {
String name = (String)attrNames.nextElement();
String value = session.getAttribute(name).toString();
Element sessionAttributeElement = (Element) sessionElement.appendChild(doc.createElement("attribute"));
sessionAttributeElement.setAttribute("name", name);
sessionAttributeElement.appendChild(doc.createTextNode(value));
}
Realm realm;
Path path;
ResourceTypeIdentifier rti;
try {
realm = map.getRealm(request.getServletPath());
path = map.getPath(realm, request.getServletPath());
rti = yanel.getResourceManager().getResourceTypeIdentifier(realm, path);
} catch (Exception e) {
String message = "URL could not be mapped to realm/path " + e.getMessage();
log.error(message, e);
Element exceptionElement = (Element) rootElement.appendChild(doc.createElement("exception"));
exceptionElement.appendChild(doc.createTextNode(message));
setYanelOutput(request, response, doc);
response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
return;
}
//String rti = map.getResourceTypeIdentifier(new Path(request.getServletPath()));
Resource res = null;
long lastModified = -1;
long size = -1;
if (rti != null) {
ResourceTypeDefinition rtd = rtr.getResourceTypeDefinition(rti.getUniversalName());
if (rtd == null) {
String message = "No such resource type registered: " + rti.getUniversalName() + ", check " + rtr.getConfigurationFile();
log.error(message);
Element exceptionElement = (Element) rootElement.appendChild(doc.createElement("exception"));
exceptionElement.appendChild(doc.createTextNode(message));
setYanelOutput(request, response, doc);
response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
return;
}
Element rtiElement = (Element) rootElement.appendChild(doc.createElement("resource-type-identifier"));
rtiElement.setAttribute("namespace", rtd.getResourceTypeNamespace());
rtiElement.setAttribute("local-name", rtd.getResourceTypeLocalName());
try {
HttpRequest httpRequest = new HttpRequest(request);
HttpResponse httpResponse = new HttpResponse(response);
res = yanel.getResourceManager().getResource(httpRequest, httpResponse, realm, path, rtd, rti);
if (res != null) {
Element resourceElement = (Element) rootElement.appendChild(doc.createElement("resource"));
if (ResourceAttributeHelper.hasAttributeImplemented(res, "Viewable", "1")) {
log.info("Resource is viewable V1");
Element viewElement = (Element) resourceElement.appendChild(doc.createElement("view"));
viewElement.appendChild(doc.createTextNode("View Descriptors: " + ((ViewableV1) res).getViewDescriptors()));
String viewId = request.getParameter("yanel.resource.viewid");
try {
view = ((ViewableV1) res).getView(request, viewId);
} catch(org.wyona.yarep.core.NoSuchNodeException e) {
// TODO: Log all 404 within a dedicated file (with client info attached) such that an admin can react to it ...
String message = "No such node exception: " + e;
log.warn(e);
//log.error(e.getMessage(), e);
Element exceptionElement = (Element) rootElement.appendChild(doc.createElement("exception"));
exceptionElement.appendChild(doc.createTextNode(message));
exceptionElement.setAttribute("status", "404");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND);
setYanelOutput(request, response, doc);
return;
} catch(Exception e) {
log.error(e.getMessage(), e);
String message = e.toString();
log.error(e.getMessage(), e);
Element exceptionElement = (Element) rootElement.appendChild(doc.createElement("exception"));
exceptionElement.appendChild(doc.createTextNode(message));
exceptionElement.setAttribute("status", "500");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
setYanelOutput(request, response, doc);
return;
}
} else if (ResourceAttributeHelper.hasAttributeImplemented(res, "Viewable", "2")) {
log.info("Resource is viewable V2");
String viewId = request.getParameter("yanel.resource.viewid");
Element viewElement = (Element) resourceElement.appendChild(doc.createElement("view"));
viewElement.appendChild(doc.createTextNode("View Descriptors: " + ((ViewableV2) res).getViewDescriptors()));
size = ((ViewableV2) res).getSize();
Element sizeElement = (Element) resourceElement.appendChild(doc.createElement("size"));
sizeElement.appendChild(doc.createTextNode(String.valueOf(size)));
try {
view = ((ViewableV2) res).getView(viewId);
} catch(org.wyona.yarep.core.NoSuchNodeException e) {
// TODO: Log all 404 within a dedicated file (with client info attached) such that an admin can react to it ...
String message = "No such node exception: " + e;
log.warn(e);
//log.error(e.getMessage(), e);
Element exceptionElement = (Element) rootElement.appendChild(doc.createElementNS(NAMESPACE, "exception"));
exceptionElement.appendChild(doc.createTextNode(message));
exceptionElement.setAttributeNS(NAMESPACE, "status", "404");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_FOUND);
setYanelOutput(request, response, doc);
return;
}
} else {
Element noViewElement = (Element) resourceElement.appendChild(doc.createElement("not-viewable"));
noViewElement.appendChild(doc.createTextNode(res.getClass().getName() + " is not viewable!"));
}
if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "2")) {
lastModified = ((ModifiableV2) res).getLastModified();
Element lastModifiedElement = (Element) resourceElement.appendChild(doc.createElement("last-modified"));
lastModifiedElement.appendChild(doc.createTextNode(new java.util.Date(lastModified).toString()));
} else {
Element noLastModifiedElement = (Element) resourceElement.appendChild(doc.createElement("no-last-modified"));
}
if (ResourceAttributeHelper.hasAttributeImplemented(res, "Versionable", "2")) {
// retrieve the revisions, but only in the meta usecase (for performance reasons):
if (request.getParameter("yanel.resource.meta") != null) {
String[] revisions = ((VersionableV2)res).getRevisions();
Element revisionsElement = (Element) resourceElement.appendChild(doc.createElement("revisions"));
if (revisions != null) {
for (int i=0; i<revisions.length; i++) {
Element revisionElement = (Element) revisionsElement.appendChild(doc.createElement("revision"));
revisionElement.appendChild(doc.createTextNode(revisions[i]));
}
} else {
Element noRevisionsYetElement = (Element) resourceElement.appendChild(doc.createElement("no-revisions-yet"));
}
}
} else {
Element notVersionableElement = (Element) resourceElement.appendChild(doc.createElement("not-versionable"));
}
} else {
Element resourceIsNullElement = (Element) rootElement.appendChild(doc.createElement("resource-is-null"));
}
} catch(Exception e) {
log.error(e.getMessage(), e);
String message = e.toString();
Element exceptionElement = (Element) rootElement.appendChild(doc.createElement("exception"));
exceptionElement.appendChild(doc.createTextNode(message));
setYanelOutput(request, response, doc);
response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
return;
}
} else {
Element noRTIFoundElement = (Element) rootElement.appendChild(doc.createElement("no-resource-type-identifier-found"));
noRTIFoundElement.setAttribute("servlet-path", request.getServletPath());
}
String usecase = request.getParameter("yanel.resource.usecase");
if (usecase != null && usecase.equals("checkout")) {
log.debug("Checkout data ...");
// TODO: Implement checkout ...
log.warn("Acquire lock has not been implemented yet ...!");
// acquireLock();
}
String meta = request.getParameter("yanel.resource.meta");
if (meta != null) {
if (meta.length() > 0) {
log.error("DEBUG: meta length: " + meta.length());
} else {
log.error("DEBUG: Show all meta");
}
response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK);
setYanelOutput(request, response, doc);
return;
}
if (view != null) {
// check if the view contains the response (otherwise assume that the resource
// wrote the response, and just return).
if (!view.isResponse()) return;
response.setContentType(patchContentType(view.getMimeType(), request));
InputStream is = view.getInputStream();
//BufferedReader reader = new BufferedReader(new InputStreamReader(is));
//String line;
//System.out.println("getContentXML: "+path);
//while ((line = reader.readLine()) != null) System.out.println(line);
byte buffer[] = new byte[8192];
int bytesRead;
if (is != null) {
// TODO: Yarep does not set the returned Stream to null resp. is missing Exception Handling for the constructor. Exceptions should not be handled here, but rather within Yarep or whatever repository layer is being used ...
bytesRead = is.read(buffer);
if (bytesRead == -1) {
String message = "InputStream of view does not seem to contain any data!";
Element exceptionElement = (Element) rootElement.appendChild(doc.createElement("exception"));
exceptionElement.appendChild(doc.createTextNode(message));
setYanelOutput(request, response, doc);
response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
return;
}
// TODO: Compare If-Modified-Since with lastModified and return 304 without content resp. check on ETag
String ifModifiedSince = request.getHeader("If-Modified-Since");
if (ifModifiedSince != null) {
log.warn("TODO: Implement 304 ...");
}
java.io.OutputStream os = response.getOutputStream();
os.write(buffer, 0, bytesRead);
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
if(lastModified >= 0) response.setDateHeader("Last-Modified", lastModified);
return;
} else {
String message = "InputStream of view is null!";
Element exceptionElement = (Element) rootElement.appendChild(doc.createElement("exception"));
exceptionElement.appendChild(doc.createTextNode(message));
}
} else {
String message = "View is null!";
Element exceptionElement = (Element) rootElement.appendChild(doc.createElement("exception"));
exceptionElement.appendChild(doc.createTextNode(message));
}
setYanelOutput(request, response, doc);
response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
return;
}
/**
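* HTTP POST implementation: saves resp. checks in data depending on
* yanel.resource.usecase, creates a new Atom entry for application/atom+xml requests,
* and falls back to getContent() otherwise.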
*
*/
public void doPost(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
String value = request.getParameter("yanel.resource.usecase");
if (value != null && value.equals("save")) {
log.debug("Save data ...");
save(request, response);
return;
} else if (value != null && value.equals("checkin")) {
log.debug("Checkin data ...");
save(request, response);
// TODO: Implement checkin ...
log.warn("Release lock has not been implemented yet ...");
// releaseLock();
return;
} else {
log.info("No parameter yanel.resource.usecase!");
String contentType = request.getContentType();
if (contentType.indexOf("application/atom+xml") >= 0) {
InputStream in = intercept(request.getInputStream());
// Create new Atom entry
try {
String atomEntryUniversalName = "<{http://www.wyona.org/yanel/resource/1.0}atom-entry/>";
org.wyona.yanel.core.map.Realm realm = yanel.getMap().getRealm(request.getServletPath());
Path newEntryPath = yanel.getMap().getPath(realm, request.getServletPath() + "/" + new java.util.Date().getTime() + ".xml");
log.error("DEBUG: Realm and Path of new Atom entry: " + realm + " " + newEntryPath);
Resource atomEntryResource = yanel.getResourceManager().getResource(request, response, realm, newEntryPath, new ResourceTypeRegistry().getResourceTypeDefinition(atomEntryUniversalName), new org.wyona.yanel.core.ResourceTypeIdentifier(atomEntryUniversalName, null));
((ModifiableV2)atomEntryResource).write(in);
byte buffer[] = new byte[8192];
int bytesRead;
InputStream resourceIn = ((ModifiableV2)atomEntryResource).getInputStream();
OutputStream responseOut = response.getOutputStream();
while ((bytesRead = resourceIn.read(buffer)) != -1) {
responseOut.write(buffer, 0, bytesRead);
}
// TODO: Fix Location ...
response.setHeader("Location", "http://ulysses.wyona.org" + newEntryPath);
response.setStatus(javax.servlet.http.HttpServletResponse.SC_CREATED);
return;
} catch (Exception e) {
log.error(e.getMessage(), e);
throw new IOException(e.getMessage());
}
}
getContent(request, response);
}
}
/**
* HTTP PUT implementation
*/
public void doPut(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
// TODO: Reuse code from doPost resp. share code with doPut
String value = request.getParameter("yanel.resource.usecase");
if (value != null && value.equals("save")) {
log.debug("Save data ...");
save(request, response);
return;
} else if (value != null && value.equals("checkin")) {
log.debug("Checkin data ...");
save(request, response);
// TODO: Implement checkin ...
log.warn("Release lock has not been implemented yet ...!");
// releaseLock();
return;
} else {
log.warn("No parameter yanel.resource.usecase!");
String contentType = request.getContentType();
if (contentType.indexOf("application/atom+xml") >= 0) {
InputStream in = intercept(request.getInputStream());
// Overwrite existing atom entry
try {
String atomEntryUniversalName = "<{http://www.wyona.org/yanel/resource/1.0}atom-entry/>";
org.wyona.yanel.core.map.Realm realm = yanel.getMap().getRealm(request.getServletPath());
Path entryPath = yanel.getMap().getPath(realm, request.getServletPath());
log.error("DEBUG: Realm and Path of new Atom entry: " + realm + " " + entryPath);
Resource atomEntryResource = yanel.getResourceManager().getResource(request, response, realm, entryPath, new ResourceTypeRegistry().getResourceTypeDefinition(atomEntryUniversalName), new org.wyona.yanel.core.ResourceTypeIdentifier(atomEntryUniversalName, null));
// TODO: There seems to be a problem ...
((ModifiableV2)atomEntryResource).write(in);
// NOTE: This method does not update the "updated" date
/*
OutputStream out = ((ModifiableV2)atomEntry).getOutputStream(entryPath);
byte buffer[] = new byte[8192];
int bytesRead;
while ((bytesRead = in.read(buffer)) != -1) {
out.write(buffer, 0, bytesRead);
}
*/
log.info("Atom entry has been saved: " + entryPath);
response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK);
return;
} catch (Exception e) {
log.error(e.getMessage(), e);
throw new IOException(e.getMessage());
}
} else {
save(request, response);
/*
log.warn("TODO: WebDAV PUT ...");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_NOT_IMPLEMENTED);
return;
*/
}
}
}
/**
* HTTP DELETE implementation
*/
public void doDelete(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
try {
Resource res = getResource(request, response);
if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "2")) {
if (((ModifiableV2) res).delete()) {
log.debug("Resource has been deleted: " + res);
response.setStatus(response.SC_OK);
return;
} else {
log.warn("Resource could not be deleted: " + res);
response.setStatus(response.SC_FORBIDDEN);
return;
}
} else {
log.error("Resource '" + res + "' has interface ModifiableV2 not implemented." );
response.sendError(response.SC_NOT_IMPLEMENTED);
return;
}
} catch (Exception e) {
log.error("Could not delete resource with URL " + request.getRequestURL() + " " + e.getMessage(), e);
throw new ServletException(e.getMessage(), e);
}
}
/**
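* Resolves the Yanel resource for the servlet path of the given request, or returns null
* if the resource cannot be resolved.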
*
*/
private Resource getResource(HttpServletRequest request, HttpServletResponse response) {
try {
Realm realm = map.getRealm(request.getServletPath());
Path path = map.getPath(realm, request.getServletPath());
HttpRequest httpRequest = new HttpRequest(request);
HttpResponse httpResponse = new HttpResponse(response);
Resource res = yanel.getResourceManager().getResource(httpRequest, httpResponse, realm, path);
return res;
} catch(Exception e) {
log.error(e.getMessage(), e);
return null;
}
}
/**
* Save data
*/
private void save(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
log.debug("Save data ...");
InputStream in = request.getInputStream();
java.io.ByteArrayOutputStream baos = new java.io.ByteArrayOutputStream();
byte[] buf = new byte[8192];
int bytesR;
while ((bytesR = in.read(buf)) != -1) {
baos.write(buf, 0, bytesR);
}
// Buffer within memory (TODO: Maybe replace with File-buffering ...)
// http://www-128.ibm.com/developerworks/java/library/j-io1/
byte[] memBuffer = baos.toByteArray();
// TODO: Should be delegated to resource type, e.g. <{http://...}xml/>!
// Check on well-formedness ...
String contentType = request.getContentType();
log.debug("Content-Type: " + contentType);
if (contentType.indexOf("application/xml") >= 0 || contentType.indexOf("application/xhtml+xml") >= 0) {
log.info("Check well-formedness ...");
javax.xml.parsers.DocumentBuilderFactory dbf= javax.xml.parsers.DocumentBuilderFactory.newInstance();
try {
javax.xml.parsers.DocumentBuilder parser = dbf.newDocumentBuilder();
// TODO: Get log messages into log4j ...
//parser.setErrorHandler(...);
// NOTE: The DOCTYPE is being resolved/retrieved (e.g. the XHTML schema from w3.org) even
// if isValidating is set to false.
// Hence, for performance and network reasons we use a local catalog ...
// Also see http://www.xml.com/pub/a/2004/03/03/catalogs.html
// resp. http://xml.apache.org/commons/components/resolver/
// TODO: What about a resolver factory?
parser.setEntityResolver(new org.apache.xml.resolver.tools.CatalogResolver());
parser.parse(new java.io.ByteArrayInputStream(memBuffer));
//org.w3c.dom.Document document = parser.parse(new ByteArrayInputStream(memBuffer));
} catch (org.xml.sax.SAXException e) {
log.warn("Data is not well-formed: "+e.getMessage());
StringBuffer sb = new StringBuffer();
sb.append("<?xml version=\"1.0\"?>");
sb.append("<exception xmlns=\"http://www.wyona.org/neutron/1.0\" type=\"data-not-well-formed\">");
sb.append("<message>Data is not well-formed: "+e.getMessage()+"</message>");
sb.append("</exception>");
response.setContentType("application/xml");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
PrintWriter w = response.getWriter();
w.print(sb);
return;
} catch (Exception e) {
log.error(e.getMessage(), e);
StringBuffer sb = new StringBuffer();
sb.append("<?xml version=\"1.0\"?>");
sb.append("<exception xmlns=\"http://www.wyona.org/neutron/1.0\" type=\"neutron\">");
//sb.append("<message>" + e.getStackTrace() + "</message>");
//sb.append("<message>" + e.getMessage() + "</message>");
sb.append("<message>" + e + "</message>");
sb.append("</exception>");
response.setContentType("application/xml");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
PrintWriter w = response.getWriter();
w.print(sb);
return;
}
log.info("Data seems to be well-formed :-)");
} else {
log.info("No well-formedness check required for content type: " + contentType);
}
java.io.ByteArrayInputStream memIn = new java.io.ByteArrayInputStream(memBuffer);
// IMPORTANT TODO: Use ModifiableV2.write(InputStream in) such that the resource can modify data during saving resp. check if getOutputStream() returns null and then use write() ...
OutputStream out = null;
Resource res = getResource(request, response);
if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "1")) {
out = ((ModifiableV1) res).getOutputStream(new Path(request.getServletPath()));
write(memIn, out, request, response);
return;
} else if (ResourceAttributeHelper.hasAttributeImplemented(res, "Modifiable", "2")) {
try {
out = ((ModifiableV2) res).getOutputStream();
if (out != null) {
write(memIn, out, request, response);
} else {
((ModifiableV2) res).write(memIn);
}
return;
} catch (Exception e) {
log.error(e.getMessage(), e);
throw new ServletException(e.getMessage(), e);
}
} else {
String message = res.getClass().getName() + " is not modifiable (neither V1 nor V2)!";
log.warn(message);
StringBuffer sb = new StringBuffer();
// TODO: Differentiate between Neutron based and other clients ...
sb.append("<?xml version=\"1.0\"?>");
sb.append("<exception xmlns=\"http://www.wyona.org/neutron/1.0\" type=\"neutron\">");
sb.append("<message>" + message + "</message>");
sb.append("</exception>");
response.setContentType("application/xml");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
PrintWriter w = response.getWriter();
w.print(sb);
return;
}
}
/**
* Authorize request (and also authenticate for HTTP BASIC)
*/
private HttpServletResponse doAuthorize(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
Role role = null;
// TODO: Replace hardcoded roles by mapping between roles and query strings ...
String value = request.getParameter("yanel.resource.usecase");
String contentType = request.getContentType();
String method = request.getMethod();
if (value != null && value.equals("save")) {
log.debug("Save data ...");
role = new Role("write");
} else if (value != null && value.equals("checkin")) {
log.debug("Checkin data ...");
role = new Role("write");
} else if (value != null && value.equals("checkout")) {
log.debug("Checkout data ...");
role = new Role("open");
} else if (contentType != null && contentType.indexOf("application/atom+xml") >= 0 && (method.equals(METHOD_PUT) || method.equals(METHOD_POST))) {
// TODO: Is posting atom entries different from a general post (see below)?!
log.error("DEBUG: Write/Checkin Atom entry ...");
role = new Role("write");
} else if (method.equals(METHOD_PUT) || method.equals(METHOD_POST)) {
log.error("DEBUG: Upload data ...");
role = new Role("write");
} else if (method.equals(METHOD_DELETE)) {
log.error("DEBUG: Delete resource ...");
role = new Role("delete");
} else {
log.debug("Role will be 'view'!");
role = new Role("view");
}
value = request.getParameter("yanel.usecase");
if (value != null && value.equals("create")) {
log.debug("Create new resource ...");
role = new Role("create");
}
boolean authorized = false;
Realm realm;
Path path;
try {
realm = map.getRealm(request.getServletPath());
path = map.getPath(realm, request.getServletPath());
} catch (Exception e) {
log.error(e.getMessage(), e);
throw new ServletException(e.getMessage(), e);
}
// HTTP BASIC Authorization (For clients such as for instance Sunbird, OpenOffice or cadaver)
// IMPORTANT NOTE: BASIC Authentication needs to be checked on every request, because clients often do not support session handling
String authorization = request.getHeader("Authorization");
log.debug("Checking for Authorization Header: " + authorization);
if (authorization != null) {
if (authorization.toUpperCase().startsWith("BASIC")) {
log.debug("Using BASIC authorization ...");
// Get encoded user and password, comes after "BASIC "
String userpassEncoded = authorization.substring(6);
// Decode it, using any base 64 decoder
sun.misc.BASE64Decoder dec = new sun.misc.BASE64Decoder();
String userpassDecoded = new String(dec.decodeBuffer(userpassEncoded));
log.debug("Username and Password Decoded: " + userpassDecoded);
String[] up = userpassDecoded.split(":");
String username = up[0];
String password = up[1];
log.debug("username: " + username + ", password: " + password);
try {
if (realm.getIdentityManager().authenticate(username, password)) {
authorized = realm.getPolicyManager().authorize(path, new Identity(username, null), new Role("view"));
if(authorized) {
return null;
} else {
log.warn("HTTP BASIC Authorization failed for " + username + "!");
response.setHeader("WWW-Authenticate", "BASIC realm=\"" + realm.getName() + "\"");
response.sendError(response.SC_UNAUTHORIZED);
PrintWriter writer = response.getWriter();
writer.print("BASIC Authorization Failed!");
return response;
}
} else {
log.warn("HTTP BASIC Authentication failed for " + username + "!");
response.setHeader("WWW-Authenticate", "BASIC realm=\"" + realm.getName() + "\"");
response.sendError(response.SC_UNAUTHORIZED);
PrintWriter writer = response.getWriter();
writer.print("BASIC Authentication Failed!");
return response;
}
} catch (Exception e) {
log.error(e.getMessage(), e);
throw new ServletException(e.getMessage(), e);
}
} else if (authorization.toUpperCase().startsWith("DIGEST")) {
log.error("DIGEST is not implemented");
authorized = false;
response.sendError(response.SC_UNAUTHORIZED);
response.setHeader("WWW-Authenticate", "DIGEST realm=\"" + realm.getName() + "\"");
PrintWriter writer = response.getWriter();
writer.print("DIGEST is not implemented!");
return response;
} else {
log.warn("No such authorization implemented resp. handled by session based authorization: " + authorization);
authorized = false;
}
}
// Custom Authorization
log.debug("Do session based custom authorization");
//String[] groupnames = {"null", "null"};
HttpSession session = request.getSession(true);
Identity identity = (Identity) session.getAttribute(IDENTITY_KEY);
if (identity == null) {
log.debug("Identity is WORLD");
identity = new Identity();
}
//authorized = pm.authorize(new org.wyona.commons.io.Path(request.getServletPath()), identity, role);
try {
log.debug("Check authorization: realm: " + realm + ", path: " + path + ", identity: " + identity.getUsername() + ", role: " + role.getName());
authorized = realm.getPolicyManager().authorize(path, identity, role);
log.debug("Check authorization result: " + authorized);
} catch (Exception e) {
log.error(e.getMessage(), e);
throw new ServletException(e.getMessage(), e);
}
if(!authorized) {
log.warn("Access denied: " + getRequestURLQS(request, null, false));
if(!request.isSecure()) {
if(sslPort != null) {
log.info("Redirect to SSL ...");
try {
URL url = new URL(getRequestURLQS(request, null, false).toString());
url = new URL("https", url.getHost(), new Integer(sslPort).intValue(), url.getFile());
response.setHeader("Location", url.toString());
// TODO: Yulup has a bug re TEMPORARY_REDIRECT
//response.setStatus(javax.servlet.http.HttpServletResponse.SC_TEMPORARY_REDIRECT);
response.setStatus(javax.servlet.http.HttpServletResponse.SC_MOVED_PERMANENTLY);
return response;
} catch (Exception e) {
log.error(e);
}
} else {
log.warn("SSL does not seem to be configured!");
}
}
// TODO: Shouldn't this be here instead of at the beginning of service() ...?
//if(doAuthenticate(request, response) != null) return response;
// Check if this is a neutron request, a Sunbird/Calendar request or just a common GET request
// Also see e-mail about recognizing a WebDAV request: http://lists.w3.org/Archives/Public/w3c-dist-auth/2006AprJun/0064.html
StringBuffer sb = new StringBuffer("");
String neutronVersions = request.getHeader("Neutron");
String clientSupportedAuthScheme = request.getHeader("WWW-Authenticate");
if (clientSupportedAuthScheme != null && clientSupportedAuthScheme.equals("Neutron-Auth")) {
log.debug("Neutron Versions supported by client: " + neutronVersions);
log.debug("Authentication Scheme supported by client: " + clientSupportedAuthScheme);
sb.append("<?xml version=\"1.0\"?>");
sb.append("<exception xmlns=\"http://www.wyona.org/neutron/1.0\" type=\"authorization\">");
sb.append("<message>Authorization denied: " + getRequestURLQS(request, null, true) + "</message>");
sb.append("<authentication>");
sb.append("<original-request url=\"" + getRequestURLQS(request, null, true) + "\"/>");
//TODO: Also support https ...
sb.append("<login url=\"" + getRequestURLQS(request, "yanel.usecase=neutron-auth", true) + "\" method=\"POST\">");
sb.append("<form>");
sb.append("<message>Enter username and password for \"" + realm.getName() + "\" at \"" + realm.getMountPoint() + "\"</message>");
sb.append("<param description=\"Username\" name=\"username\"/>");
sb.append("<param description=\"Password\" name=\"password\"/>");
sb.append("</form>");
sb.append("</login>");
// NOTE: Needs to be a full URL, because user might switch the server ...
sb.append("<logout url=\"" + getRequestURLQS(request, "yanel.usecase=logout", true) + "\" realm=\"" + realm.getName() + "\"/>");
sb.append("</authentication>");
sb.append("</exception>");
log.debug("Neutron-Auth response: " + sb);
response.setContentType("application/xml");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_UNAUTHORIZED);
response.setHeader("WWW-Authenticate", "NEUTRON-AUTH");
PrintWriter w = response.getWriter();
w.print(sb);
} else if (request.getRequestURI().endsWith(".ics")) {
log.warn("Somebody seems to ask for a Calendar (ICS) ...");
response.setHeader("WWW-Authenticate", "BASIC realm=\"" + realm.getName() + "\"");
response.sendError(response.SC_UNAUTHORIZED);
} else {
getXHTMLAuthenticationForm(request, response, realm, null);
}
return response;
} else {
log.info("Access granted: " + getRequestURLQS(request, null, false));
return null;
}
}
/**
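* Returns the request URL including the query string, rewritten according to the realm's
* proxy settings (host name, port, prefix); optionally appends an additional query string
* and escapes ampersands for XML.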
*
*/
private String getRequestURLQS(HttpServletRequest request, String addQS, boolean xml) {
//Realm realm = map.getRealm(new Path(request.getServletPath()));
try {
Realm realm = map.getRealm(request.getServletPath());
// TODO: Handle this exception more gracefully!
if (realm == null) log.error("No realm found for path " + new Path(request.getServletPath()));
String proxyHostName = realm.getProxyHostName();
String proxyPort = realm.getProxyPort();
String proxyPrefix = realm.getProxyPrefix();
URL url = null;
url = new URL(request.getRequestURL().toString());
if (proxyHostName != null) {
url = new URL(url.getProtocol(), proxyHostName, url.getPort(), url.getFile());
}
if (proxyPort != null) {
if (proxyPort.length() > 0) {
url = new URL(url.getProtocol(), url.getHost(), new Integer(proxyPort).intValue(), url.getFile());
} else {
url = new URL(url.getProtocol(), url.getHost(), url.getDefaultPort(), url.getFile());
}
}
if (proxyPrefix != null) {
url = new URL(url.getProtocol(), url.getHost(), url.getPort(), url.getFile().substring(proxyPrefix.length()));
}
if(proxyHostName != null || proxyPort != null || proxyPrefix != null) {
log.debug("Proxy enabled request: " + url);
}
String urlQS = url.toString();
if (request.getQueryString() != null) {
urlQS = urlQS + "?" + request.getQueryString();
if (addQS != null) urlQS = urlQS + "&" + addQS;
} else {
if (addQS != null) urlQS = urlQS + "?" + addQS;
}
if (xml) urlQS = urlQS.replaceAll("&", "&");
log.debug("Request: " + urlQS);
return urlQS;
} catch (Exception e) {
log.error(e);
return null;
}
}
/**
* Also see https://svn.apache.org/repos/asf/tomcat/container/branches/tc5.0.x/catalina/src/share/org/apache/catalina/servlets/WebdavServlet.java
* Also maybe interesting http://sourceforge.net/projects/openharmonise
*/
public void doPropfind(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
Resource resource = getResource(request, response);
//Node node = resource.getRealm().getSitetree().getNode(resource.getPath());
Node node = sitetree.getNode(resource.getRealm(),resource.getPath());
String depth = request.getHeader("Depth");
StringBuffer sb = new StringBuffer("<?xml version=\"1.0\"?>");
sb.append("<multistatus xmlns=\"DAV:\">");
if (depth.equals("0")) {
if (node.isCollection()) {
sb.append(" <response>");
sb.append(" <href>"+request.getRequestURI()+"</href>");
sb.append(" <propstat>");
sb.append(" <prop>");
sb.append(" <resourcetype><collection/></resourcetype>");
sb.append(" <getcontenttype>http/unix-directory</getcontenttype>");
sb.append(" </prop>");
sb.append(" <status>HTTP/1.1 200 OK</status>");
sb.append(" </propstat>");
sb.append(" </response>");
} else if (node.isResource()) {
sb.append(" <response>");
sb.append(" <href>"+request.getRequestURI()+"</href>");
sb.append(" <propstat>");
sb.append(" <prop>");
sb.append(" <resourcetype/>");
// TODO: Should getcontenttype also be set for resources?
sb.append(" <getcontenttype>http/unix-directory</getcontenttype>");
sb.append(" </prop>");
sb.append(" <status>HTTP/1.1 200 OK</status>");
sb.append(" </propstat>");
sb.append(" </response>");
} else {
log.error("Neither collection nor resource!");
}
} else if (depth.equals("1")) {
Node[] children = node.getChildren();
if (children != null) {
for (int i = 0; i < children.length; i++) {
if (children[i].isCollection()) {
sb.append(" <response>\n");
sb.append(" <href>" + request.getRequestURI() + children[i].getPath() + "/</href>\n");
sb.append(" <propstat>\n");
sb.append(" <prop>\n");
sb.append(" <displayname>A Directory</displayname>\n");
sb.append(" <resourcetype><collection/></resourcetype>\n");
sb.append(" <getcontenttype>http/unix-directory</getcontenttype>\n");
sb.append(" </prop>\n");
sb.append(" <status>HTTP/1.1 200 OK</status>\n");
sb.append(" </propstat>\n");
sb.append(" </response>\n");
} else if(children[i].isResource()) {
sb.append(" <response>\n");
sb.append(" <href>"+request.getRequestURI()+children[i].getPath()+"</href>\n");
sb.append(" <propstat>\n");
sb.append(" <prop>\n");
sb.append(" <displayname>A File</displayname>\n");
sb.append(" <resourcetype/>\n");
sb.append(" <getcontenttype>http/unix-directory</getcontenttype>\n");
sb.append(" </prop>\n");
sb.append(" <status>HTTP/1.1 200 OK</status>\n");
sb.append(" </propstat>\n");
sb.append(" </response>\n");
} else {
log.error("Neither collection nor resource: " + children[i].getPath());
}
}
} else {
log.warn("No children!");
}
} else if (depth.equals("infinity")) {
log.warn("TODO: List children and their children and their children ...");
} else {
log.error("No such depth: " + depth);
}
sb.append("</multistatus>");
//response.setStatus(javax.servlet.http.HttpServletResponse.SC_MULTI_STATUS);
response.setStatus(207, "Multi-Status");
PrintWriter w = response.getWriter();
w.print(sb);
}
/**
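* HTTP OPTIONS implementation: advertises WebDAV compliance class 1 via the DAV header.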
*
*/
public void doOptions(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
response.setHeader("DAV", "1");
// TODO: Is there anything else to do?!
}
/**
* Authentication
* @return null when authentication successful, otherwise return response
*/
public HttpServletResponse doAuthenticate(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
try {
Realm realm = map.getRealm(request.getServletPath());
Path path = map.getPath(realm, request.getServletPath());
//Realm realm = map.getRealm(new Path(request.getServletPath()));
// HTML Form based authentication
String loginUsername = request.getParameter("yanel.login.username");
if(loginUsername != null) {
HttpSession session = request.getSession(true);
try {
if (realm.getIdentityManager().authenticate(loginUsername, request.getParameter("yanel.login.password"))) {
log.debug("Realm: " + realm);
session.setAttribute(IDENTITY_KEY, new Identity(loginUsername, null));
return null;
} else {
log.warn("Login failed: " + loginUsername);
getXHTMLAuthenticationForm(request, response, realm, "Login failed!");
return response;
}
} catch (Exception e) {
log.warn("Login failed: " + loginUsername + " " + e);
getXHTMLAuthenticationForm(request, response, realm, "Login failed!");
return response;
}
}
// Neutron-Auth based authentication
String yanelUsecase = request.getParameter("yanel.usecase");
if(yanelUsecase != null && yanelUsecase.equals("neutron-auth")) {
log.debug("Neutron Authentication ...");
String username = null;
String password = null;
String originalRequest = null;
DefaultConfigurationBuilder builder = new DefaultConfigurationBuilder();
try {
Configuration config = builder.build(request.getInputStream());
Configuration originalRequestConfig = config.getChild("original-request");
originalRequest = originalRequestConfig.getAttribute("url", null);
Configuration[] paramConfig = config.getChildren("param");
for (int i = 0; i < paramConfig.length; i++) {
String paramName = paramConfig[i].getAttribute("name", null);
if (paramName != null) {
if (paramName.equals("username")) {
username = paramConfig[i].getValue();
} else if (paramName.equals("password")) {
password = paramConfig[i].getValue();
}
}
}
} catch(Exception e) {
log.warn(e);
}
log.debug("Username: " + username);
if (username != null) {
HttpSession session = request.getSession(true);
log.debug("Realm ID: " + realm.getID());
if (realm.getIdentityManager().authenticate(username, password)) {
log.info("Authentication successful: " + username);
session.setAttribute(IDENTITY_KEY, new Identity(username, null));
// TODO: send some XML content, e.g. <authentication-successful/>
response.setContentType("text/plain");
response.setStatus(response.SC_OK);
PrintWriter writer = response.getWriter();
writer.print("Neutron Authentication Successful!");
return response;
} else {
log.warn("Neutron Authentication failed: " + username);
// TODO: Refactor this code with the one from doAuthenticate ...
log.debug("Original Request: " + originalRequest);
StringBuffer sb = new StringBuffer("");
sb.append("<?xml version=\"1.0\"?>");
sb.append("<exception xmlns=\"http://www.wyona.org/neutron/1.0\" type=\"authentication\">");
sb.append("<message>Authentication failed!</message>");
sb.append("<authentication>");
// TODO: ...
sb.append("<original-request url=\"" + originalRequest + "\"/>");
//sb.append("<original-request url=\"" + getRequestURLQS(request, null, true) + "\"/>");
//TODO: Also support https ...
// TODO: ...
sb.append("<login url=\"" + originalRequest + "&yanel.usecase=neutron-auth" + "\" method=\"POST\">");
//sb.append("<login url=\"" + getRequestURLQS(request, "yanel.usecase=neutron-auth", true) + "\" method=\"POST\">");
sb.append("<form>");
sb.append("<message>Enter username and password for \"" + realm.getName() + "\" at \"" + realm.getMountPoint() + "\"</message>");
sb.append("<param description=\"Username\" name=\"username\"/>");
sb.append("<param description=\"Password\" name=\"password\"/>");
sb.append("</form>");
sb.append("</login>");
// NOTE: Needs to be a full URL, because user might switch the server ...
// TODO: ...
sb.append("<logout url=\"" + originalRequest + "&yanel.usecase=logout" + "\" realm=\"" + realm.getName() + "\"/>");
sb.append("</authentication>");
sb.append("</exception>");
log.debug("Neutron-Auth response: " + sb);
response.setContentType("application/xml");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_UNAUTHORIZED);
response.setHeader("WWW-Authenticate", "NEUTRON-AUTH");
PrintWriter w = response.getWriter();
w.print(sb);
return response;
}
} else {
// TODO: Refactor resp. reuse response from above ...
log.warn("Neutron Authentication failed because username is NULL!");
StringBuffer sb = new StringBuffer("");
sb.append("<?xml version=\"1.0\"?>");
sb.append("<exception xmlns=\"http://www.wyona.org/neutron/1.0\" type=\"authentication\">");
sb.append("<message>Authentication failed because no username was sent!</message>");
sb.append("<authentication>");
// TODO: ...
sb.append("<original-request url=\"" + originalRequest + "\"/>");
//sb.append("<original-request url=\"" + getRequestURLQS(request, null, true) + "\"/>");
//TODO: Also support https ...
// TODO: ...
sb.append("<login url=\"" + originalRequest + "&yanel.usecase=neutron-auth" + "\" method=\"POST\">");
//sb.append("<login url=\"" + getRequestURLQS(request, "yanel.usecase=neutron-auth", true) + "\" method=\"POST\">");
sb.append("<form>");
sb.append("<message>Enter username and password for \"" + realm.getName() + "\" at \"" + realm.getMountPoint() + "\"</message>");
sb.append("<param description=\"Username\" name=\"username\"/>");
sb.append("<param description=\"Password\" name=\"password\"/>");
sb.append("</form>");
sb.append("</login>");
// NOTE: Needs to be a full URL, because user might switch the server ...
// TODO: ...
sb.append("<logout url=\"" + originalRequest + "&yanel.usecase=logout" + "\" realm=\"" + realm.getName() + "\"/>");
sb.append("</authentication>");
sb.append("</exception>");
response.setContentType("application/xml");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_UNAUTHORIZED);
response.setHeader("WWW-Authenticate", "NEUTRON-AUTH");
PrintWriter writer = response.getWriter();
writer.print(sb);
return response;
}
} else {
log.debug("Neutron Authentication successful.");
return null;
}
} catch (Exception e) {
log.error(e.getMessage(), e);
throw new ServletException(e.getMessage(), e);
}
}
/**
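* Logs out from Yanel by removing the identity from the session; Neutron clients receive
* a plain text confirmation, otherwise null is returned.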
*
*/
public HttpServletResponse doLogout(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
log.info("Logout from Yanel ...");
HttpSession session = request.getSession(true);
session.setAttribute(IDENTITY_KEY, null);
String clientSupportedAuthScheme = request.getHeader("WWW-Authenticate");
if (clientSupportedAuthScheme != null && clientSupportedAuthScheme.equals("Neutron-Auth")) {
// TODO: send some XML content, e.g. <logout-successful/>
response.setContentType("text/plain");
response.setStatus(response.SC_OK);
PrintWriter writer = response.getWriter();
writer.print("Neutron Logout Successful!");
return response;
}
return null;
}
/**
* Microsoft Internet Explorer does not understand application/xhtml+xml
* See http://en.wikipedia.org/wiki/Criticisms_of_Internet_Explorer#XHTML
*/
public String patchContentType(String contentType, HttpServletRequest request) throws ServletException, IOException {
String httpAcceptMediaTypes = request.getHeader("Accept");
log.debug("HTTP Accept Media Types: " + httpAcceptMediaTypes);
if (contentType != null && contentType.equals("application/xhtml+xml") && httpAcceptMediaTypes != null && httpAcceptMediaTypes.indexOf("application/xhtml+xml") < 0) {
log.info("Patch contentType with text/html because client (" + request.getHeader("User-Agent") + ") does not seem to understand application/xhtml+xml");
return "text/html";
}
return contentType;
}
/**
* Intercept InputStream and log content ...
*/
public InputStream intercept(InputStream in) throws IOException {
java.io.ByteArrayOutputStream baos = new java.io.ByteArrayOutputStream();
byte[] buf = new byte[8192];
int bytesR;
while ((bytesR = in.read(buf)) != -1) {
baos.write(buf, 0, bytesR);
}
// Buffer within memory (TODO: Maybe replace with File-buffering ...)
// http://www-128.ibm.com/developerworks/java/library/j-io1/
byte[] memBuffer = baos.toByteArray();
log.error("DEBUG: InputStream: " + baos);
return new java.io.ByteArrayInputStream(memBuffer);
}
/**
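* Writes the given DOM document to the response, either as raw XML (yanel.format=xml)
* or transformed to XHTML using the exception-and-info XSLT.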
*
*/
private void setYanelOutput(HttpServletRequest request, HttpServletResponse response, Document doc) throws ServletException {
try {
String yanelFormat = request.getParameter("yanel.format");
if(yanelFormat != null && yanelFormat.equals("xml")) {
response.setContentType("application/xml");
OutputStream out = response.getOutputStream();
javax.xml.transform.TransformerFactory.newInstance().newTransformer().transform(new javax.xml.transform.dom.DOMSource(doc), new javax.xml.transform.stream.StreamResult(out));
out.close();
} else {
response.setContentType("application/xhtml+xml");
Transformer transformer = TransformerFactory.newInstance().newTransformer(new StreamSource(xsltInfoAndException));
transformer.transform(new javax.xml.transform.dom.DOMSource(doc), new javax.xml.transform.stream.StreamResult(response.getWriter()));
}
} catch (Exception e) {
log.error(e.getMessage(), e);
throw new ServletException(e.getMessage());
}
}
/**
* Custom XHTML Form for authentication
*/
public void getXHTMLAuthenticationForm(HttpServletRequest request, HttpServletResponse response, Realm realm, String message) throws ServletException, IOException {
org.w3c.dom.Document doc = null;
javax.xml.parsers.DocumentBuilderFactory dbf= javax.xml.parsers.DocumentBuilderFactory.newInstance();
try {
javax.xml.parsers.DocumentBuilder parser = dbf.newDocumentBuilder();
org.w3c.dom.DOMImplementation impl = parser.getDOMImplementation();
org.w3c.dom.DocumentType doctype = null;
doc = impl.createDocument(NAMESPACE, "yanel", doctype);
Element rootElement = doc.getDocumentElement();
if (message != null) {
Element messageElement = (Element) rootElement.appendChild(doc.createElement("message"));
messageElement.appendChild(doc.createTextNode(message));
}
Element requestElement = (Element) rootElement.appendChild(doc.createElement("request"));
requestElement.setAttribute("urlqs", getRequestURLQS(request, null, true));
Element realmElement = (Element) rootElement.appendChild(doc.createElement("realm"));
realmElement.setAttribute("name", realm.getName());
realmElement.setAttribute("mount-point", realm.getMountPoint().toString());
response.setContentType("application/xhtml+xml; charset=UTF-8");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_UNAUTHORIZED);
Transformer transformer = TransformerFactory.newInstance().newTransformer(new StreamSource(xsltLoginScreen));
transformer.transform(new javax.xml.transform.dom.DOMSource(doc),
new javax.xml.transform.stream.StreamResult(response.getWriter()));
} catch (Exception e) {
log.error(e.getMessage(), e);
throw new ServletException(e.getMessage());
}
}
/**
* Write to output stream of modifiable resource
*/
private void write(InputStream in, OutputStream out, HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
if (out != null) {
log.debug("Content-Type: " + request.getContentType());
// TODO: Compare mime-type from response with mime-type of resource
//if (contentType.equals("text/xml")) { ... }
byte[] buffer = new byte[8192];
int bytesRead;
while ((bytesRead = in.read(buffer)) != -1) {
out.write(buffer, 0, bytesRead);
}
out.flush();
out.close();
StringBuffer sb = new StringBuffer();
sb.append("<?xml version=\"1.0\"?>");
sb.append("<html>");
sb.append("<body>");
sb.append("<p>Data has been saved ...</p>");
sb.append("</body>");
sb.append("</html>");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_OK);
response.setContentType("application/xhtml+xml");
PrintWriter w = response.getWriter();
w.print(sb);
log.info("Data has been saved ...");
return;
} else {
log.error("OutputStream is null!");
StringBuffer sb = new StringBuffer();
sb.append("<?xml version=\"1.0\"?>");
sb.append("<html>");
sb.append("<body>");
sb.append("<p>Exception: OutputStream is null!</p>");
sb.append("</body>");
sb.append("</html>");
PrintWriter w = response.getWriter();
w.print(sb);
response.setContentType("application/xhtml+xml");
response.setStatus(javax.servlet.http.HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
return;
}
}
}
| namespace fixed
| src/core/java/org/wyona/yanel/servlet/YanelServlet.java | namespace fixed |
|
Java | apache-2.0 | 26b4cc679f0d813b3cb290a83bf2add00e504e73 | 0 | infraling/atomic,infraling/atomic | /*******************************************************************************
* Copyright 2016 Friedrich-Schiller-Universität Jena
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Contributors:
* Stephan Druskat - initial API and implementation
*******************************************************************************/
package org.corpus_tools.atomic.projects.wizard;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.corpus_tools.atomic.projects.Corpus;
import org.corpus_tools.atomic.projects.Document;
import org.corpus_tools.atomic.projects.ProjectNode;
import org.corpus_tools.atomic.ui.api.ExtendedViewerSupport;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.viewers.ITreeSelection;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.jface.viewers.TreeViewer;
import org.eclipse.jface.wizard.WizardPage;
import org.eclipse.swt.SWT;
import org.eclipse.swt.browser.Browser;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Monitor;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.custom.SashForm;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.widgets.Text;
import org.eclipse.swt.widgets.TreeItem;
import org.eclipse.swt.widgets.Group;
import org.eclipse.core.databinding.DataBindingContext;
import org.eclipse.jface.databinding.swt.WidgetProperties;
import org.eclipse.jface.databinding.viewers.ViewersObservables;
import org.eclipse.core.databinding.beans.BeanProperties;
import org.eclipse.core.databinding.observable.value.ComputedValue;
import org.eclipse.core.databinding.observable.value.IObservableValue;
/**
* A wizard page for the user to construct the structure of a project.
* <p>
* FIXME: SWTBot test this class!
*
* @author Stephan Druskat <[email protected]>
*/
public class NewAtomicProjectWizardPageProjectStructure extends WizardPage {
private DataBindingContext bindingContext;
private Text nameText;
private Text addSubCorpusNameText;
private Text addDocumentNameText;
private Text documentNameText;
private Text sourceTextText;
private Corpus model = createNewProject();
private Text projectNameText;
private Set<Control> corpusConstrols = new HashSet<>(), documentControls = new HashSet<>();
private TreeViewer projectTreeViewer;
private Button btnRemoveElement;
private Button btnNewDocument;
private Button btnNewSubCorpus;
private Group grpDocument;
private Label lblSourceText;
private Button browseSourceTextBtn;
private Label lblName;
/**
* Default constructor: sets the default page name, title, and description of the wizard page.
*/
public NewAtomicProjectWizardPageProjectStructure() {
super("Create the project structure");
setTitle("Create the project structure");
setDescription("Create the structure of the new project by adding corpora, subcorpora, and documents.");
/*
* FIXME TODO: Add context-sensitive help to Atomic so that the "?" button will show in the wizard. Add the following description to a help "window" of sorts: Every corpus must have a name and can contain n (sub-) corpora and n
* documents. Every document must have a name and must contain one source text. Must include the Eclipse Help plugin for this.
*/
}
/**
* Creates the default project model: a project named "Project" containing a single root corpus named "Root corpus".
*
* @return the newly created project model
*/
private Corpus createNewProject() {
Corpus project = new Corpus();
project.setName("Project");
Corpus root = new Corpus();
root.setName("Root corpus");
// Document d = new Document();
// d.setName("Document");
// root.addChild(d);
project.addChild(root);
return project;
}
/*
* @copydoc @see org.eclipse.jface.dialogs.IDialogPage#createControl(org.eclipse.swt.widgets.Composite)
*/
@Override
public void createControl(Composite parent) {
// Calculate and set good size and position for dialog
Monitor[] monitors = getShell().getDisplay().getMonitors();
Monitor activeMonitor = null;
Rectangle r = getShell().getBounds();
for (int i = 0; i < monitors.length; i++) {
if (monitors[i].getBounds().intersects(r)) {
activeMonitor = monitors[i];
}
}
Rectangle bounds = activeMonitor.getClientArea();
int boundsWidth = bounds.width;
int boundsHeight = bounds.height;
Point size = getShell().computeSize((int) (boundsWidth * (80.0f / 100.0f)), (int) (boundsHeight * (80.0f / 100.0f)));
int x = bounds.x + ((bounds.width - size.x) / 2);
getShell().setSize(size);
getShell().setLocation(x, 0);
// Create controls
Composite container = new Composite(parent, SWT.NULL);
setControl(container);
GridLayout layout = new GridLayout(1, false);
container.setLayout(layout);
// Project name
Group projectGroup = new Group(container, SWT.NONE);
projectGroup.setText("Project");
projectGroup.setLayout(new GridLayout(2, false));
projectGroup.setLayoutData(new GridData(SWT.FILL, SWT.TOP, true, false, 1, 1));
Label lblName_2 = new Label(projectGroup, SWT.NONE);
lblName_2.setLayoutData(new GridData(SWT.RIGHT, SWT.CENTER, false, false, 1, 1));
lblName_2.setText("Name:");
projectNameText = new Text(projectGroup, SWT.BORDER);
projectNameText.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1));
// Project contents
SashForm sashForm = new SashForm(container, SWT.HORIZONTAL);
sashForm.setLocation(0, 0);
GridData gridDataSashForm = new GridData(SWT.FILL, SWT.TOP, true, true, 1, 1);
gridDataSashForm.horizontalAlignment = SWT.FILL;
gridDataSashForm.verticalAlignment = SWT.FILL;
sashForm.setLayoutData(gridDataSashForm);
Composite leftComposite = new Composite(sashForm, SWT.NONE);
leftComposite.setLayout(new GridLayout(4, false));
final Button btnNewRootCorpus = new Button(leftComposite, SWT.NONE);
btnNewRootCorpus.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
String numberOfExistingRootCorpora = (getModel().getChildren().size() > 0) ? " " + String.valueOf(getModel().getChildren().size() + 1) : "";
Corpus newRootCorpus = new Corpus();
newRootCorpus.setName("Root corpus" + numberOfExistingRootCorpora);
getModel().addChild(newRootCorpus);
nameText.selectAll();
nameText.setFocus();
projectTreeViewer.refresh();
projectTreeViewer.setSelection(new StructuredSelection(newRootCorpus));
}
});
btnNewRootCorpus.setText("Add root corpus");
btnNewSubCorpus = new Button(leftComposite, SWT.NONE);
btnNewSubCorpus.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
// Can only be corpus, otherwise button will be disabled due to extra binding
Corpus parent = (Corpus) getSelectedElement();
Corpus newSubCorpus = new Corpus();
newSubCorpus.setName("New subcorpus");
parent.addChild(newSubCorpus);
projectTreeViewer.setSelection(new StructuredSelection(newSubCorpus));
nameText.selectAll();
nameText.setFocus();
projectTreeViewer.refresh();
projectTreeViewer.expandAll();
				projectTreeViewer.setSelection(new StructuredSelection(newSubCorpus));
			}
});
btnNewSubCorpus.setText("Add subcorpus");
btnNewDocument = new Button(leftComposite, SWT.NONE);
btnNewDocument.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
// Can only be corpus, otherwise button will be disabled due to extra binding
Corpus parent = (Corpus) getSelectedElement();
Document newDocument = new Document();
newDocument.setName("New document");
parent.addChild(newDocument);
projectTreeViewer.setSelection(new StructuredSelection(newDocument));
nameText.selectAll();
nameText.setFocus();
projectTreeViewer.refresh();
projectTreeViewer.expandAll();
projectTreeViewer.setSelection(new StructuredSelection(newDocument));
}
});
btnNewDocument.setText("Add document");
btnRemoveElement = new Button(leftComposite, SWT.NONE);
btnRemoveElement.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
TreeItem selectedItem = projectTreeViewer.getTree().getSelection()[0];
TreeItem parentItem = selectedItem.getParentItem();
Corpus parent;
int index;
if (parentItem == null) {
parent = getModel();
index = projectTreeViewer.getTree().indexOf(selectedItem);
}
else {
parent = (Corpus) parentItem.getData();
index = parentItem.indexOf(selectedItem);
}
List<ProjectNode> list = new ArrayList<ProjectNode>(parent.getChildren());
list.remove(index);
parent.setChildren(list);
}
});
btnRemoveElement.setText("Remove element");
projectTreeViewer = new TreeViewer(leftComposite, SWT.SINGLE | SWT.BORDER);
new Label(leftComposite, SWT.NONE);
projectTreeViewer.getTree().setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 4, 1));
projectTreeViewer.expandAll();
Composite rightComposite = new Composite(sashForm, SWT.NONE);
rightComposite.setLayout(new GridLayout(2, false));
lblName = new Label(rightComposite, SWT.NONE);
lblName.setLayoutData(new GridData(SWT.RIGHT, SWT.CENTER, false, false, 1, 1));
lblName.setText("Name:");
nameText = new Text(rightComposite, SWT.BORDER);
nameText.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1));
grpDocument = new Group(rightComposite, SWT.NONE);
grpDocument.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 2, 1));
grpDocument.setLayout(new GridLayout(2, false));
grpDocument.setText("Document");
lblSourceText = new Label(grpDocument, SWT.NONE);
lblSourceText.setLayoutData(new GridData(SWT.RIGHT, SWT.TOP, false, false, 1, 1));
lblSourceText.setText("Source text:");
sourceTextText = new Text(grpDocument, SWT.BORDER | SWT.MULTI);
sourceTextText.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1));
new Label(grpDocument, SWT.NONE);
browseSourceTextBtn = new Button(grpDocument, SWT.NONE);
browseSourceTextBtn.setLayoutData(new GridData(SWT.RIGHT, SWT.CENTER, false, false, 1, 1));
browseSourceTextBtn.setText("Browse");
sashForm.setWeights(new int[] { 1, 1 });
bindingContext = initDataBindings();
initExtraBindings(bindingContext);
}
/**
* Returns the selected element in the project tree viewer.
*
* @return the selected element in the project tree viewer
*/
protected ProjectNode getSelectedElement() {
IStructuredSelection selection = (IStructuredSelection) projectTreeViewer.getSelection();
if (selection.isEmpty())
return null;
return (ProjectNode) selection.getFirstElement();
}
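	/*
	 * Placeholder help implementation (cf. the FIXME in the constructor): for now this opens an external page in a separate browser shell.
	 */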
@Override
public void performHelp() {
Shell shell = new Shell(getShell());
shell.setText("My Custom Help !!");
shell.setLayout(new GridLayout());
shell.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true));
Browser browser = new Browser(shell, SWT.NONE);
browser.setUrl("http://stackoverflow.com/questions/7322489/cant-put-content-behind-swt-wizard-help-button");
browser.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true));
shell.open();
}
/**
* @return the model
*/
public Corpus getModel() {
return model;
}
/**
* Initializes the data bindings from model to widgets and returns the binding context.
*
* @return bindingContext the binding context
*/
private DataBindingContext initDataBindings() {
DataBindingContext bindingContext = new DataBindingContext();
IObservableValue treeViewerSelectionObserveSelection = ViewersObservables.observeSingleSelection(projectTreeViewer);
		IObservableValue textTextObserveWidget = WidgetProperties.text(SWT.Modify).observe(nameText);
IObservableValue treeViewerValueObserveDetailValue = BeanProperties.value("name").observeDetail(treeViewerSelectionObserveSelection);
bindingContext.bindValue(textTextObserveWidget, treeViewerValueObserveDetailValue);
return bindingContext;
}
/**
* Adds extra bindings that are not "real" data bindings (i.e., enabled/disabled buttons, etc.)
*
	 * @param dbc the data binding context to add the bindings to
*/
private void initExtraBindings(DataBindingContext dbc) {
// Observable value for selected element in tree viewer
final IObservableValue projectTreeViewerSelection = ViewersObservables.observeSingleSelection(projectTreeViewer);
// Enable the "Add document" and "Add subcorpus" buttons only if the currently selected element in the tree is a Corpus
IObservableValue corpusSelected = new ComputedValue(Boolean.TYPE) {
protected Object calculate() {
return Boolean.valueOf(projectTreeViewerSelection.getValue() != null && projectTreeViewerSelection.getValue() instanceof Corpus);
}
};
dbc.bindValue(WidgetProperties.enabled().observe(btnNewDocument), corpusSelected);
dbc.bindValue(WidgetProperties.enabled().observe(btnNewSubCorpus), corpusSelected);
// Enable the "Remove element" button only if the currently selected element in the tree is not null
IObservableValue anythingSelected = new ComputedValue(Boolean.TYPE) {
protected Object calculate() {
return Boolean.valueOf(projectTreeViewerSelection.getValue() != null);
}
};
dbc.bindValue(WidgetProperties.enabled().observe(btnRemoveElement), anythingSelected);
dbc.bindValue(WidgetProperties.enabled().observe(lblName), anythingSelected);
dbc.bindValue(WidgetProperties.enabled().observe(nameText), anythingSelected);
// Disable all document-relevant widgets when selected element is not of type Document
IObservableValue documentSelected = new ComputedValue(Boolean.TYPE) {
protected Object calculate() {
return Boolean.valueOf(projectTreeViewerSelection.getValue() != null && projectTreeViewerSelection.getValue() instanceof Document);
}
};
dbc.bindValue(WidgetProperties.enabled().observe(grpDocument), documentSelected);
dbc.bindValue(WidgetProperties.enabled().observe(sourceTextText), documentSelected);
dbc.bindValue(WidgetProperties.enabled().observe(lblSourceText), documentSelected);
dbc.bindValue(WidgetProperties.enabled().observe(browseSourceTextBtn), documentSelected);
// Bind model to the project tree viewer
ExtendedViewerSupport.bind(projectTreeViewer, getModel(), BeanProperties.list("children", Corpus.class), BeanProperties.value(ProjectNode.class, "name"), ProjectTreeWizardLabelProvider.class);
}
}
| plugins/org.corpus-tools.atomic/src/main/java/org/corpus_tools/atomic/projects/wizard/NewAtomicProjectWizardPageProjectStructure.java | /*******************************************************************************
* Copyright 2016 Friedrich-Schiller-Universität Jena
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Contributors:
* Stephan Druskat - initial API and implementation
*******************************************************************************/
package org.corpus_tools.atomic.projects.wizard;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.corpus_tools.atomic.projects.Corpus;
import org.corpus_tools.atomic.projects.Document;
import org.corpus_tools.atomic.projects.ProjectNode;
import org.corpus_tools.atomic.ui.api.ExtendedViewerSupport;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.ITreeSelection;
import org.eclipse.jface.viewers.LabelProvider;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.jface.viewers.TreeViewer;
import org.eclipse.jface.wizard.WizardPage;
import org.eclipse.swt.SWT;
import org.eclipse.swt.browser.Browser;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Monitor;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.custom.SashForm;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.widgets.Text;
import org.eclipse.swt.widgets.TreeItem;
import org.eclipse.ui.ISharedImages;
import org.eclipse.ui.PlatformUI;
import org.eclipse.swt.widgets.Group;
import org.eclipse.core.databinding.DataBindingContext;
import org.eclipse.core.databinding.property.list.IListProperty;
import org.eclipse.core.databinding.property.list.MultiListProperty;
import org.eclipse.jface.databinding.swt.WidgetProperties;
import org.eclipse.jface.databinding.viewers.ObservableListTreeContentProvider;
import org.eclipse.jface.databinding.viewers.ViewerSupport;
import org.eclipse.jface.databinding.viewers.ViewersObservables;
import org.eclipse.core.databinding.beans.BeanProperties;
import org.eclipse.core.databinding.observable.value.ComputedValue;
import org.eclipse.core.databinding.observable.value.IObservableValue;
/**
* A wizard page for the user to construct the structure of a project.
* <p>
* FIXME: SWTBot test this class!
*
* @author Stephan Druskat <[email protected]>
*/
public class NewAtomicProjectWizardPageProjectStructure extends WizardPage {
private DataBindingContext bindingContext;
private Text corpusNameText;
private Text addSubCorpusNameText;
private Text addDocumentNameText;
private Text documentNameText;
private Text sourceTextText;
private Corpus model = createNewProject();
private Text projectNameText;
private Set<Control> corpusConstrols = new HashSet<>(), documentControls = new HashSet<>();
private TreeViewer projectTreeViewer;
private Button btnRemoveElement;
/**
* Default constructor calling the constructor {@link #NewAtomicProjectWizardPageProjectStructure(String)} with the default page name.
*/
public NewAtomicProjectWizardPageProjectStructure() {
super("Create the project structure");
setTitle("Create the project structure");
setDescription("Create the structure of the new project by adding corpora, subcorpora, and documents.");
/*
	 * FIXME TODO: Add context-sensitive help to Atomic, then the "?" button will show in the wizard. Add the following description to a help "window" of sorts: Every corpus must have a name and can contain n (sub-) corpora and n
* documents. Every document must have a name and must contain one source text. Must include Eclipse Help plugin for this.
*/
}
/**
* TODO: Description
*
* @return
*/
private Corpus createNewProject() {
Corpus project = new Corpus();
project.setName("Project");
Corpus root = new Corpus();
root.setName("Root corpus");
Document d = new Document();
d.setName("Document");
root.addChild(d);
project.addChild(root);
return project;
}
/*
* @copydoc @see org.eclipse.jface.dialogs.IDialogPage#createControl(org.eclipse.swt.widgets.Composite)
*/
@Override
public void createControl(Composite parent) {
// Calculate and set good size and position for dialog
Monitor[] monitors = getShell().getDisplay().getMonitors();
Monitor activeMonitor = null;
Rectangle r = getShell().getBounds();
for (int i = 0; i < monitors.length; i++) {
if (monitors[i].getBounds().intersects(r)) {
activeMonitor = monitors[i];
}
}
Rectangle bounds = activeMonitor.getClientArea();
int boundsWidth = bounds.width;
int boundsHeight = bounds.height;
Point size = getShell().computeSize((int) (boundsWidth * (80.0f / 100.0f)), (int) (boundsHeight * (80.0f / 100.0f)));
int x = bounds.x + ((bounds.width - size.x) / 2);
getShell().setSize(size);
getShell().setLocation(x, 0);
// Create controls
Composite container = new Composite(parent, SWT.NULL);
setControl(container);
GridLayout layout = new GridLayout(1, false);
container.setLayout(layout);
// Project name
Group projectGroup = new Group(container, SWT.NONE);
projectGroup.setText("Project");
projectGroup.setLayout(new GridLayout(2, false));
projectGroup.setLayoutData(new GridData(SWT.FILL, SWT.TOP, true, false, 1, 1));
Label lblName_2 = new Label(projectGroup, SWT.NONE);
lblName_2.setLayoutData(new GridData(SWT.RIGHT, SWT.CENTER, false, false, 1, 1));
lblName_2.setText("Name:");
projectNameText = new Text(projectGroup, SWT.BORDER);
projectNameText.setText("New project");
projectNameText.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1));
// Project contents
SashForm sashForm = new SashForm(container, SWT.HORIZONTAL);
sashForm.setLocation(0, 0);
GridData gridDataSashForm = new GridData(SWT.FILL, SWT.TOP, true, true, 1, 1);
gridDataSashForm.horizontalAlignment = SWT.FILL;
gridDataSashForm.verticalAlignment = SWT.FILL;
sashForm.setLayoutData(gridDataSashForm);
Composite leftComposite = new Composite(sashForm, SWT.NONE);
leftComposite.setLayout(new GridLayout(2, false));
final Button btnNewCorpus = new Button(leftComposite, SWT.NONE);
btnNewCorpus.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
List<ProjectNode> corpusList = getModel().getChildren();
String numberOfExistingRootCorpora = (getModel().getChildren().size() > 0) ? " " + String.valueOf(getModel().getChildren().size() + 1) : "";
Corpus newRootCorpus = new Corpus();
newRootCorpus.setName("Root corpus" + numberOfExistingRootCorpora);
corpusList.add(newRootCorpus);
getModel().setChildren(corpusList);
corpusNameText.selectAll();
corpusNameText.setFocus();
projectTreeViewer.refresh();
projectTreeViewer.setSelection(new StructuredSelection(newRootCorpus));
}
});
btnNewCorpus.setText("New root corpus");
btnRemoveElement = new Button(leftComposite, SWT.NONE);
btnRemoveElement.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
TreeItem selectedItem = projectTreeViewer.getTree().getSelection()[0];
TreeItem parentItem = selectedItem.getParentItem();
Corpus parent;
int index;
if (parentItem == null) {
parent = getModel();
index = projectTreeViewer.getTree().indexOf(selectedItem);
}
else {
parent = (Corpus) parentItem.getData();
index = parentItem.indexOf(selectedItem);
}
List<ProjectNode> list = new ArrayList<ProjectNode>(parent.getChildren());
list.remove(index);
parent.setChildren(list);
}
});
btnRemoveElement.setText("Remove element");
projectTreeViewer = new TreeViewer(leftComposite, SWT.SINGLE);
new Label(leftComposite, SWT.NONE);
projectTreeViewer.getTree().setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 2, 1));
projectTreeViewer.expandAll();
Composite rightComposite = new Composite(sashForm, SWT.NONE);
rightComposite.setLayout(new GridLayout(1, false));
Group grpCorpus = new Group(rightComposite, SWT.NONE);
grpCorpus.setText("Corpus");
grpCorpus.setLayoutData(new GridData(SWT.FILL, SWT.TOP, true, false, 1, 1));
grpCorpus.setLayout(new GridLayout(3, false));
Label lblName = new Label(grpCorpus, SWT.NONE);
lblName.setLayoutData(new GridData(SWT.RIGHT, SWT.CENTER, false, false, 1, 1));
lblName.setText("Name:");
corpusNameText = new Text(grpCorpus, SWT.BORDER);
corpusNameText.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1));
getCorpusControls().add(corpusNameText);
Button saveCorpusNameBtn = new Button(grpCorpus, SWT.NONE);
saveCorpusNameBtn.setText("Save");
getCorpusControls().add(saveCorpusNameBtn);
saveCorpusNameBtn.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
// ProjectNode selectionParent = getCurrentSelectionParent();
}
});
Label lblAddSubcorpus = new Label(grpCorpus, SWT.NONE);
lblAddSubcorpus.setLayoutData(new GridData(SWT.RIGHT, SWT.CENTER, false, false, 1, 1));
lblAddSubcorpus.setText("Add subcorpus:");
addSubCorpusNameText = new Text(grpCorpus, SWT.BORDER);
addSubCorpusNameText.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1));
getCorpusControls().add(addSubCorpusNameText);
Button addSubCorpusBtn = new Button(grpCorpus, SWT.NONE);
addSubCorpusBtn.setText("Add");
getCorpusControls().add(addSubCorpusBtn);
addSubCorpusBtn.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
}
});
Label lblAddDocument = new Label(grpCorpus, SWT.NONE);
lblAddDocument.setLayoutData(new GridData(SWT.RIGHT, SWT.CENTER, false, false, 1, 1));
lblAddDocument.setText("Add document:");
addDocumentNameText = new Text(grpCorpus, SWT.BORDER);
addDocumentNameText.setLayoutData(new GridData(SWT.FILL, SWT.CENTER, true, false, 1, 1));
getCorpusControls().add(addDocumentNameText);
Button addDocumentBtn = new Button(grpCorpus, SWT.NONE);
addDocumentBtn.setText("Add");
getCorpusControls().add(addDocumentBtn);
addDocumentBtn.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
}
});
Group grpDocument = new Group(rightComposite, SWT.NONE);
grpDocument.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 1, 1));
grpDocument.setLayout(new GridLayout(3, false));
grpDocument.setText("Document");
Label lblName_1 = new Label(grpDocument, SWT.NONE);
lblName_1.setLayoutData(new GridData(SWT.RIGHT, SWT.CENTER, false, false, 1, 1));
lblName_1.setText("Name:");
documentNameText = new Text(grpDocument, SWT.BORDER);
documentNameText.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, false, 1, 1));
getDocumentControls().add(documentNameText);
Button saveDocumentNameBtn = new Button(grpDocument, SWT.NONE);
saveDocumentNameBtn.setText("Save");
getDocumentControls().add(saveDocumentNameBtn);
saveDocumentNameBtn.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
}
});
Label lblSourceText = new Label(grpDocument, SWT.NONE);
lblSourceText.setLayoutData(new GridData(SWT.RIGHT, SWT.TOP, false, false, 1, 1));
lblSourceText.setText("Source text:");
sourceTextText = new Text(grpDocument, SWT.BORDER | SWT.MULTI);
sourceTextText.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true, 2, 1));
getDocumentControls().add(sourceTextText);
new Label(grpDocument, SWT.NONE);
Button browseSourceTextBtn = new Button(grpDocument, SWT.NONE);
browseSourceTextBtn.setLayoutData(new GridData(SWT.RIGHT, SWT.CENTER, false, false, 1, 1));
browseSourceTextBtn.setText("Browse");
getDocumentControls().add(browseSourceTextBtn);
Button saveSourceTextBtn = new Button(grpDocument, SWT.NONE);
saveSourceTextBtn.setText("Save");
getDocumentControls().add(saveSourceTextBtn);
saveSourceTextBtn.addSelectionListener(new SelectionAdapter() {
@Override
public void widgetSelected(SelectionEvent e) {
}
});
sashForm.setWeights(new int[] { 1, 1 });
bindingContext = initDataBindings();
initExtraBindings(bindingContext);
}
@Override
public void performHelp() {
Shell shell = new Shell(getShell());
shell.setText("My Custom Help !!");
shell.setLayout(new GridLayout());
shell.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true));
Browser browser = new Browser(shell, SWT.NONE);
browser.setUrl("http://stackoverflow.com/questions/7322489/cant-put-content-behind-swt-wizard-help-button");
browser.setLayoutData(new GridData(SWT.FILL, SWT.FILL, true, true));
shell.open();
}
private ProjectNode getCurrentSelectionParent() {
ISelection selection = projectTreeViewer.getSelection();
if (selection instanceof ITreeSelection) {
ITreeSelection treeSelection = (ITreeSelection) selection;
Object selectionParent = treeSelection.getPaths()[0].getParentPath().getLastSegment();
if (selectionParent instanceof ProjectNode) {
return (ProjectNode) selectionParent;
}
else {
return null;
}
}
return null;
}
/**
* @return the model
*/
public Corpus getModel() {
return model;
}
/**
* @return the corpusConstrols
*/
private Set<Control> getCorpusControls() {
return corpusConstrols;
}
/**
* @return the documentControls
*/
private Set<Control> getDocumentControls() {
return documentControls;
}
/**
* TODO: Description
*
* @return
*/
private DataBindingContext initDataBindings() {
DataBindingContext bindingContext = new DataBindingContext();
IObservableValue treeViewerSelectionObserveSelection = ViewersObservables.observeSingleSelection(projectTreeViewer);
IObservableValue textTextObserveWidget = WidgetProperties.text(SWT.Modify).observe(corpusNameText);//SWTObservables.observeText(beanText, SWT.Modify);
IObservableValue treeViewerValueObserveDetailValue = BeanProperties.value("name").observeDetail(treeViewerSelectionObserveSelection);
bindingContext.bindValue(textTextObserveWidget, treeViewerValueObserveDetailValue);
return bindingContext;
}
/**
* TODO: Description
*
* @param bindingContext2
*/
private void initExtraBindings(DataBindingContext dbc) {
final IObservableValue projectTreeViewerSelection = ViewersObservables.observeSingleSelection(projectTreeViewer);
IObservableValue projectElementSelected = new ComputedValue(Boolean.TYPE) {
protected Object calculate() {
return Boolean.valueOf(projectTreeViewerSelection.getValue() != null);
}
};
// dbc.bindValue(WidgetProperties.enabled().observe(btnNewCorpus), projectElementSelected);
ExtendedViewerSupport.bind(projectTreeViewer, getModel(), BeanProperties.list("children", Corpus.class), BeanProperties.value(ProjectNode.class, "name"), ProjectTreeWizardLabelProvider.class);
}
}
| Start cleaning up UI of NewAtomicProjectWizardPageProjectStructure | plugins/org.corpus-tools.atomic/src/main/java/org/corpus_tools/atomic/projects/wizard/NewAtomicProjectWizardPageProjectStructure.java | Start cleaning up UI of NewAtomicProjectWizardPageProjectStructure |
|
Java | apache-2.0 | 4e9daf60cd4752152e05bf3f27854b4266ab6711 | 0 | wso2/product-is,mefarazath/product-is,mefarazath/product-is,harsha1979/product-is,wso2/product-is,madurangasiriwardena/product-is,wso2/product-is,madurangasiriwardena/product-is,mefarazath/product-is,harsha1979/product-is,milindaperera/product-is,mefarazath/product-is,madurangasiriwardena/product-is,wso2/product-is,madurangasiriwardena/product-is,milindaperera/product-is,harsha1979/product-is,wso2/product-is,mefarazath/product-is,madurangasiriwardena/product-is,milindaperera/product-is,milindaperera/product-is,milindaperera/product-is,harsha1979/product-is | /*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.identity.integration.test.saml;
import org.apache.axis2.context.ConfigurationContext;
import org.apache.axis2.context.ConfigurationContextFactory;
import org.apache.catalina.LifecycleException;
import org.apache.catalina.core.StandardHost;
import org.apache.catalina.startup.Tomcat;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.Header;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import org.opensaml.xml.util.Base64;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Factory;
import org.testng.annotations.Test;
import org.wso2.carbon.automation.engine.context.TestUserMode;
import org.wso2.carbon.identity.application.common.model.xsd.Claim;
import org.wso2.carbon.identity.application.common.model.xsd.ClaimMapping;
import org.wso2.carbon.identity.application.common.model.xsd.InboundAuthenticationConfig;
import org.wso2.carbon.identity.application.common.model.xsd.InboundAuthenticationRequestConfig;
import org.wso2.carbon.identity.application.common.model.xsd.Property;
import org.wso2.carbon.identity.application.common.model.xsd.ServiceProvider;
import org.wso2.carbon.identity.sso.saml.stub.types.SAMLSSOServiceProviderDTO;
import org.wso2.carbon.um.ws.api.stub.ClaimValue;
import org.wso2.identity.integration.common.clients.application.mgt.ApplicationManagementServiceClient;
import org.wso2.identity.integration.common.clients.sso.saml.SAMLSSOConfigServiceClient;
import org.wso2.identity.integration.common.clients.sso.saml.query.ClientSignKeyDataHolder;
import org.wso2.identity.integration.common.clients.sso.saml.query.QueryClientUtils;
import org.wso2.identity.integration.common.clients.sso.saml.query.SAMLQueryClient;
import org.wso2.identity.integration.common.clients.usermgt.remote.RemoteUserStoreManagerServiceClient;
import org.wso2.identity.integration.common.utils.ISIntegrationTest;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Test cases for the SAML Query profile: after a SAML SSO login, the issued assertion is queried back via AssertionIDRequest and AttributeQuery.
*/
public class SAMLQueryProfileTestCase extends ISIntegrationTest {
public static final String TENANT_DOMAIN_PARAM = "tenantDomain";
private static final Log log = LogFactory.getLog(SAMLQueryProfileTestCase.class);
// SAML Application attributes
private static final String USER_AGENT = "Apache-HttpClient/4.2.5 (java 1.5)";
private static final String APPLICATION_NAME = "SAML-SSO-Query-TestApplication";
private static final String INBOUND_AUTH_TYPE = "samlsso";
private static final String ATTRIBUTE_CS_INDEX_VALUE = "1239245949";
private static final String ATTRIBUTE_CS_INDEX_NAME = "attrConsumServiceIndex";
private static final String WSO2IS_URL = "https://localhost:9853/";
private static final String SAML_SSO_URL = WSO2IS_URL + "samlsso";
    private static final String COMMON_AUTH_URL = WSO2IS_URL + "commonauth";
private static final String ACS_URL = "http://localhost:8490/%s/home.jsp";
private static final String SAML_SSO_LOGIN_URL = "http://localhost:8490/%s/samlsso?SAML2.HTTPBinding=%s";
private static final String NAMEID_FORMAT = "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress";
private static final String LOGIN_URL = "/carbon/admin/login.jsp";
//Claim Uris
private static final String firstNameClaimURI = "http://wso2.org/claims/givenname";
private static final String lastNameClaimURI = "http://wso2.org/claims/lastname";
private static final String emailClaimURI = "http://wso2.org/claims/emailaddress";
private static final String profileName = "default";
private ApplicationManagementServiceClient applicationManagementServiceClient;
private SAMLSSOConfigServiceClient ssoConfigServiceClient;
private RemoteUserStoreManagerServiceClient remoteUSMServiceClient;
private HttpClient httpClient;
private SAMLConfig config;
private Tomcat tomcatServer;
private String resultPage;
private String samlResponse;
@Factory(dataProvider = "samlConfigProvider")
public SAMLQueryProfileTestCase(SAMLConfig config) {
if (log.isDebugEnabled()) {
log.info("SAML SSO Test initialized for " + config);
}
this.config = config;
}
@DataProvider(name = "samlConfigProvider")
public static SAMLConfig[][] samlConfigProvider() {
return new SAMLConfig[][]{
{new SAMLConfig(TestUserMode.SUPER_TENANT_ADMIN, User.SUPER_TENANT_USER, HttpBinding.HTTP_REDIRECT,
ClaimType.NONE, App.SUPER_TENANT_APP_WITH_SIGNING)},
{new SAMLConfig(TestUserMode.SUPER_TENANT_ADMIN, User.SUPER_TENANT_USER, HttpBinding.HTTP_REDIRECT,
ClaimType.LOCAL, App.SUPER_TENANT_APP_WITH_SIGNING)},
{new SAMLConfig(TestUserMode.SUPER_TENANT_ADMIN, User.SUPER_TENANT_USER, HttpBinding.HTTP_POST,
ClaimType.NONE, App.SUPER_TENANT_APP_WITH_SIGNING)},
{new SAMLConfig(TestUserMode.SUPER_TENANT_ADMIN, User.SUPER_TENANT_USER, HttpBinding.HTTP_POST,
ClaimType.LOCAL, App.SUPER_TENANT_APP_WITH_SIGNING)},
// {new SAMLConfig(TestUserMode.TENANT_ADMIN, User.TENANT_USER, HttpBinding.HTTP_REDIRECT,
// ClaimType.NONE, App.TENANT_APP_WITHOUT_SIGNING)},
// {new SAMLConfig(TestUserMode.TENANT_ADMIN, User.TENANT_USER, HttpBinding.HTTP_REDIRECT,
// ClaimType.LOCAL, App.TENANT_APP_WITHOUT_SIGNING)},
// {new SAMLConfig(TestUserMode.TENANT_ADMIN, User.TENANT_USER, HttpBinding.HTTP_POST,
// ClaimType.NONE, App.TENANT_APP_WITHOUT_SIGNING)},
// {new SAMLConfig(TestUserMode.TENANT_ADMIN, User.TENANT_USER, HttpBinding.HTTP_POST,
// ClaimType.LOCAL, App.TENANT_APP_WITHOUT_SIGNING)},
};
}
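    /**
     * Initializes the admin service clients, creates the test user and application,
     * and deploys the sample web app on an embedded Tomcat instance.
     */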
@BeforeClass(alwaysRun = true)
public void testInit() throws Exception {
super.init(config.getUserMode());
ConfigurationContext configContext = ConfigurationContextFactory
.createConfigurationContextFromFileSystem(null
, null);
applicationManagementServiceClient =
new ApplicationManagementServiceClient(sessionCookie, backendURL, configContext);
ssoConfigServiceClient =
new SAMLSSOConfigServiceClient(backendURL, sessionCookie);
remoteUSMServiceClient = new RemoteUserStoreManagerServiceClient(backendURL, sessionCookie);
httpClient = new DefaultHttpClient();
createUser();
createApplication();
//Starting tomcat
log.info("Starting Tomcat");
tomcatServer = getTomcat();
URL resourceUrl = getClass()
.getResource(File.separator + "samples" + File.separator + config.getApp().getArtifact() + ".war");
startTomcat(tomcatServer, "/" + config.getApp().getArtifact(), resourceUrl.getPath());
}
@AfterClass(alwaysRun = true)
public void testClear() throws Exception {
deleteUser();
deleteApplication();
ssoConfigServiceClient = null;
applicationManagementServiceClient = null;
remoteUSMServiceClient = null;
httpClient = null;
//Stopping tomcat
tomcatServer.stop();
tomcatServer.destroy();
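        // Give Tomcat time to shut down completely before the next test configuration runs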
Thread.sleep(10000);
}
@Test(description = "Add service provider", groups = "wso2.is", priority = 1)
public void testAddSP() throws Exception {
Boolean isAddSuccess = ssoConfigServiceClient
.addServiceProvider(createSsoServiceProviderDTO());
Assert.assertTrue(isAddSuccess, "Adding a service provider has failed for " + config);
SAMLSSOServiceProviderDTO[] samlssoServiceProviderDTOs = ssoConfigServiceClient
.getServiceProviders().getServiceProviders();
Assert.assertEquals(samlssoServiceProviderDTOs[0].getIssuer(), config.getApp().getArtifact(),
"Adding a service provider has failed for " + config);
}
@Test(description = "Remove service provider", groups = "wso2.is", dependsOnMethods = {"testClaims"})
public void testRemoveSP()
throws Exception {
        Boolean isRemoveSuccess = ssoConfigServiceClient.removeServiceProvider(config.getApp().getArtifact());
        Assert.assertTrue(isRemoveSuccess, "Removing a service provider has failed for " + config);
}
@Test(alwaysRun = true, description = "Testing SAML SSO login", groups = "wso2.is",
dependsOnMethods = {"testAddSP"})
public void testSAMLSSOLogin() {
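        // Initiate SSO from the sample app, authenticate through commonauth, and deliver the SAML response to the ACS URL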
try {
HttpResponse response;
response = sendGetRequest(
String.format(SAML_SSO_LOGIN_URL, config.getApp().getArtifact(), config.getHttpBinding()
.binding));
if (config.getHttpBinding() == HttpBinding.HTTP_POST) {
String samlRequest = extractDataFromResponse(response, "SAMLRequest", 5);
response = sendSAMLMessage(SAML_SSO_URL, "SAMLRequest", samlRequest);
EntityUtils.consume(response.getEntity());
response = sendRedirectRequest(response);
}
String sessionKey = extractDataFromResponse(response, "name=\"sessionDataKey\"", 1);
response = sendPOSTMessage(sessionKey);
EntityUtils.consume(response.getEntity());
response = sendRedirectRequest(response);
String samlResponse = extractDataFromResponse(response, "SAMLResponse", 5);
response = sendSAMLMessage(String.format(ACS_URL, config.getApp().getArtifact()), "SAMLResponse",
samlResponse);
resultPage = extractDataFromResponse(response);
Assert.assertTrue(resultPage.contains("You are logged in as " + config.getUser()
.getTenantAwareUsername()),
"SAML SSO Login failed for " + config);
this.samlResponse = new String(Base64.decode(samlResponse));
} catch (Exception e) {
Assert.fail("SAML SSO Login test failed for " + config, e);
}
}
@Test(alwaysRun = true, description = "Testing SAML SSO Claims", groups = "wso2.is",
dependsOnMethods = {"testSAMLAttributeQueryRequest"})
public void testClaims() {
String claimString = resultPage.substring(resultPage.lastIndexOf("<table>"));
switch (config.getClaimType()) {
case LOCAL:
assertLocalClaims(claimString);
break;
case NONE:
assertNoneClaims(claimString);
break;
}
}
@Test(alwaysRun = true, description = "Testing Assertion ID Request", groups = "wso2.is",
dependsOnMethods = {"testSAMLSSOLogin"})
public void testSAMLAssertionIDRequest() throws Exception {
try {
log.info("RESPONSE " + this.samlResponse);
String id = QueryClientUtils.getAssertionId(this.samlResponse);
URL resourceUrl = getClass().getResource(File.separator + "keystores" + File.separator
+ "products" + File.separator + "wso2carbon.jks");
ClientSignKeyDataHolder signKeyDataHolder = null;
try {
signKeyDataHolder = new ClientSignKeyDataHolder(resourceUrl.getPath(),
"wso2carbon", "wso2carbon");
} catch (Exception e) {
Assert.fail("Unable to initiate client sign key data holder"+config, e);
}
SAMLQueryClient queryClient = new SAMLQueryClient(WSO2IS_URL, signKeyDataHolder);
String response = queryClient.executeIDRequest(config.getApp().getArtifact(), id);
Assert.assertTrue(response.contains(id));
} catch (Exception e) {
Assert.fail("SAML SSO Logout test failed for " + config, e);
}
}
@Test(alwaysRun = true, description = "Testing Attribute Query Request", groups = "wso2.is",
dependsOnMethods = {"testSAMLAssertionIDRequest"})
public void testSAMLAttributeQueryRequest() throws Exception {
try {
URL resourceUrl = getClass().getResource(File.separator + "keystores" + File.separator
+ "products" + File.separator + "wso2carbon.jks");
ClientSignKeyDataHolder signKeyDataHolder = new ClientSignKeyDataHolder(resourceUrl.getPath(),
"wso2carbon", "wso2carbon");
SAMLQueryClient queryClient = new SAMLQueryClient(WSO2IS_URL, signKeyDataHolder);
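            // Request the user's first name, last name, and email claims through a SAML2 AttributeQuery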
List<String> attributes = new ArrayList<String>();
attributes.add(firstNameClaimURI);
attributes.add(lastNameClaimURI);
attributes.add(emailClaimURI);
String response = queryClient.executeAttributeQuery(config.getApp().getArtifact(),
config.getUser().getUsername(), attributes);
Assert.assertTrue(response.contains(config.getUser().getEmail()));
} catch (Exception e) {
Assert.fail("SAML SSO Logout test failed for " + config, e);
}
}
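    /**
     * Asserts that the expected local claim values (first name, last name, email) appear in the claim table of the result page.
     */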
private void assertLocalClaims(String claims) {
Map<String, String> attributeMap = extractClaims(claims);
Assert.assertTrue(attributeMap.containsKey(firstNameClaimURI), "Claim nickname is expected");
Assert.assertEquals(attributeMap.get(firstNameClaimURI), config.getUser().getNickname(),
"Expected claim value for nickname is " + config.getUser().getNickname());
Assert.assertTrue(attributeMap.containsKey(lastNameClaimURI), "Claim lastname is expected");
Assert.assertEquals(attributeMap.get(lastNameClaimURI), config.getUser().getUsername(),
"Expected claim value for lastname is " + config.getUser().getUsername());
Assert.assertTrue(attributeMap.containsKey(emailClaimURI), "Claim email is expected");
Assert.assertEquals(attributeMap.get(emailClaimURI), config.getUser().getEmail(),
"Expected claim value for email is " + config.getUser().getEmail());
}
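    /**
     * Asserts that the result page contains no claim values.
     */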
private void assertNoneClaims(String claims) {
String[] dataArray = StringUtils.substringsBetween(claims, "<td>", "</td>");
Assert.assertNull(dataArray, "Claims are not expected for " + config);
}
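    /**
     * Deploys the given web application on the embedded Tomcat instance and starts it.
     */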
private void startTomcat(Tomcat tomcat, String webAppUrl, String webAppPath)
throws LifecycleException {
tomcat.addWebapp(tomcat.getHost(), webAppUrl, webAppPath);
tomcat.start();
}
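    /**
     * Creates an embedded Tomcat instance listening on port 8490 with auto-deployment enabled.
     */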
private Tomcat getTomcat() {
Tomcat tomcat = new Tomcat();
tomcat.getService().setContainer(tomcat.getEngine());
tomcat.setPort(8490);
tomcat.setBaseDir("");
StandardHost stdHost = (StandardHost) tomcat.getHost();
stdHost.setAppBase("");
stdHost.setAutoDeploy(true);
stdHost.setDeployOnStartup(true);
stdHost.setUnpackWARs(true);
tomcat.setHost(stdHost);
setSystemProperties();
return tomcat;
}
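    /**
     * Points the JVM trust store at the wso2carbon keystore so HTTPS calls to the server are trusted.
     */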
private void setSystemProperties() {
URL resourceUrl = getClass().getResource(File.separator + "keystores" + File.separator
+ "products" + File.separator + "wso2carbon.jks");
System.setProperty("javax.net.ssl.trustStore", resourceUrl.getPath());
System.setProperty("javax.net.ssl.trustStorePassword",
"wso2carbon");
System.setProperty("javax.net.ssl.trustStoreType", "JKS");
}
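    /**
     * Returns the single-quoted token at the given index from the response line containing the given key.
     */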
private String extractDataFromResponse(HttpResponse response, String key, int token)
throws IOException {
BufferedReader rd = new BufferedReader(
new InputStreamReader(response.getEntity().getContent()));
String line;
String value = "";
while ((line = rd.readLine()) != null) {
if (line.contains(key)) {
String[] tokens = line.split("'");
value = tokens[token];
}
}
rd.close();
return value;
}
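    /**
     * Posts the test user's credentials together with the session data key to the commonauth endpoint.
     */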
private HttpResponse sendPOSTMessage(String sessionKey) throws Exception {
HttpPost post = new HttpPost(COMMON_AUTH_URL);
post.setHeader("User-Agent", USER_AGENT);
post.addHeader("Referer", String.format(ACS_URL, config.getApp().getArtifact()));
List<NameValuePair> urlParameters = new ArrayList<NameValuePair>();
urlParameters.add(new BasicNameValuePair("username", config.getUser().getUsername()));
urlParameters.add(new BasicNameValuePair("password", config.getUser().getPassword()));
urlParameters.add(new BasicNameValuePair("sessionDataKey", sessionKey));
post.setEntity(new UrlEncodedFormEntity(urlParameters));
return httpClient.execute(post);
}
private HttpResponse sendGetRequest(String url) throws Exception {
HttpGet request = new HttpGet(url);
request.addHeader("User-Agent", USER_AGENT);
return httpClient.execute(request);
}
private HttpResponse sendSAMLMessage(String url, Map<String, String> parameters) throws IOException {
List<NameValuePair> urlParameters = new ArrayList<NameValuePair>();
HttpPost post = new HttpPost(url);
post.setHeader("User-Agent", USER_AGENT);
for (Map.Entry<String, String> entry : parameters.entrySet()) {
urlParameters.add(new BasicNameValuePair(entry.getKey(), entry.getValue()));
}
if (config.getUserMode() == TestUserMode.TENANT_ADMIN || config.getUserMode() == TestUserMode.TENANT_USER) {
urlParameters.add(new BasicNameValuePair(TENANT_DOMAIN_PARAM, config.getUser().getTenantDomain()));
}
post.setEntity(new UrlEncodedFormEntity(urlParameters));
return httpClient.execute(post);
}
private HttpResponse sendSAMLMessage(String url, String samlMsgKey, String samlMsgValue) throws IOException {
List<NameValuePair> urlParameters = new ArrayList<NameValuePair>();
HttpPost post = new HttpPost(url);
post.setHeader("User-Agent", USER_AGENT);
urlParameters.add(new BasicNameValuePair(samlMsgKey, samlMsgValue));
if (config.getUserMode() == TestUserMode.TENANT_ADMIN || config.getUserMode() == TestUserMode.TENANT_USER) {
urlParameters.add(new BasicNameValuePair(TENANT_DOMAIN_PARAM, config.getUser().getTenantDomain()));
}
post.setEntity(new UrlEncodedFormEntity(urlParameters));
return httpClient.execute(post);
}
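    /**
     * Follows the Location header of a redirect response with a GET request.
     */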
private HttpResponse sendRedirectRequest(HttpResponse response) throws IOException {
Header[] headers = response.getAllHeaders();
String url = "";
for (Header header : headers) {
if ("Location".equals(header.getName())) {
url = header.getValue();
}
}
HttpGet request = new HttpGet(url);
request.addHeader("User-Agent", USER_AGENT);
request.addHeader("Referer", String.format(ACS_URL, config.getApp().getArtifact()));
return httpClient.execute(request);
}
private String extractDataFromResponse(HttpResponse response) throws IOException {
BufferedReader rd = new BufferedReader(
new InputStreamReader(response.getEntity().getContent()));
StringBuilder result = new StringBuilder();
String line;
while ((line = rd.readLine()) != null) {
result.append(line);
}
rd.close();
return result.toString();
}
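    /**
     * Parses the two-column claim table of the result page into a claim URI to value map.
     */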
private Map<String, String> extractClaims(String claimString) {
String[] dataArray = StringUtils.substringsBetween(claimString, "<td>", "</td>");
Map<String, String> attributeMap = new HashMap<String, String>();
String key = null;
String value;
for (int i = 0; i < dataArray.length; i++) {
if ((i % 2) == 0) {
key = dataArray[i];
} else {
value = dataArray[i].trim();
attributeMap.put(key, value);
}
}
return attributeMap;
}
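    /**
     * Registers the test service provider and configures its SAML inbound authentication settings and claim mappings.
     */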
private void createApplication() throws Exception {
ServiceProvider serviceProvider = new ServiceProvider();
serviceProvider.setApplicationName(APPLICATION_NAME);
serviceProvider.setDescription("This is a test Service Provider");
applicationManagementServiceClient.createApplication(serviceProvider);
serviceProvider = applicationManagementServiceClient.getApplication(APPLICATION_NAME);
serviceProvider.getClaimConfig().setClaimMappings(getClaimMappings());
InboundAuthenticationRequestConfig requestConfig = new InboundAuthenticationRequestConfig();
requestConfig.setInboundAuthType(INBOUND_AUTH_TYPE);
requestConfig.setInboundAuthKey(config.getApp().getArtifact());
Property attributeConsumerServiceIndexProp = new Property();
attributeConsumerServiceIndexProp.setName(ATTRIBUTE_CS_INDEX_NAME);
attributeConsumerServiceIndexProp.setValue(ATTRIBUTE_CS_INDEX_VALUE);
requestConfig.setProperties(new Property[]{attributeConsumerServiceIndexProp});
InboundAuthenticationConfig inboundAuthenticationConfig = new InboundAuthenticationConfig();
inboundAuthenticationConfig.setInboundAuthenticationRequestConfigs(
new InboundAuthenticationRequestConfig[]{requestConfig});
serviceProvider.setInboundAuthenticationConfig(inboundAuthenticationConfig);
applicationManagementServiceClient.updateApplicationData(serviceProvider);
}
private void deleteApplication() throws Exception {
applicationManagementServiceClient.deleteApplication(APPLICATION_NAME);
}
private void createUser() {
log.info("Creating User " + config.getUser().getUsername());
try {
// creating the user
remoteUSMServiceClient.addUser(config.getUser().getTenantAwareUsername(), config.getUser().getPassword(),
null, getUserClaims(),
profileName, true);
} catch (Exception e) {
Assert.fail("Error while creating the user", e);
}
}
private void deleteUser() {
log.info("Deleting User " + config.getUser().getUsername());
try {
remoteUSMServiceClient.deleteUser(config.getUser().getTenantAwareUsername());
} catch (Exception e) {
Assert.fail("Error while deleting the user", e);
}
}
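    /**
     * Builds the SAML SSO service provider configuration (issuer, ACS URL, NameID format, signing and attribute profile settings).
     */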
private SAMLSSOServiceProviderDTO createSsoServiceProviderDTO() {
SAMLSSOServiceProviderDTO samlssoServiceProviderDTO = new SAMLSSOServiceProviderDTO();
samlssoServiceProviderDTO.setIssuer(config.getApp().getArtifact());
samlssoServiceProviderDTO.setAssertionConsumerUrls(new String[]{String.format(ACS_URL,
config.getApp().getArtifact())});
samlssoServiceProviderDTO.setDefaultAssertionConsumerUrl(String.format(ACS_URL, config.getApp().getArtifact()));
samlssoServiceProviderDTO.setAttributeConsumingServiceIndex(ATTRIBUTE_CS_INDEX_VALUE);
samlssoServiceProviderDTO.setNameIDFormat(NAMEID_FORMAT);
samlssoServiceProviderDTO.setDoSignAssertions(config.getApp().isSigningEnabled());
samlssoServiceProviderDTO.setDoSignResponse(config.getApp().isSigningEnabled());
samlssoServiceProviderDTO.setDoSingleLogout(true);
samlssoServiceProviderDTO.setLoginPageURL(LOGIN_URL);
if (config.getClaimType() != ClaimType.NONE) {
samlssoServiceProviderDTO.setEnableAttributeProfile(true);
samlssoServiceProviderDTO.setEnableAttributesByDefault(true);
}
samlssoServiceProviderDTO.setCertAlias("wso2carbon");
return samlssoServiceProviderDTO;
}
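    /**
     * Builds the requested claim mappings (first name, last name, email) for the service provider.
     */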
private ClaimMapping[] getClaimMappings() {
List<ClaimMapping> claimMappingList = new ArrayList<ClaimMapping>();
Claim firstNameClaim = new Claim();
firstNameClaim.setClaimUri(firstNameClaimURI);
ClaimMapping firstNameClaimMapping = new ClaimMapping();
firstNameClaimMapping.setRequested(true);
firstNameClaimMapping.setLocalClaim(firstNameClaim);
firstNameClaimMapping.setRemoteClaim(firstNameClaim);
claimMappingList.add(firstNameClaimMapping);
Claim lastNameClaim = new Claim();
lastNameClaim.setClaimUri(lastNameClaimURI);
ClaimMapping lastNameClaimMapping = new ClaimMapping();
lastNameClaimMapping.setRequested(true);
lastNameClaimMapping.setLocalClaim(lastNameClaim);
lastNameClaimMapping.setRemoteClaim(lastNameClaim);
claimMappingList.add(lastNameClaimMapping);
Claim emailClaim = new Claim();
emailClaim.setClaimUri(emailClaimURI);
ClaimMapping emailClaimMapping = new ClaimMapping();
emailClaimMapping.setRequested(true);
emailClaimMapping.setLocalClaim(emailClaim);
emailClaimMapping.setRemoteClaim(emailClaim);
claimMappingList.add(emailClaimMapping);
return claimMappingList.toArray(new ClaimMapping[claimMappingList.size()]);
}
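    /**
     * Builds the claim values assigned to the test user at creation time.
     */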
private ClaimValue[] getUserClaims() {
ClaimValue[] claimValues = new ClaimValue[3];
ClaimValue firstName = new ClaimValue();
firstName.setClaimURI(firstNameClaimURI);
firstName.setValue(config.getUser().getNickname());
claimValues[0] = firstName;
ClaimValue lastName = new ClaimValue();
lastName.setClaimURI(lastNameClaimURI);
lastName.setValue(config.getUser().getUsername());
claimValues[1] = lastName;
ClaimValue email = new ClaimValue();
email.setClaimURI(emailClaimURI);
email.setValue(config.getUser().getEmail());
claimValues[2] = email;
return claimValues;
}
private enum HttpBinding {
HTTP_REDIRECT("HTTP-Redirect"),
HTTP_POST("HTTP-POST");
String binding;
HttpBinding(String binding) {
this.binding = binding;
}
}
private enum ClaimType {
LOCAL, CUSTOM, NONE
}
private enum User {
SUPER_TENANT_USER("samluser1", "samluser1", "carbon.super", "samluser1", "[email protected]", "samlnickuser1"),
TENANT_USER("[email protected]", "samluser2", "wso2.com", "samluser2", "[email protected]", "samlnickuser2");
private String username;
private String password;
private String tenantDomain;
private String tenantAwareUsername;
private String email;
private String nickname;
User(String username, String password, String tenantDomain, String tenantAwareUsername, String email,
String nickname) {
this.username = username;
this.password = password;
this.tenantDomain = tenantDomain;
this.tenantAwareUsername = tenantAwareUsername;
this.email = email;
this.nickname = nickname;
}
public String getUsername() {
return username;
}
public String getPassword() {
return password;
}
public String getTenantDomain() {
return tenantDomain;
}
public String getTenantAwareUsername() {
return tenantAwareUsername;
}
public String getEmail() {
return email;
}
public String getNickname() {
return nickname;
}
}
private enum App {
SUPER_TENANT_APP_WITH_SIGNING("travelocity.com", true),
TENANT_APP_WITHOUT_SIGNING("travelocity.com-saml-tenantwithoutsigning", false);
private String artifact;
private boolean signingEnabled;
App(String artifact, boolean signingEnabled) {
this.artifact = artifact;
this.signingEnabled = signingEnabled;
}
public String getArtifact() {
return artifact;
}
public boolean isSigningEnabled() {
return signingEnabled;
}
}
private static class SAMLConfig {
private TestUserMode userMode;
private User user;
private HttpBinding httpBinding;
private ClaimType claimType;
private App app;
/**
* Constructor
* @param userMode User mode
* @param user subject
* @param httpBinding Http binding of request
* @param claimType Claim types
* @param app Client application
*/
private SAMLConfig(TestUserMode userMode, User user, HttpBinding httpBinding, ClaimType claimType, App app) {
this.userMode = userMode;
this.user = user;
this.httpBinding = httpBinding;
this.claimType = claimType;
this.app = app;
}
public TestUserMode getUserMode() {
return userMode;
}
public App getApp() {
return app;
}
public User getUser() {
return user;
}
public ClaimType getClaimType() {
return claimType;
}
public HttpBinding getHttpBinding() {
return httpBinding;
}
@Override
public String toString() {
return "SAMLConfig[" +
", userMode=" + userMode.name() +
", user=" + user.getUsername() +
", httpBinding=" + httpBinding +
", claimType=" + claimType +
", app=" + app.getArtifact() +
']';
}
}
} | modules/integration/tests-integration/tests-backend/src/test/java/org/wso2/identity/integration/test/saml/SAMLQueryProfileTestCase.java | /*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.identity.integration.test.saml;
import org.apache.axis2.context.ConfigurationContext;
import org.apache.axis2.context.ConfigurationContextFactory;
import org.apache.catalina.LifecycleException;
import org.apache.catalina.core.StandardHost;
import org.apache.catalina.startup.Tomcat;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.http.Header;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpGet;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.util.EntityUtils;
import org.opensaml.xml.util.Base64;
import org.testng.Assert;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Factory;
import org.testng.annotations.Test;
import org.wso2.carbon.automation.engine.context.TestUserMode;
import org.wso2.carbon.identity.application.common.model.xsd.Claim;
import org.wso2.carbon.identity.application.common.model.xsd.ClaimMapping;
import org.wso2.carbon.identity.application.common.model.xsd.InboundAuthenticationConfig;
import org.wso2.carbon.identity.application.common.model.xsd.InboundAuthenticationRequestConfig;
import org.wso2.carbon.identity.application.common.model.xsd.Property;
import org.wso2.carbon.identity.application.common.model.xsd.ServiceProvider;
import org.wso2.carbon.identity.sso.saml.stub.types.SAMLSSOServiceProviderDTO;
import org.wso2.carbon.um.ws.api.stub.ClaimValue;
import org.wso2.identity.integration.common.clients.application.mgt.ApplicationManagementServiceClient;
import org.wso2.identity.integration.common.clients.sso.saml.SAMLSSOConfigServiceClient;
import org.wso2.identity.integration.common.clients.sso.saml.query.ClientSignKeyDataHolder;
import org.wso2.identity.integration.common.clients.sso.saml.query.QueryClientUtils;
import org.wso2.identity.integration.common.clients.sso.saml.query.SAMLQueryClient;
import org.wso2.identity.integration.common.clients.usermgt.remote.RemoteUserStoreManagerServiceClient;
import org.wso2.identity.integration.common.utils.ISIntegrationTest;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Test case of SAMLQuery service
*/
public class SAMLQueryProfileTestCase extends ISIntegrationTest {
public static final String TENANT_DOMAIN_PARAM = "tenantDomain";
private static final Log log = LogFactory.getLog(SAMLQueryProfileTestCase.class);
// SAML Application attributes
private static final String USER_AGENT = "Apache-HttpClient/4.2.5 (java 1.5)";
private static final String APPLICATION_NAME = "SAML-SSO-Query-TestApplication";
private static final String INBOUND_AUTH_TYPE = "samlsso";
private static final String ATTRIBUTE_CS_INDEX_VALUE = "1239245949";
private static final String ATTRIBUTE_CS_INDEX_NAME = "attrConsumServiceIndex";
private static final String WSO2IS_URL = "https://localhost:9853/";
private static final String SAML_SSO_URL = WSO2IS_URL + "samlsso";
private static final String COMMON_AUTH_URL = WSO2IS_URL + "/commonauth";
private static final String ACS_URL = "http://localhost:8490/%s/home.jsp";
private static final String SAML_SSO_LOGIN_URL = "http://localhost:8490/%s/samlsso?SAML2.HTTPBinding=%s";
private static final String NAMEID_FORMAT = "urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress";
private static final String LOGIN_URL = "/carbon/admin/login.jsp";
//Claim Uris
private static final String firstNameClaimURI = "http://wso2.org/claims/givenname";
private static final String lastNameClaimURI = "http://wso2.org/claims/lastname";
private static final String emailClaimURI = "http://wso2.org/claims/emailaddress";
private static final String profileName = "default";
private ApplicationManagementServiceClient applicationManagementServiceClient;
private SAMLSSOConfigServiceClient ssoConfigServiceClient;
private RemoteUserStoreManagerServiceClient remoteUSMServiceClient;
private HttpClient httpClient;
private SAMLConfig config;
private Tomcat tomcatServer;
private String resultPage;
private String samlResponse;
@Factory(dataProvider = "samlConfigProvider")
public SAMLQueryProfileTestCase(SAMLConfig config) {
if (log.isDebugEnabled()) {
log.info("SAML SSO Test initialized for " + config);
}
this.config = config;
}
@DataProvider(name = "samlConfigProvider")
public static SAMLConfig[][] samlConfigProvider() {
return new SAMLConfig[][]{
{new SAMLConfig(TestUserMode.SUPER_TENANT_ADMIN, User.SUPER_TENANT_USER, HttpBinding.HTTP_REDIRECT,
ClaimType.NONE, App.SUPER_TENANT_APP_WITH_SIGNING)},
{new SAMLConfig(TestUserMode.SUPER_TENANT_ADMIN, User.SUPER_TENANT_USER, HttpBinding.HTTP_REDIRECT,
ClaimType.LOCAL, App.SUPER_TENANT_APP_WITH_SIGNING)},
{new SAMLConfig(TestUserMode.SUPER_TENANT_ADMIN, User.SUPER_TENANT_USER, HttpBinding.HTTP_POST,
ClaimType.NONE, App.SUPER_TENANT_APP_WITH_SIGNING)},
{new SAMLConfig(TestUserMode.SUPER_TENANT_ADMIN, User.SUPER_TENANT_USER, HttpBinding.HTTP_POST,
ClaimType.LOCAL, App.SUPER_TENANT_APP_WITH_SIGNING)},
{new SAMLConfig(TestUserMode.TENANT_ADMIN, User.TENANT_USER, HttpBinding.HTTP_REDIRECT,
ClaimType.NONE, App.TENANT_APP_WITHOUT_SIGNING)},
{new SAMLConfig(TestUserMode.TENANT_ADMIN, User.TENANT_USER, HttpBinding.HTTP_REDIRECT,
ClaimType.LOCAL, App.TENANT_APP_WITHOUT_SIGNING)},
{new SAMLConfig(TestUserMode.TENANT_ADMIN, User.TENANT_USER, HttpBinding.HTTP_POST,
ClaimType.NONE, App.TENANT_APP_WITHOUT_SIGNING)},
{new SAMLConfig(TestUserMode.TENANT_ADMIN, User.TENANT_USER, HttpBinding.HTTP_POST,
ClaimType.LOCAL, App.TENANT_APP_WITHOUT_SIGNING)},
};
}
@BeforeClass(alwaysRun = true)
public void testInit() throws Exception {
super.init(config.getUserMode());
ConfigurationContext configContext = ConfigurationContextFactory
.createConfigurationContextFromFileSystem(null
, null);
applicationManagementServiceClient =
new ApplicationManagementServiceClient(sessionCookie, backendURL, configContext);
ssoConfigServiceClient =
new SAMLSSOConfigServiceClient(backendURL, sessionCookie);
remoteUSMServiceClient = new RemoteUserStoreManagerServiceClient(backendURL, sessionCookie);
httpClient = new DefaultHttpClient();
createUser();
createApplication();
        // Starting Tomcat
log.info("Starting Tomcat");
tomcatServer = getTomcat();
URL resourceUrl = getClass()
.getResource(File.separator + "samples" + File.separator + config.getApp().getArtifact() + ".war");
startTomcat(tomcatServer, "/" + config.getApp().getArtifact(), resourceUrl.getPath());
}
@AfterClass(alwaysRun = true)
public void testClear() throws Exception {
deleteUser();
deleteApplication();
ssoConfigServiceClient = null;
applicationManagementServiceClient = null;
remoteUSMServiceClient = null;
httpClient = null;
        // Stopping Tomcat
tomcatServer.stop();
tomcatServer.destroy();
Thread.sleep(10000);
}
@Test(description = "Add service provider", groups = "wso2.is", priority = 1)
public void testAddSP() throws Exception {
Boolean isAddSuccess = ssoConfigServiceClient
.addServiceProvider(createSsoServiceProviderDTO());
Assert.assertTrue(isAddSuccess, "Adding a service provider has failed for " + config);
SAMLSSOServiceProviderDTO[] samlssoServiceProviderDTOs = ssoConfigServiceClient
.getServiceProviders().getServiceProviders();
Assert.assertEquals(samlssoServiceProviderDTOs[0].getIssuer(), config.getApp().getArtifact(),
"Adding a service provider has failed for " + config);
}
@Test(description = "Remove service provider", groups = "wso2.is", dependsOnMethods = {"testSAMLAssertionIDRequest"})
public void testRemoveSP()
throws Exception {
Boolean isAddSuccess = ssoConfigServiceClient.removeServiceProvider(config.getApp().getArtifact());
Assert.assertTrue(isAddSuccess, "Removing a service provider has failed for " + config);
}
@Test(alwaysRun = true, description = "Testing SAML SSO login", groups = "wso2.is",
dependsOnMethods = {"testAddSP"})
public void testSAMLSSOLogin() {
try {
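            // SP-initiated SSO flow exercised below: (1) request the SP login
            // URL, (2) for the HTTP-POST binding, relay the SAMLRequest to the
            // IdP, (3) authenticate through the commonauth endpoint, and
            // (4) post the resulting SAMLResponse back to the SP's ACS URL.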
HttpResponse response;
response = sendGetRequest(
String.format(SAML_SSO_LOGIN_URL, config.getApp().getArtifact(), config.getHttpBinding()
.binding));
if (config.getHttpBinding() == HttpBinding.HTTP_POST) {
String samlRequest = extractDataFromResponse(response, "SAMLRequest", 5);
response = sendSAMLMessage(SAML_SSO_URL, "SAMLRequest", samlRequest);
EntityUtils.consume(response.getEntity());
response = sendRedirectRequest(response);
}
String sessionKey = extractDataFromResponse(response, "name=\"sessionDataKey\"", 1);
response = sendPOSTMessage(sessionKey);
EntityUtils.consume(response.getEntity());
response = sendRedirectRequest(response);
String samlResponse = extractDataFromResponse(response, "SAMLResponse", 5);
response = sendSAMLMessage(String.format(ACS_URL, config.getApp().getArtifact()), "SAMLResponse",
samlResponse);
resultPage = extractDataFromResponse(response);
Assert.assertTrue(resultPage.contains("You are logged in as " + config.getUser()
.getTenantAwareUsername()),
"SAML SSO Login failed for " + config);
this.samlResponse = new String(Base64.decode(samlResponse));
} catch (Exception e) {
Assert.fail("SAML SSO Login test failed for " + config, e);
}
}
@Test(alwaysRun = true, description = "Testing SAML SSO Claims", groups = "wso2.is",
dependsOnMethods = {"testSAMLSSOLogin"})
public void testClaims() {
String claimString = resultPage.substring(resultPage.lastIndexOf("<table>"));
switch (config.getClaimType()) {
case LOCAL:
assertLocalClaims(claimString);
break;
case NONE:
assertNoneClaims(claimString);
break;
}
}
@Test(alwaysRun = true, description = "Testing Assertion ID Request", groups = "wso2.is",
dependsOnMethods = {"testSAMLSSOLogin"})
public void testSAMLAssertionIDRequest() throws Exception {
try {
log.info("RESPONSE " + this.samlResponse);
String id = QueryClientUtils.getAssertionId(this.samlResponse);
URL resourceUrl = getClass().getResource(File.separator + "keystores" + File.separator
+ "products" + File.separator + "wso2carbon.jks");
ClientSignKeyDataHolder signKeyDataHolder = null;
try {
signKeyDataHolder = new ClientSignKeyDataHolder(resourceUrl.getPath(),
"wso2carbon", "wso2carbon");
} catch (Exception e) {
Assert.fail("Unable to initiate client sign key data holder"+config, e);
}
SAMLQueryClient queryClient = new SAMLQueryClient(WSO2IS_URL, signKeyDataHolder);
String response = queryClient.executeIDRequest(config.getApp().getArtifact(), id);
Assert.assertTrue(response.contains(id));
} catch (Exception e) {
Assert.fail("SAML SSO Logout test failed for " + config, e);
}
}
@Test(alwaysRun = true, description = "Testing Attribute Query Request", groups = "wso2.is",
dependsOnMethods = {"testSAMLSSOLogin"})
public void testSAMLAttributeQueryRequest() throws Exception {
try {
URL resourceUrl = getClass().getResource(File.separator + "keystores" + File.separator
+ "products" + File.separator + "wso2carbon.jks");
ClientSignKeyDataHolder signKeyDataHolder = new ClientSignKeyDataHolder(resourceUrl.getPath(),
"wso2carbon", "wso2carbon");
SAMLQueryClient queryClient = new SAMLQueryClient(WSO2IS_URL, signKeyDataHolder);
List<String> attributes = new ArrayList<String>();
attributes.add(firstNameClaimURI);
attributes.add(lastNameClaimURI);
attributes.add(emailClaimURI);
String response = queryClient.executeAttributeQuery(config.getApp().getArtifact(),
config.getUser().getUsername(), attributes);
Assert.assertTrue(response.contains(config.getUser().getEmail()));
} catch (Exception e) {
Assert.fail("SAML SSO Logout test failed for " + config, e);
}
}
private void assertLocalClaims(String claims) {
Map<String, String> attributeMap = extractClaims(claims);
Assert.assertTrue(attributeMap.containsKey(firstNameClaimURI), "Claim nickname is expected");
Assert.assertEquals(attributeMap.get(firstNameClaimURI), config.getUser().getNickname(),
"Expected claim value for nickname is " + config.getUser().getNickname());
Assert.assertTrue(attributeMap.containsKey(lastNameClaimURI), "Claim lastname is expected");
Assert.assertEquals(attributeMap.get(lastNameClaimURI), config.getUser().getUsername(),
"Expected claim value for lastname is " + config.getUser().getUsername());
Assert.assertTrue(attributeMap.containsKey(emailClaimURI), "Claim email is expected");
Assert.assertEquals(attributeMap.get(emailClaimURI), config.getUser().getEmail(),
"Expected claim value for email is " + config.getUser().getEmail());
}
private void assertNoneClaims(String claims) {
String[] dataArray = StringUtils.substringsBetween(claims, "<td>", "</td>");
Assert.assertNull(dataArray, "Claims are not expected for " + config);
}
private void startTomcat(Tomcat tomcat, String webAppUrl, String webAppPath)
throws LifecycleException {
tomcat.addWebapp(tomcat.getHost(), webAppUrl, webAppPath);
tomcat.start();
}
private Tomcat getTomcat() {
Tomcat tomcat = new Tomcat();
tomcat.getService().setContainer(tomcat.getEngine());
tomcat.setPort(8490);
tomcat.setBaseDir("");
StandardHost stdHost = (StandardHost) tomcat.getHost();
stdHost.setAppBase("");
stdHost.setAutoDeploy(true);
stdHost.setDeployOnStartup(true);
stdHost.setUnpackWARs(true);
tomcat.setHost(stdHost);
setSystemProperties();
return tomcat;
}
private void setSystemProperties() {
URL resourceUrl = getClass().getResource(File.separator + "keystores" + File.separator
+ "products" + File.separator + "wso2carbon.jks");
System.setProperty("javax.net.ssl.trustStore", resourceUrl.getPath());
System.setProperty("javax.net.ssl.trustStorePassword",
"wso2carbon");
System.setProperty("javax.net.ssl.trustStoreType", "JKS");
}
private String extractDataFromResponse(HttpResponse response, String key, int token)
throws IOException {
BufferedReader rd = new BufferedReader(
new InputStreamReader(response.getEntity().getContent()));
String line;
String value = "";
while ((line = rd.readLine()) != null) {
if (line.contains(key)) {
String[] tokens = line.split("'");
value = tokens[token];
}
}
rd.close();
return value;
}
private HttpResponse sendPOSTMessage(String sessionKey) throws Exception {
HttpPost post = new HttpPost(COMMON_AUTH_URL);
post.setHeader("User-Agent", USER_AGENT);
post.addHeader("Referer", String.format(ACS_URL, config.getApp().getArtifact()));
List<NameValuePair> urlParameters = new ArrayList<NameValuePair>();
urlParameters.add(new BasicNameValuePair("username", config.getUser().getUsername()));
urlParameters.add(new BasicNameValuePair("password", config.getUser().getPassword()));
urlParameters.add(new BasicNameValuePair("sessionDataKey", sessionKey));
post.setEntity(new UrlEncodedFormEntity(urlParameters));
return httpClient.execute(post);
}
private HttpResponse sendGetRequest(String url) throws Exception {
HttpGet request = new HttpGet(url);
request.addHeader("User-Agent", USER_AGENT);
return httpClient.execute(request);
}
private HttpResponse sendSAMLMessage(String url, Map<String, String> parameters) throws IOException {
List<NameValuePair> urlParameters = new ArrayList<NameValuePair>();
HttpPost post = new HttpPost(url);
post.setHeader("User-Agent", USER_AGENT);
for (Map.Entry<String, String> entry : parameters.entrySet()) {
urlParameters.add(new BasicNameValuePair(entry.getKey(), entry.getValue()));
}
if (config.getUserMode() == TestUserMode.TENANT_ADMIN || config.getUserMode() == TestUserMode.TENANT_USER) {
urlParameters.add(new BasicNameValuePair(TENANT_DOMAIN_PARAM, config.getUser().getTenantDomain()));
}
post.setEntity(new UrlEncodedFormEntity(urlParameters));
return httpClient.execute(post);
}
private HttpResponse sendSAMLMessage(String url, String samlMsgKey, String samlMsgValue) throws IOException {
List<NameValuePair> urlParameters = new ArrayList<NameValuePair>();
HttpPost post = new HttpPost(url);
post.setHeader("User-Agent", USER_AGENT);
urlParameters.add(new BasicNameValuePair(samlMsgKey, samlMsgValue));
if (config.getUserMode() == TestUserMode.TENANT_ADMIN || config.getUserMode() == TestUserMode.TENANT_USER) {
urlParameters.add(new BasicNameValuePair(TENANT_DOMAIN_PARAM, config.getUser().getTenantDomain()));
}
post.setEntity(new UrlEncodedFormEntity(urlParameters));
return httpClient.execute(post);
}
private HttpResponse sendRedirectRequest(HttpResponse response) throws IOException {
Header[] headers = response.getAllHeaders();
String url = "";
for (Header header : headers) {
if ("Location".equals(header.getName())) {
url = header.getValue();
}
}
HttpGet request = new HttpGet(url);
request.addHeader("User-Agent", USER_AGENT);
request.addHeader("Referer", String.format(ACS_URL, config.getApp().getArtifact()));
return httpClient.execute(request);
}
private String extractDataFromResponse(HttpResponse response) throws IOException {
BufferedReader rd = new BufferedReader(
new InputStreamReader(response.getEntity().getContent()));
StringBuilder result = new StringBuilder();
String line;
while ((line = rd.readLine()) != null) {
result.append(line);
}
rd.close();
return result.toString();
}
private Map<String, String> extractClaims(String claimString) {
String[] dataArray = StringUtils.substringsBetween(claimString, "<td>", "</td>");
Map<String, String> attributeMap = new HashMap<String, String>();
String key = null;
String value;
for (int i = 0; i < dataArray.length; i++) {
if ((i % 2) == 0) {
key = dataArray[i];
} else {
value = dataArray[i].trim();
attributeMap.put(key, value);
}
}
return attributeMap;
}
private void createApplication() throws Exception {
ServiceProvider serviceProvider = new ServiceProvider();
serviceProvider.setApplicationName(APPLICATION_NAME);
serviceProvider.setDescription("This is a test Service Provider");
applicationManagementServiceClient.createApplication(serviceProvider);
serviceProvider = applicationManagementServiceClient.getApplication(APPLICATION_NAME);
serviceProvider.getClaimConfig().setClaimMappings(getClaimMappings());
InboundAuthenticationRequestConfig requestConfig = new InboundAuthenticationRequestConfig();
requestConfig.setInboundAuthType(INBOUND_AUTH_TYPE);
requestConfig.setInboundAuthKey(config.getApp().getArtifact());
Property attributeConsumerServiceIndexProp = new Property();
attributeConsumerServiceIndexProp.setName(ATTRIBUTE_CS_INDEX_NAME);
attributeConsumerServiceIndexProp.setValue(ATTRIBUTE_CS_INDEX_VALUE);
requestConfig.setProperties(new Property[]{attributeConsumerServiceIndexProp});
InboundAuthenticationConfig inboundAuthenticationConfig = new InboundAuthenticationConfig();
inboundAuthenticationConfig.setInboundAuthenticationRequestConfigs(
new InboundAuthenticationRequestConfig[]{requestConfig});
serviceProvider.setInboundAuthenticationConfig(inboundAuthenticationConfig);
applicationManagementServiceClient.updateApplicationData(serviceProvider);
}
private void deleteApplication() throws Exception {
applicationManagementServiceClient.deleteApplication(APPLICATION_NAME);
}
private void createUser() {
log.info("Creating User " + config.getUser().getUsername());
try {
// creating the user
remoteUSMServiceClient.addUser(config.getUser().getTenantAwareUsername(), config.getUser().getPassword(),
null, getUserClaims(),
profileName, true);
} catch (Exception e) {
Assert.fail("Error while creating the user", e);
}
}
private void deleteUser() {
log.info("Deleting User " + config.getUser().getUsername());
try {
remoteUSMServiceClient.deleteUser(config.getUser().getTenantAwareUsername());
} catch (Exception e) {
Assert.fail("Error while deleting the user", e);
}
}
private SAMLSSOServiceProviderDTO createSsoServiceProviderDTO() {
SAMLSSOServiceProviderDTO samlssoServiceProviderDTO = new SAMLSSOServiceProviderDTO();
samlssoServiceProviderDTO.setIssuer(config.getApp().getArtifact());
samlssoServiceProviderDTO.setAssertionConsumerUrls(new String[]{String.format(ACS_URL,
config.getApp().getArtifact())});
samlssoServiceProviderDTO.setDefaultAssertionConsumerUrl(String.format(ACS_URL, config.getApp().getArtifact()));
samlssoServiceProviderDTO.setAttributeConsumingServiceIndex(ATTRIBUTE_CS_INDEX_VALUE);
samlssoServiceProviderDTO.setNameIDFormat(NAMEID_FORMAT);
samlssoServiceProviderDTO.setDoSignAssertions(config.getApp().isSigningEnabled());
samlssoServiceProviderDTO.setDoSignResponse(config.getApp().isSigningEnabled());
samlssoServiceProviderDTO.setDoSingleLogout(true);
samlssoServiceProviderDTO.setLoginPageURL(LOGIN_URL);
if (config.getClaimType() != ClaimType.NONE) {
samlssoServiceProviderDTO.setEnableAttributeProfile(true);
samlssoServiceProviderDTO.setEnableAttributesByDefault(true);
}
return samlssoServiceProviderDTO;
}
private ClaimMapping[] getClaimMappings() {
List<ClaimMapping> claimMappingList = new ArrayList<ClaimMapping>();
Claim firstNameClaim = new Claim();
firstNameClaim.setClaimUri(firstNameClaimURI);
ClaimMapping firstNameClaimMapping = new ClaimMapping();
firstNameClaimMapping.setRequested(true);
firstNameClaimMapping.setLocalClaim(firstNameClaim);
firstNameClaimMapping.setRemoteClaim(firstNameClaim);
claimMappingList.add(firstNameClaimMapping);
Claim lastNameClaim = new Claim();
lastNameClaim.setClaimUri(lastNameClaimURI);
ClaimMapping lastNameClaimMapping = new ClaimMapping();
lastNameClaimMapping.setRequested(true);
lastNameClaimMapping.setLocalClaim(lastNameClaim);
lastNameClaimMapping.setRemoteClaim(lastNameClaim);
claimMappingList.add(lastNameClaimMapping);
Claim emailClaim = new Claim();
emailClaim.setClaimUri(emailClaimURI);
ClaimMapping emailClaimMapping = new ClaimMapping();
emailClaimMapping.setRequested(true);
emailClaimMapping.setLocalClaim(emailClaim);
emailClaimMapping.setRemoteClaim(emailClaim);
claimMappingList.add(emailClaimMapping);
return claimMappingList.toArray(new ClaimMapping[claimMappingList.size()]);
}
private ClaimValue[] getUserClaims() {
ClaimValue[] claimValues = new ClaimValue[3];
ClaimValue firstName = new ClaimValue();
firstName.setClaimURI(firstNameClaimURI);
firstName.setValue(config.getUser().getNickname());
claimValues[0] = firstName;
ClaimValue lastName = new ClaimValue();
lastName.setClaimURI(lastNameClaimURI);
lastName.setValue(config.getUser().getUsername());
claimValues[1] = lastName;
ClaimValue email = new ClaimValue();
email.setClaimURI(emailClaimURI);
email.setValue(config.getUser().getEmail());
claimValues[2] = email;
return claimValues;
}
private enum HttpBinding {
HTTP_REDIRECT("HTTP-Redirect"),
HTTP_POST("HTTP-POST");
String binding;
HttpBinding(String binding) {
this.binding = binding;
}
}
private enum ClaimType {
LOCAL, CUSTOM, NONE
}
private enum User {
SUPER_TENANT_USER("samluser1", "samluser1", "carbon.super", "samluser1", "[email protected]", "samlnickuser1"),
TENANT_USER("[email protected]", "samluser2", "wso2.com", "samluser2", "[email protected]", "samlnickuser2");
private String username;
private String password;
private String tenantDomain;
private String tenantAwareUsername;
private String email;
private String nickname;
User(String username, String password, String tenantDomain, String tenantAwareUsername, String email,
String nickname) {
this.username = username;
this.password = password;
this.tenantDomain = tenantDomain;
this.tenantAwareUsername = tenantAwareUsername;
this.email = email;
this.nickname = nickname;
}
public String getUsername() {
return username;
}
public String getPassword() {
return password;
}
public String getTenantDomain() {
return tenantDomain;
}
public String getTenantAwareUsername() {
return tenantAwareUsername;
}
public String getEmail() {
return email;
}
public String getNickname() {
return nickname;
}
}
private enum App {
SUPER_TENANT_APP_WITH_SIGNING("travelocity.com", true),
TENANT_APP_WITHOUT_SIGNING("travelocity.com-saml-tenantwithoutsigning", false);
private String artifact;
private boolean signingEnabled;
App(String artifact, boolean signingEnabled) {
this.artifact = artifact;
this.signingEnabled = signingEnabled;
}
public String getArtifact() {
return artifact;
}
public boolean isSigningEnabled() {
return signingEnabled;
}
}
private static class SAMLConfig {
private TestUserMode userMode;
private User user;
private HttpBinding httpBinding;
private ClaimType claimType;
private App app;
/**
* Constructor
* @param userMode User mode
* @param user subject
* @param httpBinding Http binding of request
* @param claimType Claim types
* @param app Client application
*/
private SAMLConfig(TestUserMode userMode, User user, HttpBinding httpBinding, ClaimType claimType, App app) {
this.userMode = userMode;
this.user = user;
this.httpBinding = httpBinding;
this.claimType = claimType;
this.app = app;
}
public TestUserMode getUserMode() {
return userMode;
}
public App getApp() {
return app;
}
public User getUser() {
return user;
}
public ClaimType getClaimType() {
return claimType;
}
public HttpBinding getHttpBinding() {
return httpBinding;
}
@Override
public String toString() {
return "SAMLConfig[" +
", userMode=" + userMode.name() +
", user=" + user.getUsername() +
", httpBinding=" + httpBinding +
", claimType=" + claimType +
", app=" + app.getArtifact() +
']';
}
}
} | Fixing test failures
| modules/integration/tests-integration/tests-backend/src/test/java/org/wso2/identity/integration/test/saml/SAMLQueryProfileTestCase.java | Fixing test failures |
|
Java | apache-2.0 | 9875ba10922f548e90ef9657197f8673e99607cc | 0 | leeyazhou/sharding-jdbc,apache/incubator-shardingsphere,leeyazhou/sharding-jdbc,leeyazhou/sharding-jdbc,leeyazhou/sharding-jdbc,apache/incubator-shardingsphere,apache/incubator-shardingsphere,apache/incubator-shardingsphere | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.core.rewrite.token.pojo;
import lombok.Getter;
import org.apache.shardingsphere.core.parse.sql.segment.dml.expr.ExpressionSegment;
import org.apache.shardingsphere.core.parse.sql.segment.dml.expr.complex.ComplexExpressionSegment;
import org.apache.shardingsphere.core.parse.sql.segment.dml.expr.simple.LiteralExpressionSegment;
import org.apache.shardingsphere.core.parse.sql.segment.dml.expr.simple.ParameterMarkerExpressionSegment;
/**
* Insert set add generated key token.
*
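 * <p>
 * For example, given column {@code order_id} and a literal value {@code 100},
 * {@link #toString()} yields the SQL fragment {@code ", order_id = 100"}.
 *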
* @author panjuan
*/
@Getter
public final class InsertSetAddGeneratedKeyToken extends SQLToken implements Attachable {
private final String columnName;
private final ExpressionSegment columnValue;
public InsertSetAddGeneratedKeyToken(final int startIndex, final String columnName, final ExpressionSegment columnValue) {
super(startIndex);
this.columnName = columnName;
this.columnValue = columnValue;
}
@Override
public String toString() {
return String.format(", %s = %s", columnName, getColumnValue());
}
private String getColumnValue() {
if (columnValue instanceof ParameterMarkerExpressionSegment) {
return "?";
} else if (columnValue instanceof LiteralExpressionSegment) {
Object literals = ((LiteralExpressionSegment) columnValue).getLiterals();
return literals instanceof String ? String.format("'%s'", literals) : literals.toString();
}
return ((ComplexExpressionSegment) columnValue).getText();
}
}
| sharding-core/sharding-core-rewrite/src/main/java/org/apache/shardingsphere/core/rewrite/token/pojo/InsertSetAddGeneratedKeyToken.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.shardingsphere.core.rewrite.token.pojo;
import lombok.Getter;
/**
* Insert set add generated key token.
*
* @author panjuan
*/
@Getter
public final class InsertSetAddGeneratedKeyToken extends SQLToken implements Attachable {
private final String columnName;
public InsertSetAddGeneratedKeyToken(final int startIndex, final String columnName) {
super(startIndex);
this.columnName = columnName;
}
}
| add columnValue
| sharding-core/sharding-core-rewrite/src/main/java/org/apache/shardingsphere/core/rewrite/token/pojo/InsertSetAddGeneratedKeyToken.java | add columnValue |
|
Java | apache-2.0 | 578f7658c57e2ce58069eb2b171bcc9a3aba103e | 0 | juebanlin/util4j,juebanlin/util4j | package net.jueb.util4j.convert.audio;
import java.io.File;
import java.io.IOException;
import java.util.UUID;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import it.sauronsoftware.jave.AudioAttributes;
import it.sauronsoftware.jave.Encoder;
import it.sauronsoftware.jave.EncoderProgressListener;
import it.sauronsoftware.jave.EncodingAttributes;
import it.sauronsoftware.jave.MultimediaInfo;
import it.sauronsoftware.jave.MultimediaObject;
import net.jueb.util4j.file.FileUtil;
public class JaveAudioConvert {
private Logger log = LoggerFactory.getLogger(getClass());
	private final String tempDir; // temporary working directory
public static final String tmpPrefix=".tmp";
public static final String prefix=".ok";
public JaveAudioConvert() {
this(FileUtil.createTmpDir("JaveAudioConvert").getPath());
}
public JaveAudioConvert(String tempDir) {
this.tempDir = tempDir;
}
/**
	 * Transcodes the given audio data with the specified codec into the target format.
*
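	 * <p>
	 * A usage sketch (the input bytes are assumed to hold a complete audio
	 * file, e.g. an AMR recording):
	 *
	 * <pre>
	 * byte[] mp3 = new JaveAudioConvert().audioConvert(amrBytes, "libmp3lame", "mp3");
	 * </pre>
	 *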
	 * @param audioData
	 *            the source audio bytes
	 * @param codec
	 *            the encoder codec to use (e.g. libmp3lame)
	 * @param format
	 *            the target container format (e.g. mp3)
	 * @return the transcoded bytes, or null if the conversion fails
*/
public final byte[] audioConvert(byte[] audioData, String codec, String format) {
AudioAttributes audioAttrs = new AudioAttributes();
		audioAttrs.setCodec(codec); // encoder codec, e.g. libmp3lame
		audioAttrs.setBitRate(new Integer(128000)); // bit rate
		audioAttrs.setChannels(new Integer(2)); // number of audio channels
		audioAttrs.setSamplingRate(new Integer(44100)); // sampling rate
		audioAttrs.setVolume(100); // volume
return audioConvert(audioData, format, audioAttrs);
}
/**
	 * Transcodes the given audio data using explicit encoding attributes.
	 *
	 * @param audioData the source audio bytes
	 * @param format the target container format
	 * @param audioAttrs the audio encoding attributes to apply
	 * @return the transcoded bytes, or null if the conversion fails
*/
public final byte[] audioConvert(byte[] audioData, String format, AudioAttributes audioAttrs) {
String name = UUID.randomUUID().toString();
File file = new File(tempDir, name + tmpPrefix);
File destFile = new File(tempDir, name + prefix);
try {
FileUtils.writeByteArrayToFile(file, audioData);
audioConvert(file, destFile, format, audioAttrs);
if (destFile.exists() && destFile.isFile()) {
return FileUtils.readFileToByteArray(destFile);
}
} catch (IOException e) {
log.error(e.getMessage(), e);
} finally {
if (file.exists()) {
file.delete();
}
if (destFile.exists()) {
destFile.delete();
}
}
return null;
}
/**
	 * Converts an audio file to another format, writing the result to the
	 * target file.
	 *
	 * @param sourceFile
	 *            the source audio file
	 * @param target
	 *            the destination file
	 * @param codec
	 *            the encoder codec (typically libmp3lame)
	 * @param format
	 *            the target format (typically mp3)
*/
public final void audioConvert(File sourceFile, File target, String codec, String format) {
AudioAttributes audioAttrs = new AudioAttributes();
		audioAttrs.setCodec(codec); // encoder codec, e.g. libmp3lame
		audioAttrs.setBitRate(new Integer(128000)); // bit rate
		audioAttrs.setChannels(new Integer(2)); // number of audio channels
		audioAttrs.setSamplingRate(new Integer(44100)); // sampling rate
		audioAttrs.setVolume(100); // volume
audioConvert(sourceFile, target, format, audioAttrs);
}
public final void audioConvert(File sourceFile, File target, String format, AudioAttributes audioAttrs) {
		long startMillis = System.currentTimeMillis();
EncodingAttributes attrs = new EncodingAttributes();
		attrs.setFormat(format); // output container format
attrs.setAudioAttributes(audioAttrs);
Encoder encoder = new Encoder();
log.debug("sourceFile(len:"+sourceFile.length()+"):"+sourceFile.getPath());
log.debug("targetFile:"+target.getPath());
EncoderProgressListener epl = new EncoderProgressListener() {
@Override
public void sourceInfo(MultimediaInfo arg0) {
log.debug("MultimediaInfo:"+arg0);
}
@Override
public void progress(int arg0) {
log.debug("progress:"+arg0);
}
@Override
public void message(String arg0) {
log.debug("message:"+arg0);
}
};
try {
log.debug("startEncode……");
encoder.encode(new MultimediaObject(sourceFile), target, attrs, epl);
log.debug("endEncode,targetFile(len:"+target.length()+"):"+target.getPath());
} catch (Exception e) {
target.delete();
log.error(e.getMessage(), e);
}
		long elapsedMillis = System.currentTimeMillis() - startMillis;
		log.debug("elapsed millis: " + elapsedMillis);
}
public static void main(String[] args) {
JaveAudioConvert jc=new JaveAudioConvert();
jc.audioConvert(new File("d:/123.amr"), new File("d:/test/456.mp3"), "libmp3lame", "mp3");
}
}
| util4j/src/main/java/net/jueb/util4j/convert/audio/JaveAudioConvert.java | package net.jueb.util4j.convert.audio;
import java.io.File;
import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.UUID;
import org.apache.commons.io.FileUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import it.sauronsoftware.jave.AudioAttributes;
import it.sauronsoftware.jave.Encoder;
import it.sauronsoftware.jave.EncoderProgressListener;
import it.sauronsoftware.jave.EncodingAttributes;
import it.sauronsoftware.jave.MultimediaInfo;
import it.sauronsoftware.jave.MultimediaObject;
import net.jueb.util4j.file.FileUtil;
public class JaveAudioConvert {
private SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss SSS");
private Logger log = LoggerFactory.getLogger(getClass());
	private final String tempDir; // temporary working directory
public JaveAudioConvert() {
this(FileUtil.createTmpDir("JaveAudioConvert").getPath());
}
public JaveAudioConvert(String tempDir) {
this.tempDir = tempDir;
}
/**
	 * Transcodes the given audio data with the specified codec into the target format.
*
	 * @param audioData
	 *            the source audio bytes
	 * @param codec
	 *            the encoder codec to use (e.g. libmp3lame)
	 * @param format
	 *            the target container format (e.g. mp3)
	 * @return the transcoded bytes, or null if the conversion fails
*/
public final byte[] audioConvert(byte[] audioData, String codec, String format) {
AudioAttributes audioAttrs = new AudioAttributes();
		audioAttrs.setCodec(codec); // encoder codec, e.g. libmp3lame
		audioAttrs.setBitRate(new Integer(128000)); // bit rate
		audioAttrs.setChannels(new Integer(2)); // number of audio channels
		audioAttrs.setSamplingRate(new Integer(44100)); // sampling rate
		audioAttrs.setVolume(100); // volume
return audioConvert(audioData, format, audioAttrs);
}
public final byte[] audioConvert(byte[] audioData, String format, AudioAttributes audioAttrs) {
String name = UUID.randomUUID().toString();
File file = new File(tempDir, name + ".tmp");
File destFile = new File(tempDir, name + ".result");
try {
FileUtils.writeByteArrayToFile(file, audioData);
audioConvert(file, destFile, format, audioAttrs);
if (destFile.exists()) {
return FileUtils.readFileToByteArray(destFile);
}
} catch (IOException e) {
log.error(e.getMessage(), e);
} finally {
if (file.exists()) {
file.delete();
}
if (destFile.exists()) {
destFile.delete();
}
}
return null;
}
/**
	 * Converts an audio file to another format, writing the result to the
	 * target file.
	 *
	 * @param sourceFile
	 *            the source audio file
	 * @param target
	 *            the destination file
	 * @param codec
	 *            the encoder codec (typically libmp3lame)
	 * @param format
	 *            the target format (typically mp3)
	 * @return a textual log of the conversion steps
*/
public final String audioConvert(File sourceFile, File target, String codec, String format) {
AudioAttributes audioAttrs = new AudioAttributes();
		audioAttrs.setCodec(codec); // encoder codec, e.g. libmp3lame
		audioAttrs.setBitRate(new Integer(128000)); // bit rate
		audioAttrs.setChannels(new Integer(2)); // number of audio channels
		audioAttrs.setSamplingRate(new Integer(44100)); // sampling rate
		audioAttrs.setVolume(100); // volume
return audioConvert(sourceFile, target, format, audioAttrs);
}
public final String audioConvert(File sourceFile, File target, String format, AudioAttributes audioAttrs) {
long time = System.currentTimeMillis();
final StringBuffer sb = new StringBuffer("\n");
EncodingAttributes attrs = new EncodingAttributes();
		attrs.setFormat(format); // output container format
attrs.setAudioAttributes(audioAttrs);
Encoder encoder = new Encoder();
sb.append("[" + sdf.format(new Date()) + "]" + "sourceFilePath=" + sourceFile.getPath() + "\n");
sb.append("[" + sdf.format(new Date()) + "]" + "sourceFileLength=" + sourceFile.length() + "\n");
sb.append("[" + sdf.format(new Date()) + "]" + "targetFilePath=" + target.getPath() + "\n");
EncoderProgressListener epl = new EncoderProgressListener() {
@Override
public void sourceInfo(MultimediaInfo arg0) {
sb.append("[" + sdf.format(new Date()) + "]" + "MultimediaInfo:" + arg0 + "\n");
}
@Override
public void progress(int arg0) {
sb.append("[" + sdf.format(new Date()) + "]" + "progress:" + arg0 + "\n");
}
@Override
public void message(String arg0) {
sb.append("[" + sdf.format(new Date()) + "]" + "message:" + arg0 + "\n");
}
};
try {
sb.append("[" + sdf.format(new Date()) + "]" + "startEncode" + "\n");
encoder.encode(new MultimediaObject(sourceFile), target, attrs, epl);
sb.append("[" + sdf.format(new Date()) + "]" + "endEncode" + "\n");
			if (target.exists()) { // conversion succeeded
				sb.append("[" + sdf.format(new Date()) + "]" + "encode success" + "\n");
}
} catch (Exception e) {
target.delete();
sb.append("[" + sdf.format(new Date()) + "]" + "encodeError:" + e.getMessage() + "\n");
log.error(e.getMessage(), e);
}
time = System.currentTimeMillis() - time;
sb.append("[" + sdf.format(new Date()) + "]" + "times:" + time + "\n");
return sb.toString();
}
}
|
Signed-off-by: juebanlin <juebanlin@DESKTOP-68KCVE3> | util4j/src/main/java/net/jueb/util4j/convert/audio/JaveAudioConvert.java | ||
Java | apache-2.0 | cba501ada1cb2251e3ad49c20ba9f4b0c42dc900 | 0 | wvanderdeijl/adf-selenium,adfemg/adf-selenium | package com.redheap.selenium.component;
import com.redheap.selenium.component.uix.UixValue;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
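/**
 * Page-object wrapper for the ADF selectOneListbox component. A minimal usage
 * sketch (assumes a live WebDriver session and a rendered component; the
 * client id and item label shown are hypothetical):
 *
 * <pre>
 * AdfSelectOneListbox listbox = new AdfSelectOneListbox(driver, "pt1:listbox1");
 * listbox.clickItemByLabel("Red");
 * String selected = listbox.getValueLabel(); // "Red"
 * </pre>
 */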
public class AdfSelectOneListbox extends UixValue {
private static final String SUBID_content = "content"; // <div> element
private static final String SUBID_item = "item"; // <input type='listbox'> element
private static final String SUBID_label = "label"; // <label> element
private static final String JS_VALUE_BY_LABEL =
JS_FIND_COMPONENT +
"var items=comp.getSelectItems(); for (var i=0;i<items.length;i++){if (items[i].getLabel()===arguments[1]) return items[i].getValue()}; return null;";
private static final String JS_LABEL_BY_VALUE =
JS_FIND_COMPONENT +
"var items=comp.getSelectItems(); for (var i=0;i<items.length;i++){if (items[i].getValue()===arguments[1]) return items[i].getLabel()}; return null;";
public AdfSelectOneListbox(WebDriver webDriver, String clientId) {
super(webDriver, clientId);
}
public void clickItemByIndex(int index) {
final WebElement elem = findItem(index);
executeScript("arguments[0].scrollIntoView()", elem); // needed in 12.2
elem.click(); // click item within list
waitForPpr();
}
public void clickItemByLabel(String label) {
clickItemByIndex(getValueByLabel(label));
}
public String getItemLabel(int index) {
return (String) executeScript(JS_LABEL_BY_VALUE, getClientId(), Integer.toString(index));
}
public int getValueByLabel(String label) {
String value = (String) executeScript(JS_VALUE_BY_LABEL, getClientId(), label);
return value == null ? -1 : Integer.valueOf(value);
}
/**
* Gets the label of the selected value.
     * @return the label of the currently selected item, or {@code null} if nothing is selected
*/
public String getValueLabel() {
Object value = getValue();
if (value instanceof Number) {
return getItemLabel(((Number) value).intValue());
} else if (value instanceof String) {
return getItemLabel(Integer.valueOf((String) value));
} else {
return null;
}
}
protected WebElement findContent() {
return findSubIdElement(SUBID_content);
}
protected WebElement findItem(int index) {
return findSubIdElement(SUBID_item + "[" + index + "]");
}
protected WebElement findLabel() {
return findSubIdElement(SUBID_label);
}
}
| SeleniumTools/src/com/redheap/selenium/component/AdfSelectOneListbox.java | package com.redheap.selenium.component;
import com.redheap.selenium.component.uix.UixValue;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
public class AdfSelectOneListbox extends UixValue {
private static final String SUBID_content = "content"; // <div> element
private static final String SUBID_item = "item"; // <input type='listbox'> element
private static final String SUBID_label = "label"; // <label> element
private static final String JS_VALUE_BY_LABEL =
JS_FIND_COMPONENT +
"var items=comp.getSelectItems(); for (var i=0;i<items.length;i++){if (items[i].getLabel()===arguments[1]) return items[i].getValue()}; return null;";
private static final String JS_LABEL_BY_VALUE =
JS_FIND_COMPONENT +
"var items=comp.getSelectItems(); for (var i=0;i<items.length;i++){if (items[i].getValue()===arguments[1]) return items[i].getLabel()}; return null;";
public AdfSelectOneListbox(WebDriver webDriver, String clientId) {
super(webDriver, clientId);
}
public void clickItemByIndex(int index) {
findItem(index).click(); // click item within list
waitForPpr();
}
public void clickItemByLabel(String label) {
clickItemByIndex(getValueByLabel(label));
}
public String getItemLabel(int index) {
return (String) executeScript(JS_LABEL_BY_VALUE, getClientId(), Integer.toString(index));
}
public int getValueByLabel(String label) {
String value = (String) executeScript(JS_VALUE_BY_LABEL, getClientId(), label);
return value == null ? -1 : Integer.valueOf(value);
}
/**
* Gets the label of the selected value.
     * @return the label of the currently selected item, or {@code null} if nothing is selected
*/
public String getValueLabel() {
Object value = getValue();
if (value instanceof Number) {
return getItemLabel(((Number) value).intValue());
} else if (value instanceof String) {
return getItemLabel(Integer.valueOf((String) value));
} else {
return null;
}
}
protected WebElement findContent() {
return findSubIdElement(SUBID_content);
}
protected WebElement findItem(int index) {
return findSubIdElement(SUBID_item + "[" + index + "]");
}
protected WebElement findLabel() {
return findSubIdElement(SUBID_label);
}
}
| 12.2 requires us to first scroll the item into view
| SeleniumTools/src/com/redheap/selenium/component/AdfSelectOneListbox.java | 12.2 requires us to first scroll the item into view |
|
Java | apache-2.0 | 44c914a2648699722fe8082aeaa247a2e7ff8288 | 0 | apache/commons-pool,apache/commons-pool,apache/commons-pool | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.pool2.impl;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.commons.pool2.KeyedObjectPool;
import org.apache.commons.pool2.KeyedPoolableObjectFactory;
import org.apache.commons.pool2.PoolUtils;
/**
* A configurable <code>KeyedObjectPool</code> implementation.
* <p>
* When coupled with the appropriate {@link KeyedPoolableObjectFactory},
* <code>GenericKeyedObjectPool</code> provides robust pooling functionality for
* keyed objects. A <code>GenericKeyedObjectPool</code> can be viewed as a map
* of sub-pools, keyed on the (unique) key values provided to the
* {@link #preparePool preparePool}, {@link #addObject addObject} or
* {@link #borrowObject borrowObject} methods. Each time a new key value is
 * provided to one of these methods, a new sub-pool is created under the given
 * key, to be managed by the containing <code>GenericKeyedObjectPool</code>.
* <p>
* Optionally, one may configure the pool to examine and possibly evict objects
* as they sit idle in the pool and to ensure that a minimum number of idle
* objects is maintained for each key. This is performed by an "idle object
* eviction" thread, which runs asynchronously. Caution should be used when
* configuring this optional feature. Eviction runs contend with client threads
* for access to objects in the pool, so if they run too frequently performance
* issues may result.
* <p>
* Implementation note: To prevent possible deadlocks, care has been taken to
* ensure that no call to a factory method will occur within a synchronization
* block. See POOL-125 and DBCP-44 for more information.
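 * <p>
 * A minimal usage sketch ({@code MyKeyedFactory} and {@code MyConnection} are
 * hypothetical user-supplied types; the factory implements
 * {@link KeyedPoolableObjectFactory}):
 * <pre>{@code
 * GenericKeyedObjectPool<String, MyConnection> pool =
 *         new GenericKeyedObjectPool<String, MyConnection>(new MyKeyedFactory());
 * pool.setMaxTotalPerKey(8);
 * MyConnection conn = pool.borrowObject("reports-db");
 * try {
 *     // use the instance
 * } finally {
 *     pool.returnObject("reports-db", conn);
 * }
 * }</pre>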
*
* @see GenericObjectPool
*
* @param <K> The type of keys maintained by this pool.
* @param <T> Type of element pooled in this pool.
*
* @author Rodney Waldhoff
* @author Dirk Verbeeck
* @author Sandy McArthur
* @version $Revision$ $Date$
*/
public class GenericKeyedObjectPool<K,T> extends BaseGenericObjectPool<T>
implements KeyedObjectPool<K,T>, GenericKeyedObjectPoolMBean<K> {
/**
* Create a new <code>GenericKeyedObjectPool</code> using defaults from
     * {@link GenericKeyedObjectPoolConfig}.
     *
     * @param factory the factory to be used to create, destroy and validate
     *                object instances managed by this pool
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory<K,T> factory) {
this(factory, new GenericKeyedObjectPoolConfig());
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using a specific
* configuration.
     *
     * @param factory the factory to be used to create, destroy and validate
     *                object instances managed by this pool
* @param config The configuration to use for this pool instance. The
* configuration is used by value. Subsequent changes to
* the configuration object will not be reflected in the
* pool.
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory<K,T> factory,
GenericKeyedObjectPoolConfig config) {
super(config, ONAME_BASE, config.getJmxNamePrefix());
if (factory == null) {
throw new IllegalArgumentException("factory may not be null");
}
this.factory = factory;
setConfig(config);
        startEvictor(getTimeBetweenEvictionRunsMillis());
}
/**
* Returns the limit on the number of object instances allocated by the pool
* (checked out or idle), per key. When the limit is reached, the sub-pool
* is said to be exhausted. A negative value indicates no limit.
*
* @return the limit on the number of active instances per key
*
* @see #setMaxTotalPerKey
*/
@Override
public int getMaxTotalPerKey() {
return maxTotalPerKey;
}
/**
* Sets the limit on the number of object instances allocated by the pool
* (checked out or idle), per key. When the limit is reached, the sub-pool
* is said to be exhausted. A negative value indicates no limit.
*
* @param maxTotalPerKey the limit on the number of active instances per key
*
* @see #getMaxTotalPerKey
*/
public void setMaxTotalPerKey(int maxTotalPerKey) {
this.maxTotalPerKey = maxTotalPerKey;
}
/**
* Returns the cap on the number of "idle" instances per key in the pool.
* If maxIdle is set too low on heavily loaded systems it is possible you
* will see objects being destroyed and almost immediately new objects
* being created. This is a result of the active threads momentarily
     * returning objects faster than they are requesting them, causing the
     * number of idle objects to rise above maxIdle. The best value for maxIdle
     * for heavily loaded systems will vary but the default is a good starting
* point.
*
* @return the maximum number of "idle" instances that can be held in a
* given keyed sub-pool or a negative value if there is no limit
*
* @see #setMaxIdlePerKey
*/
@Override
public int getMaxIdlePerKey() {
return maxIdlePerKey;
}
/**
* Sets the cap on the number of "idle" instances per key in the pool.
* If maxIdle is set too low on heavily loaded systems it is possible you
* will see objects being destroyed and almost immediately new objects
* being created. This is a result of the active threads momentarily
     * returning objects faster than they are requesting them, causing the
     * number of idle objects to rise above maxIdle. The best value for maxIdle
     * for heavily loaded systems will vary but the default is a good starting
* point.
*
* @param maxIdlePerKey the maximum number of "idle" instances that can be
* held in a given keyed sub-pool. Use a negative value
* for no limit
*
* @see #getMaxIdlePerKey
*/
public void setMaxIdlePerKey(int maxIdlePerKey) {
this.maxIdlePerKey = maxIdlePerKey;
}
/**
* Sets the target for the minimum number of idle objects to maintain in
* each of the keyed sub-pools. This setting only has an effect if it is
* positive and {@link #getTimeBetweenEvictionRunsMillis()} is greater than
* zero. If this is the case, an attempt is made to ensure that each
* sub-pool has the required minimum number of instances during idle object
* eviction runs.
* <p>
* If the configured value of minIdlePerKey is greater than the configured
* value for maxIdlePerKey then the value of maxIdlePerKey will be used
* instead.
*
     * @param minIdlePerKey the minimum size of each keyed sub-pool
*
* @see #getMinIdlePerKey
* @see #getMaxIdlePerKey()
* @see #setTimeBetweenEvictionRunsMillis
*/
public void setMinIdlePerKey(int minIdlePerKey) {
this.minIdlePerKey = minIdlePerKey;
}
/**
* Returns the target for the minimum number of idle objects to maintain in
* each of the keyed sub-pools. This setting only has an effect if it is
* positive and {@link #getTimeBetweenEvictionRunsMillis()} is greater than
* zero. If this is the case, an attempt is made to ensure that each
* sub-pool has the required minimum number of instances during idle object
* eviction runs.
* <p>
* If the configured value of minIdlePerKey is greater than the configured
* value for maxIdlePerKey then the value of maxIdlePerKey will be used
* instead.
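     * For example, with {@code minIdlePerKey == 5} and
     * {@code maxIdlePerKey == 3}, this method returns 3.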
*
     * @return minimum size of each keyed sub-pool
*
* @see #setTimeBetweenEvictionRunsMillis
*/
@Override
public int getMinIdlePerKey() {
int maxIdlePerKey = getMaxIdlePerKey();
if (this.minIdlePerKey > maxIdlePerKey) {
return maxIdlePerKey;
} else {
return minIdlePerKey;
}
}
/**
* Sets the configuration.
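     * <p>
     * A sketch of typical use (the setters shown exist on
     * {@link GenericKeyedObjectPoolConfig}):
     * <pre>{@code
     * GenericKeyedObjectPoolConfig conf = new GenericKeyedObjectPoolConfig();
     * conf.setMaxTotalPerKey(16);
     * conf.setTestOnBorrow(true);
     * pool.setConfig(conf);
     * }</pre>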
*
* @param conf the new configuration to use.
*
* @see GenericKeyedObjectPoolConfig
*/
public void setConfig(GenericKeyedObjectPoolConfig conf) {
setLifo(conf.getLifo());
setMaxIdlePerKey(conf.getMaxIdlePerKey());
setMaxTotalPerKey(conf.getMaxTotalPerKey());
setMaxTotal(conf.getMaxTotal());
setMinIdlePerKey(conf.getMinIdlePerKey());
setMaxWaitMillis(conf.getMaxWaitMillis());
setBlockWhenExhausted(conf.getBlockWhenExhausted());
setTestOnBorrow(conf.getTestOnBorrow());
setTestOnReturn(conf.getTestOnReturn());
setTestWhileIdle(conf.getTestWhileIdle());
setNumTestsPerEvictionRun(conf.getNumTestsPerEvictionRun());
setMinEvictableIdleTimeMillis(conf.getMinEvictableIdleTimeMillis());
setSoftMinEvictableIdleTimeMillis(
conf.getSoftMinEvictableIdleTimeMillis());
setTimeBetweenEvictionRunsMillis(
conf.getTimeBetweenEvictionRunsMillis());
setEvictionPolicyClassName(conf.getEvictionPolicyClassName());
}
/**
* Obtain a reference to the factory used to create, destroy and validate
* the objects used by this pool.
*
* @return the factory
*/
public KeyedPoolableObjectFactory<K, T> getFactory() {
return factory;
}
/**
* Equivalent to <code>{@link #borrowObject(Object, long) borrowObject}(key,
* {@link #getMaxWaitMillis()})</code>.
*/
@Override
public T borrowObject(K key) throws Exception {
return borrowObject(key, getMaxWaitMillis());
}
/**
* Borrows an object from the sub-pool associated with the given key using
* the specified waiting time which only applies if
* {@link #getBlockWhenExhausted()} is true.
* <p>
* If there is one or more idle instances available in the sub-pool
* associated with the given key, then an idle instance will be selected
* based on the value of {@link #getLifo()}, activated and returned. If
* activation fails, or {@link #getTestOnBorrow() testOnBorrow} is set to
* <code>true</code> and validation fails, the instance is destroyed and the
* next available instance is examined. This continues until either a valid
* instance is returned or there are no more idle instances available.
* <p>
* If there are no idle instances available in the sub-pool associated with
* the given key, behavior depends on the {@link #getMaxTotalPerKey()
* maxTotalPerKey}, {@link #getMaxTotal() maxTotal}, and (if applicable)
* {@link #getBlockWhenExhausted()} and the value passed in to the
* <code>borrowMaxWait</code> parameter. If the number of instances checked
* out from the sub-pool under the given key is less than
* <code>maxTotalPerKey</code> and the total number of instances in
* circulation (under all keys) is less than <code>maxTotal</code>, a new
* instance is created, activated and (if applicable) validated and returned
* to the caller.
* <p>
* If the associated sub-pool is exhausted (no available idle instances and
* no capacity to create new ones), this method will either block
* ({@link #getBlockWhenExhausted()} is true) or throw a
* <code>NoSuchElementException</code>
* ({@link #getBlockWhenExhausted()} is false).
* The length of time that this method will block when
* {@link #getBlockWhenExhausted()} is true is determined by the value
* passed in to the <code>borrowMaxWait</code> parameter.
* <p>
* When <code>maxTotal</code> is set to a positive value and this method is
* invoked when at the limit with no idle instances available, an attempt is
* made to create room by clearing the oldest 15% of the elements from the
* keyed sub-pools.
* <p>
* When the pool is exhausted, multiple calling threads may be
* simultaneously blocked waiting for instances to become available. A
* "fairness" algorithm has been implemented to ensure that threads receive
* available instances in request arrival order.
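     * <p>
     * For example, to wait at most five seconds for an instance under a given
     * key (a sketch; {@code MyType} and the key value are placeholders):
     * <pre>{@code
     * MyType obj = pool.borrowObject("some-key", 5000);
     * try {
     *     // use obj
     * } finally {
     *     pool.returnObject("some-key", obj);
     * }
     * }</pre>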
*
* @param key pool key
* @param borrowMaxWait maximum amount of time to wait (in milliseconds)
*
* @return object instance from the keyed pool
*
* @throws NoSuchElementException if a keyed object instance cannot be
     *                                returned
     * @throws Exception if a keyed object instance cannot be created or
     *                   activated due to a factory failure
*/
public T borrowObject(K key, long borrowMaxWait) throws Exception {
assertOpen();
PooledObject<T> p = null;
// Get local copy of current config so it is consistent for entire
// method execution
boolean blockWhenExhausted = getBlockWhenExhausted();
boolean create;
long waitTime = 0;
ObjectDeque<T> objectDeque = register(key);
try {
while (p == null) {
create = false;
if (blockWhenExhausted) {
if (objectDeque != null) {
p = objectDeque.getIdleObjects().pollFirst();
}
if (p == null) {
create = true;
p = create(key);
}
if (p == null && objectDeque != null) {
if (borrowMaxWait < 0) {
p = objectDeque.getIdleObjects().takeFirst();
} else {
waitTime = System.currentTimeMillis();
p = objectDeque.getIdleObjects().pollFirst(
borrowMaxWait, TimeUnit.MILLISECONDS);
waitTime = System.currentTimeMillis() - waitTime;
}
}
if (p == null) {
throw new NoSuchElementException(
"Timeout waiting for idle object");
}
if (!p.allocate()) {
p = null;
}
} else {
if (objectDeque != null) {
p = objectDeque.getIdleObjects().pollFirst();
}
if (p == null) {
create = true;
p = create(key);
}
if (p == null) {
throw new NoSuchElementException("Pool exhausted");
}
if (!p.allocate()) {
p = null;
}
}
if (p != null) {
try {
factory.activateObject(key, p.getObject());
} catch (Exception e) {
try {
destroy(key, p, true);
} catch (Exception e1) {
// Ignore - activation failure is more important
}
p = null;
if (create) {
NoSuchElementException nsee = new NoSuchElementException(
"Unable to activate object");
nsee.initCause(e);
throw nsee;
}
}
if (p != null && getTestOnBorrow()) {
boolean validate = false;
Throwable validationThrowable = null;
try {
validate = factory.validateObject(key, p.getObject());
                    } catch (Throwable t) {
                        PoolUtils.checkRethrow(t);
                        // Remember the cause so it can be reported via the
                        // NoSuchElementException thrown below
                        validationThrowable = t;
                    }
if (!validate) {
try {
destroy(key, p, true);
destroyedByBorrowValidationCount.incrementAndGet();
} catch (Exception e) {
// Ignore - validation failure is more important
}
p = null;
if (create) {
NoSuchElementException nsee = new NoSuchElementException(
"Unable to validate object");
nsee.initCause(validationThrowable);
throw nsee;
}
}
}
}
}
} finally {
deregister(key);
}
updateStatsBorrow(p, waitTime);
return p.getObject();
}
/**
* Returns an object to a keyed sub-pool.
* <p>
* If {@link #getMaxIdlePerKey() maxIdle} is set to a positive value and the
* number of idle instances under the given key has reached this value, the
* returning instance is destroyed.
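     * For example, with {@code maxIdlePerKey == 2} and two instances already
     * idle under a key, a third instance returned under that key is destroyed
     * rather than pooled.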
* <p>
* If {@link #getTestOnReturn() testOnReturn} == true, the returning
* instance is validated before being returned to the idle instance sub-pool
* under the given key. In this case, if validation fails, the instance is
* destroyed.
* <p>
* Exceptions encountered destroying objects for any reason are swallowed
* but remain accessible via {@link #getSwallowedExceptions()}.
*
* @param key pool key
* @param obj instance to return to the keyed pool
*
* @throws IllegalStateException if an object is returned to the pool that
* was not borrowed from it or if an object is
* returned to the pool multiple times
*/
@Override
public void returnObject(K key, T obj) {
        ObjectDeque<T> objectDeque = poolMap.get(key);
        if (objectDeque == null) {
            throw new IllegalStateException(
                    "No keyed pool found under the given key: " + key);
        }
        PooledObject<T> p = objectDeque.getAllObjects().get(obj);
if (p == null) {
throw new IllegalStateException(
"Returned object not currently part of this pool");
}
long activeTime = p.getActiveTimeMillis();
if (getTestOnReturn()) {
if (!factory.validateObject(key, obj)) {
try {
destroy(key, p, true);
} catch (Exception e) {
swallowException(e);
}
updateStatsReturn(activeTime);
return;
}
}
try {
factory.passivateObject(key, obj);
} catch (Exception e1) {
swallowException(e1);
try {
destroy(key, p, true);
} catch (Exception e) {
swallowException(e);
}
updateStatsReturn(activeTime);
return;
}
if (!p.deallocate()) {
throw new IllegalStateException(
"Object has already been retured to this pool");
}
int maxIdle = getMaxIdlePerKey();
LinkedBlockingDeque<PooledObject<T>> idleObjects =
objectDeque.getIdleObjects();
if (isClosed() || maxIdle > -1 && maxIdle <= idleObjects.size()) {
try {
destroy(key, p, true);
} catch (Exception e) {
swallowException(e);
}
} else {
if (getLifo()) {
idleObjects.addFirst(p);
} else {
idleObjects.addLast(p);
}
}
if (hasBorrowWaiters()) {
reuseCapacity();
}
updateStatsReturn(activeTime);
}
/**
* {@inheritDoc}
* <p>
     * Invoking this method decrements the active count associated with
     * the given keyed pool and attempts to destroy <code>obj</code>.
*
* @param key pool key
* @param obj instance to invalidate
*
* @throws Exception if an exception occurs destroying the
* object
* @throws IllegalStateException if obj does not belong to the pool
* under the given key
*/
@Override
public void invalidateObject(K key, T obj) throws Exception {
        ObjectDeque<T> objectDeque = poolMap.get(key);
        if (objectDeque == null) {
            throw new IllegalStateException(
                    "No keyed pool found under the given key: " + key);
        }
        PooledObject<T> p = objectDeque.getAllObjects().get(obj);
if (p == null) {
throw new IllegalStateException(
"Object not currently part of this pool");
}
destroy(key, p, true);
}
/**
* Clears any objects sitting idle in the pool by removing them from the
* idle instance sub-pools and then invoking the configured
* PoolableObjectFactory's
* {@link KeyedPoolableObjectFactory#destroyObject(Object, Object)} method
* on each idle instance.
* <p>
* Implementation notes:
* <ul>
* <li>This method does not destroy or effect in any way instances that are
* checked out when it is invoked.</li>
* <li>Invoking this method does not prevent objects being returned to the
* idle instance pool, even during its execution. Additional instances may
* be returned while removed items are being destroyed.</li>
* <li>Exceptions encountered destroying idle instances are swallowed but
* remain accessible via {@link #getSwallowedExceptions()}.</li>
* </ul>
*/
@Override
public void clear() {
Iterator<K> iter = poolMap.keySet().iterator();
while (iter.hasNext()) {
clear(iter.next());
}
}
/**
* Clears the specified sub-pool, removing all pooled instances
* corresponding to the given <code>key</code>. Exceptions encountered
* destroying idle instances are swallowed but remain accessible via
* {@link #getSwallowedExceptions()}.
*
* @param key the key to clear
*/
@Override
public void clear(K key) {
ObjectDeque<T> objectDeque = register(key);
try {
LinkedBlockingDeque<PooledObject<T>> idleObjects =
objectDeque.getIdleObjects();
PooledObject<T> p = idleObjects.poll();
while (p != null) {
try {
destroy(key, p, true);
} catch (Exception e) {
swallowException(e);
}
p = idleObjects.poll();
}
} finally {
deregister(key);
}
}
/**
     * Returns the total number of instances currently borrowed from this pool but
* not yet returned.
*/
@Override
public int getNumActive() {
return numTotal.get() - getNumIdle();
}
@Override
public int getNumIdle() {
Iterator<ObjectDeque<T>> iter = poolMap.values().iterator();
int result = 0;
while (iter.hasNext()) {
result += iter.next().getIdleObjects().size();
}
return result;
}
/**
* Returns the number of instances currently borrowed from but not yet
* returned to the sub-pool corresponding to the given <code>key</code>.
*
* @param key the key to query
*/
@Override
public int getNumActive(K key) {
final ObjectDeque<T> objectDeque = poolMap.get(key);
if (objectDeque != null) {
return objectDeque.getAllObjects().size() -
objectDeque.getIdleObjects().size();
} else {
return 0;
}
}
/**
* Returns the number of idle instances in the sub-pool corresponding to the
* given <code>key</code>.
*
* @param key the key to query
*/
@Override
public int getNumIdle(K key) {
final ObjectDeque<T> objectDeque = poolMap.get(key);
return objectDeque != null ? objectDeque.getIdleObjects().size() : 0;
}
/**
* Closes the keyed object pool. Once the pool is closed,
* {@link #borrowObject(Object)} will fail with IllegalStateException, but
* {@link #returnObject(Object, Object)} and
* {@link #invalidateObject(Object, Object)} will continue to work, with
* returned objects destroyed on return.
* <p>
* Destroys idle instances in the pool by invoking {@link #clear()}.
*/
@Override
public void close() {
if (isClosed()) {
return;
}
synchronized (closeLock) {
if (isClosed()) {
return;
}
// Stop the evictor before the pool is closed since evict() calls
// assertOpen()
startEvictor(-1L);
closed = true;
// This clear removes any idle objects
clear();
jmxUnregister();
// Release any threads that were waiting for an object
Iterator<ObjectDeque<T>> iter = poolMap.values().iterator();
while (iter.hasNext()) {
iter.next().getIdleObjects().interuptTakeWaiters();
}
// This clear cleans up the keys now any waiting threads have been
// interrupted
clear();
}
}
/**
     * Clears the oldest 15% of the idle objects in the pool. The method sorts
     * the idle objects into a TreeMap and then iterates the first 15% for
     * removal.
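     * The number removed is {@code (int) (idleCount * 0.15) + 1}; for example,
     * with 20 idle objects across all keys, 4 instances are destroyed.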
*/
public void clearOldest() {
// build sorted map of idle objects
final Map<PooledObject<T>, K> map = new TreeMap<PooledObject<T>, K>();
for (K k : poolMap.keySet()) {
ObjectDeque<T> queue = poolMap.get(k);
// Protect against possible NPE if key has been removed in another
// thread. Not worth locking the keys while this loop completes.
if (queue != null) {
final LinkedBlockingDeque<PooledObject<T>> idleObjects =
queue.getIdleObjects();
for (PooledObject<T> p : idleObjects) {
// each item into the map using the PooledObject object as the
// key. It then gets sorted based on the idle time
map.put(p, k);
}
}
}
// Now iterate created map and kill the first 15% plus one to account
// for zero
int itemsToRemove = ((int) (map.size() * 0.15)) + 1;
Iterator<Map.Entry<PooledObject<T>, K>> iter =
map.entrySet().iterator();
while (iter.hasNext() && itemsToRemove > 0) {
Map.Entry<PooledObject<T>, K> entry = iter.next();
// kind of backwards on naming. In the map, each key is the
// PooledObject because it has the ordering with the timestamp
// value. Each value that the key references is the key of the
// list it belongs to.
K key = entry.getValue();
PooledObject<T> p = entry.getKey();
// Assume the destruction succeeds
boolean destroyed = true;
try {
destroyed = destroy(key, p, false);
} catch (Exception e) {
swallowException(e);
}
if (destroyed) {
itemsToRemove--;
}
}
}
/**
* Attempt to create one new instance to serve from the most heavily
* loaded pool that can add a new instance.
*
* This method exists to ensure liveness in the pool when threads are
* parked waiting and capacity to create instances under the requested keys
* subsequently becomes available.
*
* This method is not guaranteed to create an instance and its selection
* of the most loaded pool that can create an instance may not always be
* correct, since it does not lock the pool and instances may be created,
* borrowed, returned or destroyed by other threads while it is executing.
*/
private void reuseCapacity() {
final int maxTotalPerKey = getMaxTotalPerKey();
// Find the most loaded pool that could take a new instance
int maxQueueLength = 0;
LinkedBlockingDeque<PooledObject<T>> mostLoaded = null;
K loadedKey = null;
for (K k : poolMap.keySet()) {
final ObjectDeque<T> deque = poolMap.get(k);
if (deque != null) {
final LinkedBlockingDeque<PooledObject<T>> pool = deque.getIdleObjects();
final int queueLength = pool.getTakeQueueLength();
if (getNumActive(k) < maxTotalPerKey && queueLength > maxQueueLength) {
maxQueueLength = queueLength;
mostLoaded = pool;
loadedKey = k;
}
}
}
// Attempt to add an instance to the most loaded pool
if (mostLoaded != null) {
register(loadedKey);
try {
PooledObject<T> p = create(loadedKey);
if (p != null) {
addIdleObject(loadedKey, p);
}
} catch (Exception e) {
swallowException(e);
} finally {
deregister(loadedKey);
}
}
}
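    /*
     * Returns true if at least one thread is currently blocked waiting to
     * borrow an instance from any of the keyed sub-pools.
     */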
private boolean hasBorrowWaiters() {
for (K k : poolMap.keySet()) {
final ObjectDeque<T> deque = poolMap.get(k);
if (deque != null) {
final LinkedBlockingDeque<PooledObject<T>> pool =
deque.getIdleObjects();
if(pool.hasTakeWaiters()) {
return true;
}
}
}
return false;
}
/**
* {@inheritDoc}
* <p>
* Successive activations of this method examine objects in keyed sub-pools
* in sequence, cycling through the keys and examining objects in
* oldest-to-youngest order within the keyed sub-pools.
*/
@Override
public void evict() throws Exception {
assertOpen();
if (getNumIdle() == 0) {
return;
}
PooledObject<T> underTest = null;
EvictionPolicy<T> evictionPolicy = getEvictionPolicy();
synchronized (evictionLock) {
EvictionConfig evictionConfig = new EvictionConfig(
getMinEvictableIdleTimeMillis(),
getSoftMinEvictableIdleTimeMillis(),
getMinIdlePerKey());
boolean testWhileIdle = getTestWhileIdle();
LinkedBlockingDeque<PooledObject<T>> idleObjects = null;
for (int i = 0, m = getNumTests(); i < m; i++) {
if(evictionIterator == null || !evictionIterator.hasNext()) {
if (evictionKeyIterator == null ||
!evictionKeyIterator.hasNext()) {
List<K> keyCopy = new ArrayList<K>();
Lock readLock = keyLock.readLock();
readLock.lock();
try {
keyCopy.addAll(poolKeyList);
} finally {
readLock.unlock();
}
evictionKeyIterator = keyCopy.iterator();
}
while (evictionKeyIterator.hasNext()) {
evictionKey = evictionKeyIterator.next();
ObjectDeque<T> objectDeque = poolMap.get(evictionKey);
if (objectDeque == null) {
continue;
}
idleObjects = objectDeque.getIdleObjects();
if (getLifo()) {
evictionIterator = idleObjects.descendingIterator();
} else {
evictionIterator = idleObjects.iterator();
}
if (evictionIterator.hasNext()) {
break;
}
evictionIterator = null;
}
}
if (evictionIterator == null) {
// Pools exhausted
return;
}
try {
underTest = evictionIterator.next();
} catch (NoSuchElementException nsee) {
// Object was borrowed in another thread
// Don't count this as an eviction test so reduce i;
i--;
evictionIterator = null;
continue;
}
if (!underTest.startEvictionTest()) {
// Object was borrowed in another thread
// Don't count this as an eviction test so reduce i;
i--;
continue;
}
if (evictionPolicy.evict(evictionConfig, underTest,
poolMap.get(evictionKey).getIdleObjects().size())) {
destroy(evictionKey, underTest, true);
destroyedByEvictorCount.incrementAndGet();
} else {
if (testWhileIdle) {
boolean active = false;
try {
factory.activateObject(evictionKey,
underTest.getObject());
active = true;
} catch (Exception e) {
destroy(evictionKey, underTest, true);
destroyedByEvictorCount.incrementAndGet();
}
if (active) {
if (!factory.validateObject(evictionKey,
underTest.getObject())) {
destroy(evictionKey, underTest, true);
destroyedByEvictorCount.incrementAndGet();
} else {
try {
factory.passivateObject(evictionKey,
underTest.getObject());
} catch (Exception e) {
destroy(evictionKey, underTest, true);
destroyedByEvictorCount.incrementAndGet();
}
}
}
}
if (!underTest.endEvictionTest(idleObjects)) {
// TODO - May need to add code here once additional
// states are used
}
}
}
}
}
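    /*
     * Attempts to create a new instance for the given key. Returns null when
     * the per-key limit has been reached, or when the overall limit has been
     * reached and no idle instances can be reclaimed via clearOldest().
     */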
private PooledObject<T> create(K key) throws Exception {
int maxTotalPerKey = getMaxTotalPerKey(); // Per key
int maxTotal = getMaxTotal(); // All keys
// Check against the overall limit
boolean loop = true;
while (loop) {
int newNumTotal = numTotal.incrementAndGet();
if (maxTotal > -1 && newNumTotal > maxTotal) {
numTotal.decrementAndGet();
if (getNumIdle() == 0) {
return null;
} else {
clearOldest();
}
} else {
loop = false;
}
}
ObjectDeque<T> objectDeque = poolMap.get(key);
long newCreateCount = objectDeque.getCreateCount().incrementAndGet();
// Check against the per key limit
if (maxTotalPerKey > -1 && newCreateCount > maxTotalPerKey ||
newCreateCount > Integer.MAX_VALUE) {
numTotal.decrementAndGet();
objectDeque.getCreateCount().decrementAndGet();
return null;
}
T t = null;
try {
t = factory.makeObject(key);
} catch (Exception e) {
numTotal.decrementAndGet();
throw e;
}
PooledObject<T> p = new PooledObject<T>(t);
createdCount.incrementAndGet();
objectDeque.getAllObjects().put(t, p);
return p;
}
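    /*
     * Destroys a pooled instance and removes it from the sub-pool. When
     * "always" is false the instance is only destroyed if it was found in the
     * idle queue; returns true if destruction took place.
     */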
private boolean destroy(K key, PooledObject<T> toDestroy, boolean always)
throws Exception {
ObjectDeque<T> objectDeque = register(key);
try {
boolean isIdle = objectDeque.getIdleObjects().remove(toDestroy);
if (isIdle || always) {
objectDeque.getAllObjects().remove(toDestroy.getObject());
toDestroy.invalidate();
try {
factory.destroyObject(key, toDestroy.getObject());
} finally {
objectDeque.getCreateCount().decrementAndGet();
destroyedCount.incrementAndGet();
numTotal.decrementAndGet();
}
return true;
} else {
return false;
}
} finally {
deregister(key);
}
}
/*
* register() and deregister() must always be used as a pair.
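     * Typical usage (a sketch):
     *   ObjectDeque<T> deque = register(key);
     *   try {
     *       // ... work with the sub-pool ...
     *   } finally {
     *       deregister(key);
     *   }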
*/
private ObjectDeque<T> register(K k) {
Lock lock = keyLock.readLock();
ObjectDeque<T> objectDeque = null;
try {
lock.lock();
objectDeque = poolMap.get(k);
if (objectDeque == null) {
// Upgrade to write lock
lock.unlock();
lock = keyLock.writeLock();
lock.lock();
objectDeque = poolMap.get(k);
if (objectDeque == null) {
objectDeque = new ObjectDeque<T>();
objectDeque.getNumInterested().incrementAndGet();
// NOTE: Keys must always be added to both poolMap and
// poolKeyList at the same time while protected by
// keyLock.writeLock()
poolMap.put(k, objectDeque);
poolKeyList.add(k);
} else {
objectDeque.getNumInterested().incrementAndGet();
}
} else {
objectDeque.getNumInterested().incrementAndGet();
}
} finally {
lock.unlock();
}
return objectDeque;
}
/*
* register() and deregister() must always be used as a pair.
*/
private void deregister(K k) {
ObjectDeque<T> objectDeque;
objectDeque = poolMap.get(k);
long numInterested = objectDeque.getNumInterested().decrementAndGet();
if (numInterested == 0 && objectDeque.getCreateCount().get() == 0) {
// Potential to remove key
Lock writeLock = keyLock.writeLock();
writeLock.lock();
try {
if (objectDeque.getCreateCount().get() == 0 &&
objectDeque.getNumInterested().get() == 0) {
// NOTE: Keys must always be removed from both poolMap and
// poolKeyList at the same time while protected by
// keyLock.writeLock()
poolMap.remove(k);
poolKeyList.remove(k);
}
} finally {
writeLock.unlock();
}
}
}
@Override
void ensureMinIdle() throws Exception {
int minIdlePerKey = getMinIdlePerKey();
if (minIdlePerKey < 1) {
return;
}
for (K k : poolMap.keySet()) {
ensureMinIdle(k);
}
}
private void ensureMinIdle(K key) throws Exception {
// Calculate current pool objects
ObjectDeque<T> objectDeque = poolMap.get(key);
// Protect against NPEs in case the key has been removed
if (objectDeque == null) {
return;
}
// this method isn't synchronized so the
// calculateDeficit is done at the beginning
// as a loop limit and a second time inside the loop
// to stop when another thread already returned the
// needed objects
int deficit = calculateDeficit(objectDeque);
for (int i = 0; i < deficit && calculateDeficit(objectDeque) > 0; i++) {
addObject(key);
}
}
/**
* Create an object using the {@link KeyedPoolableObjectFactory#makeObject
* factory}, passivate it, and then place it in the idle object pool.
* <code>addObject</code> is useful for "pre-loading" a pool with idle
* objects.
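     * <p>
     * A minimal pre-loading sketch (a {@code pool} reference is assumed):
     * <pre>
     * pool.addObject(key); // repeat to build up several idle instances
     * </pre>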
*
* @param key the key a new instance should be added to
*
* @throws Exception when {@link KeyedPoolableObjectFactory#makeObject}
* fails.
*/
@Override
public void addObject(K key) throws Exception {
assertOpen();
register(key);
try {
PooledObject<T> p = create(key);
addIdleObject(key, p);
} finally {
deregister(key);
}
}
private void addIdleObject(K key, PooledObject<T> p) throws Exception {
if (p != null) {
factory.passivateObject(key, p.getObject());
LinkedBlockingDeque<PooledObject<T>> idleObjects =
poolMap.get(key).getIdleObjects();
if (getLifo()) {
idleObjects.addFirst(p);
} else {
idleObjects.addLast(p);
}
}
}
/**
* Registers a key for pool control and ensures that
* {@link #getMinIdlePerKey()} idle instances are created.
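     * <p>
     * A minimal sketch, assuming {@code minIdlePerKey} has been configured:
     * <pre>
     * pool.setMinIdlePerKey(2);
     * pool.preparePool(key); // ensures up to 2 idle instances under key
     * </pre>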
*
* @param key - The key to register for pool control.
*/
public void preparePool(K key) throws Exception {
int minIdlePerKey = getMinIdlePerKey();
if (minIdlePerKey < 1) {
return;
}
ensureMinIdle(key);
}
private int getNumTests() {
int totalIdle = getNumIdle();
int numTests = getNumTestsPerEvictionRun();
if (numTests >= 0) {
return Math.min(numTests, totalIdle);
}
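        // A negative numTests means "test a fraction of the idle objects per
        // run", e.g. numTests = -2 with 5 idle objects gives ceil(5 / 2.0) = 3.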
        return (int) Math.ceil(totalIdle / Math.abs((double) numTests));
}
private int calculateDeficit(ObjectDeque<T> objectDeque) {
if (objectDeque == null) {
return getMinIdlePerKey();
}
// Used more than once so keep a local copy so the value is consistent
int maxTotal = getMaxTotal();
int maxTotalPerKey = getMaxTotalPerKey();
        int objectDeficit;
        // Calculate the number of objects that need to be created to reach
        // the configured minIdle, without exceeding maxTotalPerKey
        objectDeficit = getMinIdlePerKey() - objectDeque.getIdleObjects().size();
        if (maxTotalPerKey > 0) {
            int growLimit = Math.max(0,
                    maxTotalPerKey - objectDeque.getIdleObjects().size());
            objectDeficit = Math.min(objectDeficit, growLimit);
        }
        // Take the maxTotal limit into account
        if (maxTotal > 0) {
            int growLimit = Math.max(0, maxTotal - getNumActive() - getNumIdle());
            objectDeficit = Math.min(objectDeficit, growLimit);
        }
        return objectDeficit;
}
//--- JMX support ----------------------------------------------------------
@Override
public Map<String,Integer> getNumActivePerKey() {
HashMap<String,Integer> result = new HashMap<String,Integer>();
Iterator<Entry<K,ObjectDeque<T>>> iter = poolMap.entrySet().iterator();
while (iter.hasNext()) {
Entry<K,ObjectDeque<T>> entry = iter.next();
if (entry != null) {
K key = entry.getKey();
ObjectDeque<T> objectDequeue = entry.getValue();
if (key != null && objectDequeue != null) {
result.put(key.toString(), Integer.valueOf(
objectDequeue.getAllObjects().size() -
objectDequeue.getIdleObjects().size()));
}
}
}
return result;
}
/**
* Return an estimate of the number of threads currently blocked waiting for
* an object from the pool. This is intended for monitoring only, not for
* synchronization control.
*/
@Override
public int getNumWaiters() {
int result = 0;
if (getBlockWhenExhausted()) {
Iterator<ObjectDeque<T>> iter = poolMap.values().iterator();
while (iter.hasNext()) {
// Assume no overflow
result += iter.next().getIdleObjects().getTakeQueueLength();
}
}
return result;
}
/**
* Return an estimate of the number of threads currently blocked waiting for
* an object from the pool for the given key. This is intended for
* monitoring only, not for synchronization control.
*/
@Override
public int getNumWaiters(K key) {
if (getBlockWhenExhausted()) {
final ObjectDeque<T> objectDeque = poolMap.get(key);
if (objectDeque == null) {
return 0;
} else {
return objectDeque.getIdleObjects().getTakeQueueLength();
}
} else {
return 0;
}
}
@Override
public List<K> getKeys() {
List<K> keyCopy = new ArrayList<K>();
Lock readLock = keyLock.readLock();
readLock.lock();
try {
keyCopy.addAll(poolKeyList);
} finally {
readLock.unlock();
}
return keyCopy;
}
//--- inner classes ----------------------------------------------
/*
* Maintains information on the per key queue for a given key.
*/
private class ObjectDeque<S> {
private final LinkedBlockingDeque<PooledObject<S>> idleObjects =
new LinkedBlockingDeque<PooledObject<S>>();
/*
* Number of instances created - number destroyed.
* Invariant: createCount <= maxTotalPerKey
*/
private final AtomicInteger createCount = new AtomicInteger(0);
/*
* The map is keyed on pooled instances. Note: pooled instances
* <em>must</em> be distinguishable by equals for this structure to
* work properly.
*/
private final Map<S, PooledObject<S>> allObjects =
new ConcurrentHashMap<S, PooledObject<S>>();
/*
* Number of threads with registered interest in this key.
* register(K) increments this counter and deRegister(K) decrements it.
* Invariant: empty keyed pool will not be dropped unless numInterested
* is 0.
*/
private final AtomicLong numInterested = new AtomicLong(0);
public LinkedBlockingDeque<PooledObject<S>> getIdleObjects() {
return idleObjects;
}
public AtomicInteger getCreateCount() {
return createCount;
}
public AtomicLong getNumInterested() {
return numInterested;
}
public Map<S, PooledObject<S>> getAllObjects() {
return allObjects;
}
}
//--- configuration attributes ---------------------------------------------
private volatile int maxIdlePerKey =
GenericKeyedObjectPoolConfig.DEFAULT_MAX_IDLE_PER_KEY;
private volatile int minIdlePerKey =
GenericKeyedObjectPoolConfig.DEFAULT_MIN_IDLE_PER_KEY;
private volatile int maxTotalPerKey =
GenericKeyedObjectPoolConfig.DEFAULT_MAX_TOTAL_PER_KEY;
private final KeyedPoolableObjectFactory<K,T> factory;
//--- internal attributes --------------------------------------------------
/*
* My hash of sub-pools (ObjectQueue). The list of keys <b>must</b> be kept
* in step with {@link #poolKeyList} using {@link #keyLock} to ensure any
* changes to the list of current keys is made in a thread-safe manner.
*/
private final Map<K,ObjectDeque<T>> poolMap =
new ConcurrentHashMap<K,ObjectDeque<T>>(); // @GuardedBy("keyLock") for write access (and some read access)
/*
* List of pool keys - used to control eviction order. The list of keys
* <b>must</b> be kept in step with {@link #poolMap} using {@link #keyLock}
* to ensure any changes to the list of current keys is made in a
* thread-safe manner.
*/
private final List<K> poolKeyList = new ArrayList<K>(); // @GuardedBy("keyLock")
private final ReadWriteLock keyLock = new ReentrantReadWriteLock(true);
/*
* The combined count of the currently active objects for all keys and those
* in the process of being created. Under load, it may exceed
* {@link #maxTotal} but there will never be more than {@link #maxTotal}
* created at any one time.
*/
private final AtomicInteger numTotal = new AtomicInteger(0);
private Iterator<K> evictionKeyIterator = null; // @GuardedBy("evictionLock")
private K evictionKey = null; // @GuardedBy("evictionLock")
// JMX specific attributes
private static final String ONAME_BASE =
"org.apache.commoms.pool2:type=GenericKeyedObjectPool,name=";
}
| src/main/java/org/apache/commons/pool2/impl/GenericKeyedObjectPool.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.pool2.impl;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.NoSuchElementException;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.commons.pool2.KeyedObjectPool;
import org.apache.commons.pool2.KeyedPoolableObjectFactory;
import org.apache.commons.pool2.PoolUtils;
/**
* A configurable <code>KeyedObjectPool</code> implementation.
* <p>
* When coupled with the appropriate {@link KeyedPoolableObjectFactory},
* <code>GenericKeyedObjectPool</code> provides robust pooling functionality for
* keyed objects. A <code>GenericKeyedObjectPool</code> can be viewed as a map
* of sub-pools, keyed on the (unique) key values provided to the
* {@link #preparePool preparePool}, {@link #addObject addObject} or
* {@link #borrowObject borrowObject} methods. Each time a new key value is
 * provided to one of these methods, a new sub-pool is created under the given
 * key to be managed by the containing <code>GenericKeyedObjectPool</code>.
* <p>
* Optionally, one may configure the pool to examine and possibly evict objects
* as they sit idle in the pool and to ensure that a minimum number of idle
* objects is maintained for each key. This is performed by an "idle object
* eviction" thread, which runs asynchronously. Caution should be used when
* configuring this optional feature. Eviction runs contend with client threads
* for access to objects in the pool, so if they run too frequently performance
* issues may result.
* <p>
* Implementation note: To prevent possible deadlocks, care has been taken to
* ensure that no call to a factory method will occur within a synchronization
* block. See POOL-125 and DBCP-44 for more information.
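 * <p>
 * A minimal usage sketch; the element type {@code MyResource} and the
 * {@code factory} instance are assumed, and exception handling is elided:
 * <pre>
 * GenericKeyedObjectPool&lt;String, MyResource&gt; pool =
 *         new GenericKeyedObjectPool&lt;String, MyResource&gt;(factory);
 * MyResource r = pool.borrowObject("key1");
 * try {
 *     // use r
 * } finally {
 *     pool.returnObject("key1", r);
 * }
 * </pre>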
*
* @see GenericObjectPool
*
* @param <K> The type of keys maintained by this pool.
* @param <T> Type of element pooled in this pool.
*
* @author Rodney Waldhoff
* @author Dirk Verbeeck
* @author Sandy McArthur
* @version $Revision$ $Date$
*/
public class GenericKeyedObjectPool<K,T> extends BaseGenericObjectPool<T>
implements KeyedObjectPool<K,T>, GenericKeyedObjectPoolMBean<K> {
/**
* Create a new <code>GenericKeyedObjectPool</code> using defaults from
* {@link GenericKeyedObjectPoolConfig}.
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory<K,T> factory) {
this(factory, new GenericKeyedObjectPoolConfig());
}
/**
* Create a new <code>GenericKeyedObjectPool</code> using a specific
* configuration.
*
* @param config The configuration to use for this pool instance. The
* configuration is used by value. Subsequent changes to
* the configuration object will not be reflected in the
* pool.
*/
public GenericKeyedObjectPool(KeyedPoolableObjectFactory<K,T> factory,
GenericKeyedObjectPoolConfig config) {
super(config, ONAME_BASE, config.getJmxNamePrefix());
if (factory == null) {
throw new IllegalArgumentException("factory may not be null");
}
this.factory = factory;
setConfig(config);
startEvictor(getMinEvictableIdleTimeMillis());
}
/**
* Returns the limit on the number of object instances allocated by the pool
* (checked out or idle), per key. When the limit is reached, the sub-pool
* is said to be exhausted. A negative value indicates no limit.
*
* @return the limit on the number of active instances per key
*
* @see #setMaxTotalPerKey
*/
@Override
public int getMaxTotalPerKey() {
return maxTotalPerKey;
}
/**
* Sets the limit on the number of object instances allocated by the pool
* (checked out or idle), per key. When the limit is reached, the sub-pool
* is said to be exhausted. A negative value indicates no limit.
*
* @param maxTotalPerKey the limit on the number of active instances per key
*
* @see #getMaxTotalPerKey
*/
public void setMaxTotalPerKey(int maxTotalPerKey) {
this.maxTotalPerKey = maxTotalPerKey;
}
/**
* Returns the cap on the number of "idle" instances per key in the pool.
* If maxIdle is set too low on heavily loaded systems it is possible you
* will see objects being destroyed and almost immediately new objects
* being created. This is a result of the active threads momentarily
     * returning objects faster than they are requesting them, causing the
     * number of idle objects to rise above maxIdle. The best value for maxIdle
     * for heavily loaded systems will vary but the default is a good starting
* point.
*
* @return the maximum number of "idle" instances that can be held in a
* given keyed sub-pool or a negative value if there is no limit
*
* @see #setMaxIdlePerKey
*/
@Override
public int getMaxIdlePerKey() {
return maxIdlePerKey;
}
/**
* Sets the cap on the number of "idle" instances per key in the pool.
* If maxIdle is set too low on heavily loaded systems it is possible you
* will see objects being destroyed and almost immediately new objects
* being created. This is a result of the active threads momentarily
     * returning objects faster than they are requesting them, causing the
     * number of idle objects to rise above maxIdle. The best value for maxIdle
     * for heavily loaded systems will vary but the default is a good starting
* point.
*
* @param maxIdlePerKey the maximum number of "idle" instances that can be
* held in a given keyed sub-pool. Use a negative value
* for no limit
*
* @see #getMaxIdlePerKey
*/
public void setMaxIdlePerKey(int maxIdlePerKey) {
this.maxIdlePerKey = maxIdlePerKey;
}
/**
* Sets the target for the minimum number of idle objects to maintain in
* each of the keyed sub-pools. This setting only has an effect if it is
* positive and {@link #getTimeBetweenEvictionRunsMillis()} is greater than
* zero. If this is the case, an attempt is made to ensure that each
* sub-pool has the required minimum number of instances during idle object
* eviction runs.
* <p>
* If the configured value of minIdlePerKey is greater than the configured
* value for maxIdlePerKey then the value of maxIdlePerKey will be used
* instead.
*
* @param minIdlePerKey The minimum size of the each keyed pool
*
* @see #getMinIdlePerKey
* @see #getMaxIdlePerKey()
* @see #setTimeBetweenEvictionRunsMillis
*/
public void setMinIdlePerKey(int minIdlePerKey) {
this.minIdlePerKey = minIdlePerKey;
}
/**
* Returns the target for the minimum number of idle objects to maintain in
* each of the keyed sub-pools. This setting only has an effect if it is
* positive and {@link #getTimeBetweenEvictionRunsMillis()} is greater than
* zero. If this is the case, an attempt is made to ensure that each
* sub-pool has the required minimum number of instances during idle object
* eviction runs.
* <p>
* If the configured value of minIdlePerKey is greater than the configured
* value for maxIdlePerKey then the value of maxIdlePerKey will be used
* instead.
*
* @return minimum size of the each keyed pool
*
* @see #setTimeBetweenEvictionRunsMillis
*/
@Override
public int getMinIdlePerKey() {
int maxIdlePerKey = getMaxIdlePerKey();
if (this.minIdlePerKey > maxIdlePerKey) {
return maxIdlePerKey;
} else {
return minIdlePerKey;
}
}
/**
* Sets the configuration.
*
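     * A minimal sketch ({@code setMaxTotalPerKey} on the config object is
     * assumed):
     * <pre>
     * GenericKeyedObjectPoolConfig conf = new GenericKeyedObjectPoolConfig();
     * conf.setMaxTotalPerKey(8);
     * pool.setConfig(conf);
     * </pre>
     *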
* @param conf the new configuration to use.
*
* @see GenericKeyedObjectPoolConfig
*/
public void setConfig(GenericKeyedObjectPoolConfig conf) {
setLifo(conf.getLifo());
setMaxIdlePerKey(conf.getMaxIdlePerKey());
setMaxTotalPerKey(conf.getMaxTotalPerKey());
setMaxTotal(conf.getMaxTotal());
setMinIdlePerKey(conf.getMinIdlePerKey());
setMaxWaitMillis(conf.getMaxWaitMillis());
setBlockWhenExhausted(conf.getBlockWhenExhausted());
setTestOnBorrow(conf.getTestOnBorrow());
setTestOnReturn(conf.getTestOnReturn());
setTestWhileIdle(conf.getTestWhileIdle());
setNumTestsPerEvictionRun(conf.getNumTestsPerEvictionRun());
setMinEvictableIdleTimeMillis(conf.getMinEvictableIdleTimeMillis());
setSoftMinEvictableIdleTimeMillis(
conf.getSoftMinEvictableIdleTimeMillis());
setTimeBetweenEvictionRunsMillis(
conf.getTimeBetweenEvictionRunsMillis());
setEvictionPolicyClassName(conf.getEvictionPolicyClassName());
}
/**
* Obtain a reference to the factory used to create, destroy and validate
* the objects used by this pool.
*
* @return the factory
*/
public KeyedPoolableObjectFactory<K, T> getFactory() {
return factory;
}
/**
* Equivalent to <code>{@link #borrowObject(Object, long) borrowObject}(key,
* {@link #getMaxWaitMillis()})</code>.
*/
@Override
public T borrowObject(K key) throws Exception {
return borrowObject(key, getMaxWaitMillis());
}
/**
* Borrows an object from the sub-pool associated with the given key using
* the specified waiting time which only applies if
* {@link #getBlockWhenExhausted()} is true.
* <p>
* If there is one or more idle instances available in the sub-pool
* associated with the given key, then an idle instance will be selected
* based on the value of {@link #getLifo()}, activated and returned. If
* activation fails, or {@link #getTestOnBorrow() testOnBorrow} is set to
* <code>true</code> and validation fails, the instance is destroyed and the
* next available instance is examined. This continues until either a valid
* instance is returned or there are no more idle instances available.
* <p>
* If there are no idle instances available in the sub-pool associated with
* the given key, behavior depends on the {@link #getMaxTotalPerKey()
* maxTotalPerKey}, {@link #getMaxTotal() maxTotal}, and (if applicable)
* {@link #getBlockWhenExhausted()} and the value passed in to the
* <code>borrowMaxWait</code> parameter. If the number of instances checked
* out from the sub-pool under the given key is less than
* <code>maxTotalPerKey</code> and the total number of instances in
* circulation (under all keys) is less than <code>maxTotal</code>, a new
* instance is created, activated and (if applicable) validated and returned
* to the caller.
* <p>
* If the associated sub-pool is exhausted (no available idle instances and
* no capacity to create new ones), this method will either block
* ({@link #getBlockWhenExhausted()} is true) or throw a
* <code>NoSuchElementException</code>
* ({@link #getBlockWhenExhausted()} is false).
* The length of time that this method will block when
* {@link #getBlockWhenExhausted()} is true is determined by the value
* passed in to the <code>borrowMaxWait</code> parameter.
* <p>
* When <code>maxTotal</code> is set to a positive value and this method is
* invoked when at the limit with no idle instances available, an attempt is
* made to create room by clearing the oldest 15% of the elements from the
* keyed sub-pools.
* <p>
* When the pool is exhausted, multiple calling threads may be
* simultaneously blocked waiting for instances to become available. A
* "fairness" algorithm has been implemented to ensure that threads receive
* available instances in request arrival order.
*
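     * A borrow/return sketch (a {@code pool} reference is assumed; exception
     * handling is elided):
     * <pre>
     * T obj = pool.borrowObject(key, 1000); // wait up to one second
     * try {
     *     // use obj
     * } finally {
     *     pool.returnObject(key, obj);
     * }
     * </pre>
     *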
* @param key pool key
* @param borrowMaxWait maximum amount of time to wait (in milliseconds)
*
* @return object instance from the keyed pool
*
* @throws NoSuchElementException if a keyed object instance cannot be
* returned.
*/
public T borrowObject(K key, long borrowMaxWait) throws Exception {
assertOpen();
PooledObject<T> p = null;
// Get local copy of current config so it is consistent for entire
// method execution
boolean blockWhenExhausted = getBlockWhenExhausted();
boolean create;
long waitTime = 0;
ObjectDeque<T> objectDeque = register(key);
try {
while (p == null) {
create = false;
if (blockWhenExhausted) {
if (objectDeque != null) {
p = objectDeque.getIdleObjects().pollFirst();
}
if (p == null) {
create = true;
p = create(key);
}
if (p == null && objectDeque != null) {
if (borrowMaxWait < 0) {
p = objectDeque.getIdleObjects().takeFirst();
} else {
waitTime = System.currentTimeMillis();
p = objectDeque.getIdleObjects().pollFirst(
borrowMaxWait, TimeUnit.MILLISECONDS);
waitTime = System.currentTimeMillis() - waitTime;
}
}
if (p == null) {
throw new NoSuchElementException(
"Timeout waiting for idle object");
}
if (!p.allocate()) {
p = null;
}
} else {
if (objectDeque != null) {
p = objectDeque.getIdleObjects().pollFirst();
}
if (p == null) {
create = true;
p = create(key);
}
if (p == null) {
throw new NoSuchElementException("Pool exhausted");
}
if (!p.allocate()) {
p = null;
}
}
if (p != null) {
try {
factory.activateObject(key, p.getObject());
} catch (Exception e) {
try {
destroy(key, p, true);
} catch (Exception e1) {
// Ignore - activation failure is more important
}
p = null;
if (create) {
NoSuchElementException nsee = new NoSuchElementException(
"Unable to activate object");
nsee.initCause(e);
throw nsee;
}
}
if (p != null && getTestOnBorrow()) {
boolean validate = false;
Throwable validationThrowable = null;
try {
validate = factory.validateObject(key, p.getObject());
                    } catch (Throwable t) {
                        PoolUtils.checkRethrow(t);
                        validationThrowable = t;
                    }
if (!validate) {
try {
destroy(key, p, true);
destroyedByBorrowValidationCount.incrementAndGet();
} catch (Exception e) {
// Ignore - validation failure is more important
}
p = null;
if (create) {
NoSuchElementException nsee = new NoSuchElementException(
"Unable to validate object");
nsee.initCause(validationThrowable);
throw nsee;
}
}
}
}
}
} finally {
deregister(key);
}
updateStatsBorrow(p, waitTime);
return p.getObject();
}
/**
* Returns an object to a keyed sub-pool.
* <p>
* If {@link #getMaxIdlePerKey() maxIdle} is set to a positive value and the
* number of idle instances under the given key has reached this value, the
* returning instance is destroyed.
* <p>
* If {@link #getTestOnReturn() testOnReturn} == true, the returning
* instance is validated before being returned to the idle instance sub-pool
* under the given key. In this case, if validation fails, the instance is
* destroyed.
* <p>
* Exceptions encountered destroying objects for any reason are swallowed
* but remain accessible via {@link #getSwallowedExceptions()}.
*
* @param key pool key
* @param obj instance to return to the keyed pool
*
* @throws IllegalStateException if an object is returned to the pool that
* was not borrowed from it or if an object is
* returned to the pool multiple times
*/
@Override
public void returnObject(K key, T obj) {
ObjectDeque<T> objectDeque = poolMap.get(key);
PooledObject<T> p = objectDeque.getAllObjects().get(obj);
if (p == null) {
throw new IllegalStateException(
"Returned object not currently part of this pool");
}
long activeTime = p.getActiveTimeMillis();
if (getTestOnReturn()) {
if (!factory.validateObject(key, obj)) {
try {
destroy(key, p, true);
} catch (Exception e) {
swallowException(e);
}
updateStatsReturn(activeTime);
return;
}
}
try {
factory.passivateObject(key, obj);
} catch (Exception e1) {
swallowException(e1);
try {
destroy(key, p, true);
} catch (Exception e) {
swallowException(e);
}
updateStatsReturn(activeTime);
return;
}
if (!p.deallocate()) {
throw new IllegalStateException(
"Object has already been retured to this pool");
}
int maxIdle = getMaxIdlePerKey();
LinkedBlockingDeque<PooledObject<T>> idleObjects =
objectDeque.getIdleObjects();
if (isClosed() || maxIdle > -1 && maxIdle <= idleObjects.size()) {
try {
destroy(key, p, true);
} catch (Exception e) {
swallowException(e);
}
} else {
if (getLifo()) {
idleObjects.addFirst(p);
} else {
idleObjects.addLast(p);
}
}
if (hasBorrowWaiters()) {
reuseCapacity();
}
updateStatsReturn(activeTime);
}
/**
* {@inheritDoc}
* <p>
* Activation of this method decrements the active count associated with
     * the given keyed pool and attempts to destroy <code>obj</code>.
*
* @param key pool key
* @param obj instance to invalidate
*
* @throws Exception if an exception occurs destroying the
* object
* @throws IllegalStateException if obj does not belong to the pool
* under the given key
*/
@Override
public void invalidateObject(K key, T obj) throws Exception {
ObjectDeque<T> objectDeque = poolMap.get(key);
PooledObject<T> p = objectDeque.getAllObjects().get(obj);
if (p == null) {
throw new IllegalStateException(
"Object not currently part of this pool");
}
destroy(key, p, true);
}
/**
* Clears any objects sitting idle in the pool by removing them from the
* idle instance sub-pools and then invoking the configured
* PoolableObjectFactory's
* {@link KeyedPoolableObjectFactory#destroyObject(Object, Object)} method
* on each idle instance.
* <p>
* Implementation notes:
* <ul>
     * <li>This method does not destroy or affect in any way instances that are
* checked out when it is invoked.</li>
* <li>Invoking this method does not prevent objects being returned to the
* idle instance pool, even during its execution. Additional instances may
* be returned while removed items are being destroyed.</li>
* <li>Exceptions encountered destroying idle instances are swallowed but
* remain accessible via {@link #getSwallowedExceptions()}.</li>
* </ul>
*/
@Override
public void clear() {
Iterator<K> iter = poolMap.keySet().iterator();
while (iter.hasNext()) {
clear(iter.next());
}
}
/**
* Clears the specified sub-pool, removing all pooled instances
* corresponding to the given <code>key</code>. Exceptions encountered
* destroying idle instances are swallowed but remain accessible via
* {@link #getSwallowedExceptions()}.
*
* @param key the key to clear
*/
@Override
public void clear(K key) {
ObjectDeque<T> objectDeque = register(key);
try {
LinkedBlockingDeque<PooledObject<T>> idleObjects =
objectDeque.getIdleObjects();
PooledObject<T> p = idleObjects.poll();
while (p != null) {
try {
destroy(key, p, true);
} catch (Exception e) {
swallowException(e);
}
p = idleObjects.poll();
}
} finally {
deregister(key);
}
}
/**
     * Returns the total number of instances currently borrowed from this pool but
* not yet returned.
*/
@Override
public int getNumActive() {
return numTotal.get() - getNumIdle();
}
@Override
public int getNumIdle() {
Iterator<ObjectDeque<T>> iter = poolMap.values().iterator();
int result = 0;
while (iter.hasNext()) {
result += iter.next().getIdleObjects().size();
}
return result;
}
/**
* Returns the number of instances currently borrowed from but not yet
* returned to the sub-pool corresponding to the given <code>key</code>.
*
* @param key the key to query
*/
@Override
public int getNumActive(K key) {
final ObjectDeque<T> objectDeque = poolMap.get(key);
if (objectDeque != null) {
return objectDeque.getAllObjects().size() -
objectDeque.getIdleObjects().size();
} else {
return 0;
}
}
/**
* Returns the number of idle instances in the sub-pool corresponding to the
* given <code>key</code>.
*
* @param key the key to query
*/
@Override
public int getNumIdle(K key) {
final ObjectDeque<T> objectDeque = poolMap.get(key);
return objectDeque != null ? objectDeque.getIdleObjects().size() : 0;
}
/**
* Closes the keyed object pool. Once the pool is closed,
* {@link #borrowObject(Object)} will fail with IllegalStateException, but
* {@link #returnObject(Object, Object)} and
* {@link #invalidateObject(Object, Object)} will continue to work, with
* returned objects destroyed on return.
* <p>
* Destroys idle instances in the pool by invoking {@link #clear()}.
*/
@Override
public void close() {
if (isClosed()) {
return;
}
synchronized (closeLock) {
if (isClosed()) {
return;
}
// Stop the evictor before the pool is closed since evict() calls
// assertOpen()
startEvictor(-1L);
closed = true;
// This clear removes any idle objects
clear();
jmxUnregister();
// Release any threads that were waiting for an object
Iterator<ObjectDeque<T>> iter = poolMap.values().iterator();
while (iter.hasNext()) {
iter.next().getIdleObjects().interuptTakeWaiters();
}
// This clear cleans up the keys now any waiting threads have been
// interrupted
clear();
}
}
/**
     * Clears the oldest 15% of objects in the pool. The method sorts the
     * objects into a TreeMap and then iterates the first 15% for removal.
*/
public void clearOldest() {
// build sorted map of idle objects
final Map<PooledObject<T>, K> map = new TreeMap<PooledObject<T>, K>();
for (K k : poolMap.keySet()) {
final LinkedBlockingDeque<PooledObject<T>> idleObjects =
poolMap.get(k).getIdleObjects();
for (PooledObject<T> p : idleObjects) {
// each item into the map using the PooledObject object as the
// key. It then gets sorted based on the idle time
map.put(p, k);
}
}
// Now iterate created map and kill the first 15% plus one to account
// for zero
int itemsToRemove = ((int) (map.size() * 0.15)) + 1;
Iterator<Map.Entry<PooledObject<T>, K>> iter =
map.entrySet().iterator();
while (iter.hasNext() && itemsToRemove > 0) {
Map.Entry<PooledObject<T>, K> entry = iter.next();
// kind of backwards on naming. In the map, each key is the
// PooledObject because it has the ordering with the timestamp
// value. Each value that the key references is the key of the
// list it belongs to.
K key = entry.getValue();
PooledObject<T> p = entry.getKey();
// Assume the destruction succeeds
boolean destroyed = true;
try {
destroyed = destroy(key, p, false);
} catch (Exception e) {
swallowException(e);
}
if (destroyed) {
itemsToRemove--;
}
}
}
/**
* Attempt to create one new instance to serve from the most heavily
* loaded pool that can add a new instance.
*
* This method exists to ensure liveness in the pool when threads are
* parked waiting and capacity to create instances under the requested keys
* subsequently becomes available.
*
* This method is not guaranteed to create an instance and its selection
* of the most loaded pool that can create an instance may not always be
* correct, since it does not lock the pool and instances may be created,
* borrowed, returned or destroyed by other threads while it is executing.
*/
private void reuseCapacity() {
final int maxTotalPerKey = getMaxTotalPerKey();
// Find the most loaded pool that could take a new instance
int maxQueueLength = 0;
LinkedBlockingDeque<PooledObject<T>> mostLoaded = null;
K loadedKey = null;
for (K k : poolMap.keySet()) {
final ObjectDeque<T> deque = poolMap.get(k);
if (deque != null) {
final LinkedBlockingDeque<PooledObject<T>> pool = deque.getIdleObjects();
final int queueLength = pool.getTakeQueueLength();
if (getNumActive(k) < maxTotalPerKey && queueLength > maxQueueLength) {
maxQueueLength = queueLength;
mostLoaded = pool;
loadedKey = k;
}
}
}
// Attempt to add an instance to the most loaded pool
if (mostLoaded != null) {
register(loadedKey);
try {
PooledObject<T> p = create(loadedKey);
if (p != null) {
addIdleObject(loadedKey, p);
}
} catch (Exception e) {
swallowException(e);
} finally {
deregister(loadedKey);
}
}
}
private boolean hasBorrowWaiters() {
for (K k : poolMap.keySet()) {
final ObjectDeque<T> deque = poolMap.get(k);
if (deque != null) {
final LinkedBlockingDeque<PooledObject<T>> pool =
deque.getIdleObjects();
if(pool.hasTakeWaiters()) {
return true;
}
}
}
return false;
}
/**
* {@inheritDoc}
* <p>
* Successive activations of this method examine objects in keyed sub-pools
* in sequence, cycling through the keys and examining objects in
* oldest-to-youngest order within the keyed sub-pools.
*/
@Override
public void evict() throws Exception {
assertOpen();
if (getNumIdle() == 0) {
return;
}
PooledObject<T> underTest = null;
EvictionPolicy<T> evictionPolicy = getEvictionPolicy();
synchronized (evictionLock) {
EvictionConfig evictionConfig = new EvictionConfig(
getMinEvictableIdleTimeMillis(),
getSoftMinEvictableIdleTimeMillis(),
getMinIdlePerKey());
boolean testWhileIdle = getTestWhileIdle();
LinkedBlockingDeque<PooledObject<T>> idleObjects = null;
for (int i = 0, m = getNumTests(); i < m; i++) {
if(evictionIterator == null || !evictionIterator.hasNext()) {
if (evictionKeyIterator == null ||
!evictionKeyIterator.hasNext()) {
List<K> keyCopy = new ArrayList<K>();
Lock readLock = keyLock.readLock();
readLock.lock();
try {
keyCopy.addAll(poolKeyList);
} finally {
readLock.unlock();
}
evictionKeyIterator = keyCopy.iterator();
}
while (evictionKeyIterator.hasNext()) {
evictionKey = evictionKeyIterator.next();
ObjectDeque<T> objectDeque = poolMap.get(evictionKey);
if (objectDeque == null) {
continue;
}
idleObjects = objectDeque.getIdleObjects();
if (getLifo()) {
evictionIterator = idleObjects.descendingIterator();
} else {
evictionIterator = idleObjects.iterator();
}
if (evictionIterator.hasNext()) {
break;
}
evictionIterator = null;
}
}
if (evictionIterator == null) {
// Pools exhausted
return;
}
try {
underTest = evictionIterator.next();
} catch (NoSuchElementException nsee) {
// Object was borrowed in another thread
// Don't count this as an eviction test so reduce i;
i--;
evictionIterator = null;
continue;
}
if (!underTest.startEvictionTest()) {
// Object was borrowed in another thread
// Don't count this as an eviction test so reduce i;
i--;
continue;
}
if (evictionPolicy.evict(evictionConfig, underTest,
poolMap.get(evictionKey).getIdleObjects().size())) {
destroy(evictionKey, underTest, true);
destroyedByEvictorCount.incrementAndGet();
} else {
if (testWhileIdle) {
boolean active = false;
try {
factory.activateObject(evictionKey,
underTest.getObject());
active = true;
} catch (Exception e) {
destroy(evictionKey, underTest, true);
destroyedByEvictorCount.incrementAndGet();
}
if (active) {
if (!factory.validateObject(evictionKey,
underTest.getObject())) {
destroy(evictionKey, underTest, true);
destroyedByEvictorCount.incrementAndGet();
} else {
try {
factory.passivateObject(evictionKey,
underTest.getObject());
} catch (Exception e) {
destroy(evictionKey, underTest, true);
destroyedByEvictorCount.incrementAndGet();
}
}
}
}
if (!underTest.endEvictionTest(idleObjects)) {
// TODO - May need to add code here once additional
// states are used
}
}
}
}
}
private PooledObject<T> create(K key) throws Exception {
int maxTotalPerKey = getMaxTotalPerKey(); // Per key
int maxTotal = getMaxTotal(); // All keys
// Check against the overall limit
boolean loop = true;
while (loop) {
int newNumTotal = numTotal.incrementAndGet();
if (maxTotal > -1 && newNumTotal > maxTotal) {
numTotal.decrementAndGet();
if (getNumIdle() == 0) {
return null;
} else {
clearOldest();
}
} else {
loop = false;
}
}
ObjectDeque<T> objectDeque = poolMap.get(key);
long newCreateCount = objectDeque.getCreateCount().incrementAndGet();
// Check against the per key limit
if (maxTotalPerKey > -1 && newCreateCount > maxTotalPerKey ||
newCreateCount > Integer.MAX_VALUE) {
numTotal.decrementAndGet();
objectDeque.getCreateCount().decrementAndGet();
return null;
}
T t = null;
try {
t = factory.makeObject(key);
} catch (Exception e) {
numTotal.decrementAndGet();
throw e;
}
PooledObject<T> p = new PooledObject<T>(t);
createdCount.incrementAndGet();
objectDeque.getAllObjects().put(t, p);
return p;
}
private boolean destroy(K key, PooledObject<T> toDestroy, boolean always)
throws Exception {
ObjectDeque<T> objectDeque = register(key);
try {
boolean isIdle = objectDeque.getIdleObjects().remove(toDestroy);
if (isIdle || always) {
objectDeque.getAllObjects().remove(toDestroy.getObject());
toDestroy.invalidate();
try {
factory.destroyObject(key, toDestroy.getObject());
} finally {
objectDeque.getCreateCount().decrementAndGet();
destroyedCount.incrementAndGet();
numTotal.decrementAndGet();
}
return true;
} else {
return false;
}
} finally {
deregister(key);
}
}
/*
* register() and deregister() must always be used as a pair.
*/
private ObjectDeque<T> register(K k) {
Lock lock = keyLock.readLock();
ObjectDeque<T> objectDeque = null;
try {
lock.lock();
objectDeque = poolMap.get(k);
if (objectDeque == null) {
// Upgrade to write lock
lock.unlock();
lock = keyLock.writeLock();
lock.lock();
objectDeque = poolMap.get(k);
if (objectDeque == null) {
objectDeque = new ObjectDeque<T>();
objectDeque.getNumInterested().incrementAndGet();
// NOTE: Keys must always be added to both poolMap and
// poolKeyList at the same time while protected by
// keyLock.writeLock()
poolMap.put(k, objectDeque);
poolKeyList.add(k);
} else {
objectDeque.getNumInterested().incrementAndGet();
}
} else {
objectDeque.getNumInterested().incrementAndGet();
}
} finally {
lock.unlock();
}
return objectDeque;
}
/*
* register() and deregister() must always be used as a pair.
*/
private void deregister(K k) {
ObjectDeque<T> objectDeque;
// TODO Think carefully about when a read lock is required
objectDeque = poolMap.get(k);
long numInterested = objectDeque.getNumInterested().decrementAndGet();
if (numInterested == 0 && objectDeque.getCreateCount().get() == 0) {
// Potential to remove key
Lock writeLock = keyLock.writeLock();
writeLock.lock();
try {
if (objectDeque.getCreateCount().get() == 0 &&
objectDeque.getNumInterested().get() == 0) {
// NOTE: Keys must always be removed from both poolMap and
// poolKeyList at the same time while protected by
// keyLock.writeLock()
poolMap.remove(k);
poolKeyList.remove(k);
}
} finally {
writeLock.unlock();
}
}
}
@Override
void ensureMinIdle() throws Exception {
int minIdlePerKey = getMinIdlePerKey();
if (minIdlePerKey < 1) {
return;
}
for (K k : poolMap.keySet()) {
ensureMinIdle(k);
}
}
private void ensureMinIdle(K key) throws Exception {
// Calculate current pool objects
ObjectDeque<T> objectDeque = poolMap.get(key);
// this method isn't synchronized so the
// calculateDeficit is done at the beginning
// as a loop limit and a second time inside the loop
// to stop when another thread already returned the
// needed objects
int deficit = calculateDeficit(objectDeque);
for (int i = 0; i < deficit && calculateDeficit(objectDeque) > 0; i++) {
addObject(key);
}
}
/**
* Create an object using the {@link KeyedPoolableObjectFactory#makeObject
* factory}, passivate it, and then place it in the idle object pool.
* <code>addObject</code> is useful for "pre-loading" a pool with idle
* objects.
*
* @param key the key a new instance should be added to
*
* @throws Exception when {@link KeyedPoolableObjectFactory#makeObject}
* fails.
*/
@Override
public void addObject(K key) throws Exception {
assertOpen();
register(key);
try {
PooledObject<T> p = create(key);
addIdleObject(key, p);
} finally {
deregister(key);
}
}
private void addIdleObject(K key, PooledObject<T> p) throws Exception {
if (p != null) {
factory.passivateObject(key, p.getObject());
LinkedBlockingDeque<PooledObject<T>> idleObjects =
poolMap.get(key).getIdleObjects();
if (getLifo()) {
idleObjects.addFirst(p);
} else {
idleObjects.addLast(p);
}
}
}
/**
* Registers a key for pool control and ensures that
* {@link #getMinIdlePerKey()} idle instances are created.
*
* @param key - The key to register for pool control.
*/
public void preparePool(K key) throws Exception {
int minIdlePerKey = getMinIdlePerKey();
if (minIdlePerKey < 1) {
return;
}
ensureMinIdle(key);
}
private int getNumTests() {
int totalIdle = getNumIdle();
int numTests = getNumTestsPerEvictionRun();
if (numTests >= 0) {
return Math.min(numTests, totalIdle);
}
        return (int) Math.ceil(totalIdle / Math.abs((double) numTests));
}
private int calculateDeficit(ObjectDeque<T> objectDeque) {
if (objectDeque == null) {
return getMinIdlePerKey();
}
// Used more than once so keep a local copy so the value is consistent
int maxTotal = getMaxTotal();
int maxTotalPerKey = getMaxTotalPerKey();
        int objectDeficit;
        // Calculate the number of objects that need to be created to reach
        // the configured minIdle, without exceeding maxTotalPerKey
        objectDeficit = getMinIdlePerKey() - objectDeque.getIdleObjects().size();
        if (maxTotalPerKey > 0) {
            int growLimit = Math.max(0,
                    maxTotalPerKey - objectDeque.getIdleObjects().size());
            objectDeficit = Math.min(objectDeficit, growLimit);
        }
        // Take the maxTotal limit into account
        if (maxTotal > 0) {
            int growLimit = Math.max(0, maxTotal - getNumActive() - getNumIdle());
            objectDeficit = Math.min(objectDeficit, growLimit);
        }
        return objectDeficit;
}
//--- JMX support ----------------------------------------------------------
@Override
public Map<String,Integer> getNumActivePerKey() {
HashMap<String,Integer> result = new HashMap<String,Integer>();
Iterator<Entry<K,ObjectDeque<T>>> iter = poolMap.entrySet().iterator();
while (iter.hasNext()) {
Entry<K,ObjectDeque<T>> entry = iter.next();
if (entry != null) {
K key = entry.getKey();
ObjectDeque<T> objectDequeue = entry.getValue();
if (key != null && objectDequeue != null) {
result.put(key.toString(), Integer.valueOf(
objectDequeue.getAllObjects().size() -
objectDequeue.getIdleObjects().size()));
}
}
}
return result;
}
/**
* Return an estimate of the number of threads currently blocked waiting for
* an object from the pool. This is intended for monitoring only, not for
* synchronization control.
*/
@Override
public int getNumWaiters() {
int result = 0;
if (getBlockWhenExhausted()) {
Iterator<ObjectDeque<T>> iter = poolMap.values().iterator();
while (iter.hasNext()) {
// Assume no overflow
result += iter.next().getIdleObjects().getTakeQueueLength();
}
}
return result;
}
/**
* Return an estimate of the number of threads currently blocked waiting for
* an object from the pool for the given key. This is intended for
* monitoring only, not for synchronization control.
*/
@Override
public int getNumWaiters(K key) {
if (getBlockWhenExhausted()) {
final ObjectDeque<T> objectDeque = poolMap.get(key);
if (objectDeque == null) {
return 0;
} else {
return objectDeque.getIdleObjects().getTakeQueueLength();
}
} else {
return 0;
}
}
@Override
public List<K> getKeys() {
List<K> keyCopy = new ArrayList<K>();
Lock readLock = keyLock.readLock();
readLock.lock();
try {
keyCopy.addAll(poolKeyList);
} finally {
readLock.unlock();
}
return keyCopy;
}
//--- inner classes ----------------------------------------------
/*
* Maintains information on the per key queue for a given key.
*/
private class ObjectDeque<S> {
private final LinkedBlockingDeque<PooledObject<S>> idleObjects =
new LinkedBlockingDeque<PooledObject<S>>();
/*
* Number of instances created - number destroyed.
* Invariant: createCount <= maxTotalPerKey
*/
private final AtomicInteger createCount = new AtomicInteger(0);
/*
* The map is keyed on pooled instances. Note: pooled instances
* <em>must</em> be distinguishable by equals for this structure to
* work properly.
*/
private final Map<S, PooledObject<S>> allObjects =
new ConcurrentHashMap<S, PooledObject<S>>();
/*
* Number of threads with registered interest in this key.
* register(K) increments this counter and deRegister(K) decrements it.
* Invariant: empty keyed pool will not be dropped unless numInterested
* is 0.
*/
private final AtomicLong numInterested = new AtomicLong(0);
public LinkedBlockingDeque<PooledObject<S>> getIdleObjects() {
return idleObjects;
}
public AtomicInteger getCreateCount() {
return createCount;
}
public AtomicLong getNumInterested() {
return numInterested;
}
public Map<S, PooledObject<S>> getAllObjects() {
return allObjects;
}
}
//--- configuration attributes ---------------------------------------------
private volatile int maxIdlePerKey =
GenericKeyedObjectPoolConfig.DEFAULT_MAX_IDLE_PER_KEY;
private volatile int minIdlePerKey =
GenericKeyedObjectPoolConfig.DEFAULT_MIN_IDLE_PER_KEY;
private volatile int maxTotalPerKey =
GenericKeyedObjectPoolConfig.DEFAULT_MAX_TOTAL_PER_KEY;
private final KeyedPoolableObjectFactory<K,T> factory;
//--- internal attributes --------------------------------------------------
/*
* My hash of sub-pools (ObjectQueue). The list of keys <b>must</b> be kept
* in step with {@link #poolKeyList} using {@link #keyLock} to ensure any
* changes to the list of current keys is made in a thread-safe manner.
*/
private final Map<K,ObjectDeque<T>> poolMap =
new ConcurrentHashMap<K,ObjectDeque<T>>(); // @GuardedBy("keyLock") for write access (and some read access)
/*
* List of pool keys - used to control eviction order. The list of keys
* <b>must</b> be kept in step with {@link #poolMap} using {@link #keyLock}
* to ensure any changes to the list of current keys is made in a
* thread-safe manner.
*/
private final List<K> poolKeyList = new ArrayList<K>(); // @GuardedBy("keyLock")
private final ReadWriteLock keyLock = new ReentrantReadWriteLock(true);
/*
* The combined count of the currently active objects for all keys and those
* in the process of being created. Under load, it may exceed
* {@link #maxTotal} but there will never be more than {@link #maxTotal}
* created at any one time.
*/
private final AtomicInteger numTotal = new AtomicInteger(0);
private Iterator<K> evictionKeyIterator = null; // @GuardedBy("evictionLock")
private K evictionKey = null; // @GuardedBy("evictionLock")
// JMX specific attributes
private static final String ONAME_BASE =
"org.apache.commoms.pool2:type=GenericKeyedObjectPool,name=";
}
| No further read locks required but a few places where an NPE may occur.
git-svn-id: a66ef3f0e6c00b14098e182847b4bd646263fa09@1333153 13f79535-47bb-0310-9956-ffa450edef68
| src/main/java/org/apache/commons/pool2/impl/GenericKeyedObjectPool.java | No further read locks required but a few places where an NPE may occur. |
|
Java | apache-2.0 | 553fc39875a40ed7486745ca27c14a7da307760c | 0 | asciidoctor/asciidoctor-intellij-plugin,asciidoctor/asciidoctor-intellij-plugin,asciidoctor/asciidoctor-intellij-plugin,asciidoctor/asciidoctor-intellij-plugin,asciidoctor/asciidoctor-intellij-plugin | package org.asciidoc.intellij.grazie;
import com.intellij.lang.ASTNode;
import com.intellij.psi.PsiComment;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiElementVisitor;
import com.intellij.psi.TokenType;
import com.intellij.psi.tree.TokenSet;
import org.asciidoc.intellij.lexer.AsciiDocTokenTypes;
import org.asciidoc.intellij.parser.AsciiDocElementTypes;
import org.asciidoc.intellij.psi.AsciiDocAttributeDeclarationImpl;
import org.asciidoc.intellij.psi.AsciiDocAttributeReference;
import org.asciidoc.intellij.psi.AsciiDocInlineMacro;
import org.asciidoc.intellij.psi.AsciiDocLink;
import org.asciidoc.intellij.psi.AsciiDocRef;
import org.asciidoc.intellij.psi.AsciiDocUrl;
import org.jetbrains.annotations.NotNull;
public class AsciiDocLanguageSupport {
public enum Behavior {
/**
     * A PSI element that contains nested text and should be ignored.
* <p>
* Example: The "Headline" is a nested element in the section; it is treated as its own sentence.
* <p>
* <pre>
* == Headline
* More text
* </pre>
* <p>
*/
ABSORB,
/**
     * A PSI element that <b>WILL NOT</b> be printed; adjacent text is part of the same word.
* <p>
* Example: the "**" would be STEALTH<br>
* <code>**b**old</code> is one word "bold"
*/
STEALTH,
/**
* A PSI that contains text that should be spell and grammar checked.
* <p>
* Example: the "b" and "old" would be TEXT<br>
* <code>**b**old</code> is one word "bold"
*/
TEXT,
/**
     * A PSI that <b>WILL</b> be printed; adjacent text represents different words.
* <p>
* Example: the {@code ->} would be "SEPARATE"<br>
* {@code one->two} is two words "one" and "two"
*/
SEPARATE,
/**
* A PSI that that <b>WILL</b> be printed, but with unknown text.
* <p>
* Example: an attribute {@code a {attr}} would print some text <br>
* but the contents are unknown.
*/
UNKNOWN
}
// all tokens that contain full sentences that can be checked for grammar and spelling.
private static final TokenSet NODES_TO_CHECK = TokenSet.create(
AsciiDocTokenTypes.LINE_COMMENT,
AsciiDocTokenTypes.BLOCK_COMMENT,
AsciiDocTokenTypes.LITERAL_BLOCK,
AsciiDocTokenTypes.BLOCKREFTEXT,
AsciiDocElementTypes.SECTION,
AsciiDocElementTypes.TITLE,
AsciiDocElementTypes.BLOCK,
AsciiDocElementTypes.DESCRIPTION_ITEM,
AsciiDocElementTypes.LIST_ITEM,
AsciiDocElementTypes.CELL,
AsciiDocElementTypes.HEADING
);
  /** All tokens that separate words or sentences.
   * The contents of these tokens will not be forwarded to the grammar or spell checker.
   * Example: END_OF_SENTENCE is in the TEXT category instead, as it needs to be passed to the grammar checker as a ".", "?" or other text,
   * so that the grammar checker recognizes the end of a sentence.
   */
private static final TokenSet SEPARATOR_TOKENS = TokenSet.create(
AsciiDocTokenTypes.ARROW,
AsciiDocTokenTypes.LBRACKET,
AsciiDocTokenTypes.RBRACKET,
AsciiDocTokenTypes.LPAREN,
AsciiDocTokenTypes.RPAREN,
AsciiDocTokenTypes.DOUBLE_QUOTE,
AsciiDocTokenTypes.SINGLE_QUOTE,
AsciiDocTokenTypes.TYPOGRAPHIC_DOUBLE_QUOTE_START,
AsciiDocTokenTypes.TYPOGRAPHIC_DOUBLE_QUOTE_END,
AsciiDocTokenTypes.ASSIGNMENT,
AsciiDocTokenTypes.CELLSEPARATOR,
AsciiDocTokenTypes.BULLET,
AsciiDocTokenTypes.ENUMERATION,
AsciiDocTokenTypes.ADMONITION,
AsciiDocTokenTypes.CALLOUT
);
// all tokens that contain text that is part of a sentence and can be a sub-node of the elements above
private static final TokenSet TEXT_TOKENS = TokenSet.orSet(TokenSet.create(
AsciiDocTokenTypes.TEXT,
AsciiDocTokenTypes.ITALIC,
AsciiDocTokenTypes.BOLD,
AsciiDocTokenTypes.BOLDITALIC,
AsciiDocTokenTypes.MONO,
AsciiDocTokenTypes.MONOBOLD,
AsciiDocTokenTypes.DESCRIPTION,
AsciiDocTokenTypes.DESCRIPTION_END, // for now, keep this as text until it is split into its own root element
AsciiDocTokenTypes.MACROTEXT,
AsciiDocTokenTypes.BLOCKREFTEXT,
AsciiDocTokenTypes.REFTEXT,
AsciiDocTokenTypes.MONOITALIC,
AsciiDocTokenTypes.MONOBOLDITALIC,
AsciiDocTokenTypes.END_OF_SENTENCE,
AsciiDocTokenTypes.PASSTRHOUGH_CONTENT,
AsciiDocTokenTypes.LT,
AsciiDocTokenTypes.GT,
AsciiDocTokenTypes.TYPOGRAPHIC_SINGLE_QUOTE_START,
AsciiDocTokenTypes.TYPOGRAPHIC_SINGLE_QUOTE_END,
AsciiDocTokenTypes.LPAREN,
AsciiDocTokenTypes.RPAREN,
AsciiDocTokenTypes.LBRACKET,
AsciiDocTokenTypes.RBRACKET,
AsciiDocTokenTypes.BULLET,
AsciiDocTokenTypes.ATTRIBUTE_VAL, // will only get here if attribute is classified to contain spell checkable content
AsciiDocTokenTypes.ATTRIBUTE_CONTINUATION,
AsciiDocTokenTypes.ATTRIBUTE_CONTINUATION_LEGACY,
// keep the white space in here as blanks are necessary to separate words
AsciiDocTokenTypes.WHITE_SPACE,
AsciiDocTokenTypes.WHITE_SPACE_MONO,
AsciiDocTokenTypes.HEADING_TOKEN,
AsciiDocTokenTypes.HEADING_OLDSTYLE,
TokenType.WHITE_SPACE,
AsciiDocElementTypes.URL, // can nest MACROTEXT, or will show the URL_LINK or URL_EMAIL as default
AsciiDocElementTypes.REF, // can nest REFTEXT
AsciiDocElementTypes.LINK, // can nest MACROTEXT
AsciiDocElementTypes.INLINE_MACRO, // can nest MACROTEXT
AsciiDocElementTypes.MONO, // will nest MONO
AsciiDocElementTypes.ITALIC // will nest ITALIC
), NODES_TO_CHECK);
public Behavior getElementBehavior(@NotNull PsiElement root, @NotNull PsiElement child) {
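    // The order of these checks matters: container nodes are absorbed first so that they are
    // handled as their own context roots, before separators, macros and plain text are classified.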
if (root != child && NODES_TO_CHECK.contains(child.getNode().getElementType())) {
return Behavior.ABSORB;
} else if (root == child && child instanceof AsciiDocAttributeDeclarationImpl) {
if (((AsciiDocAttributeDeclarationImpl) child).hasSpellCheckableContent()) {
return Behavior.TEXT;
} else {
return Behavior.ABSORB;
}
} else if (child instanceof AsciiDocAttributeReference) {
return Behavior.UNKNOWN;
} else if (SEPARATOR_TOKENS.contains(child.getNode().getElementType())) {
return Behavior.SEPARATE;
} else if (root != child && child instanceof AsciiDocInlineMacro && ((AsciiDocInlineMacro) child).getMacroName().equals("footnote")) {
return Behavior.ABSORB;
} else if (
// A link or URL can contain either a macro text or no text.
// AsciiDoc will display the macro text, or the link/email address if no such text is provided.
// Pass on the content that would be displayed by AsciiDoc to the grammar check.
(child.getNode().getElementType() == AsciiDocTokenTypes.URL_LINK || child.getNode().getElementType() == AsciiDocTokenTypes.URL_EMAIL) &&
isChildOfLinkOrUrl(child)) {
boolean macroTextPresent = false;
ASTNode node = child.getNode();
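      // walk this node and its following siblings to see whether an explicit macro text is present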
while (node != null) {
if (node.getElementType() == AsciiDocTokenTypes.MACROTEXT) {
macroTextPresent = true;
break;
}
node = node.getTreeNext();
}
if (macroTextPresent) {
return Behavior.STEALTH;
} else {
return Behavior.TEXT;
}
} else if (child instanceof AsciiDocInlineMacro
|| (child instanceof AsciiDocLink && ((AsciiDocLink) child).getMacroName().equals("xref"))
|| (child instanceof AsciiDocRef)) {
// an inline macro or an xref will be treated as unknown if they don't contain text
LookingForMacroTextVisitor visitor = new LookingForMacroTextVisitor();
child.accept(visitor);
if (visitor.hasFound()) {
return Behavior.TEXT;
} else {
return Behavior.UNKNOWN;
}
} else if (TEXT_TOKENS.contains(child.getNode().getElementType())) {
return Behavior.TEXT;
} else {
return Behavior.STEALTH;
}
}
private boolean isChildOfLinkOrUrl(@NotNull PsiElement child) {
PsiElement parent = child.getParent();
return parent instanceof AsciiDocLink || parent instanceof AsciiDocUrl;
}
private static class LookingForMacroTextVisitor extends PsiElementVisitor {
private boolean found = false;
public boolean hasFound() {
return found;
}
@Override
public void visitElement(@NotNull PsiElement element) {
super.visitElement(element);
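      // depth-first scan: stop descending as soon as a MACROTEXT or REFTEXT token is found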
if (element.getNode().getElementType() == AsciiDocTokenTypes.MACROTEXT || element.getNode().getElementType() == AsciiDocTokenTypes.REFTEXT) {
found = true;
return;
}
PsiElement child = element.getFirstChild();
while (child != null && !found) {
visitElement(child);
child = child.getNextSibling();
}
}
}
public static boolean containsOnlySpaces(PsiElement child) {
return child.getNode().getChars().chars().noneMatch(c -> c != ' ');
}
public boolean isMyContextRoot(@NotNull PsiElement psiElement) {
if (psiElement instanceof AsciiDocAttributeDeclarationImpl &&
((AsciiDocAttributeDeclarationImpl) psiElement).hasSpellCheckableContent()) {
return true;
}
if (psiElement instanceof AsciiDocInlineMacro &&
((AsciiDocInlineMacro) psiElement).getMacroName().equals("footnote")) {
return true;
}
return NODES_TO_CHECK.contains(psiElement.getNode().getElementType())
|| psiElement instanceof PsiComment;
}
}
| src/main/java/org/asciidoc/intellij/grazie/AsciiDocLanguageSupport.java | package org.asciidoc.intellij.grazie;
import com.intellij.lang.ASTNode;
import com.intellij.psi.PsiComment;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiElementVisitor;
import com.intellij.psi.PsiWhiteSpace;
import com.intellij.psi.TokenType;
import com.intellij.psi.tree.TokenSet;
import org.asciidoc.intellij.lexer.AsciiDocTokenTypes;
import org.asciidoc.intellij.parser.AsciiDocElementTypes;
import org.asciidoc.intellij.psi.AsciiDocAttributeDeclarationImpl;
import org.asciidoc.intellij.psi.AsciiDocAttributeReference;
import org.asciidoc.intellij.psi.AsciiDocInlineMacro;
import org.asciidoc.intellij.psi.AsciiDocLink;
import org.asciidoc.intellij.psi.AsciiDocRef;
import org.asciidoc.intellij.psi.AsciiDocUrl;
import org.jetbrains.annotations.NotNull;
public class AsciiDocLanguageSupport {
public enum Behavior {
/**
     * A PSI element that contains nested text and should be ignored.
* <p>
* Example: The "Headline" is a nested element in the section; it is treated as its own sentence.
* <p>
* <pre>
* == Headline
* More text
* </pre>
* <p>
*/
ABSORB,
/**
     * A PSI element that <b>WILL NOT</b> be printed; adjacent text is part of the same word.
* <p>
* Example: the "**" would be STEALTH<br>
* <code>**b**old</code> is one word "bold"
*/
STEALTH,
/**
* A PSI that contains text that should be spell and grammar checked.
* <p>
* Example: the "b" and "old" would be TEXT<br>
* <code>**b**old</code> is one word "bold"
*/
TEXT,
/**
     * A PSI element that <b>WILL</b> be printed; adjacent text represents different words.
* <p>
* Example: the {@code ->} would be "SEPARATE"<br>
* {@code one->two} is two words "one" and "two"
*/
SEPARATE,
/**
     * A PSI element that <b>WILL</b> be printed, but with unknown text.
* <p>
* Example: an attribute {@code a {attr}} would print some text <br>
* but the contents are unknown.
*/
UNKNOWN
}
// all tokens that contain full sentences that can be checked for grammar and spelling.
private static final TokenSet NODES_TO_CHECK = TokenSet.create(
AsciiDocTokenTypes.LINE_COMMENT,
AsciiDocTokenTypes.BLOCK_COMMENT,
AsciiDocTokenTypes.LITERAL_BLOCK,
AsciiDocTokenTypes.BLOCKREFTEXT,
AsciiDocElementTypes.SECTION,
AsciiDocElementTypes.TITLE,
AsciiDocElementTypes.BLOCK,
AsciiDocElementTypes.DESCRIPTION_ITEM,
AsciiDocElementTypes.LIST_ITEM,
AsciiDocElementTypes.CELL,
AsciiDocElementTypes.HEADING
);
  /** All tokens that separate words or sentences.
   * The contents of these tokens will not be forwarded to the grammar or spell checker.
   * Example: END_OF_SENTENCE is in the TEXT category instead, as it needs to be passed to the grammar checker as a ".", "?" or other text,
   * so that the grammar checker recognizes the end of a sentence.
   */
private static final TokenSet SEPARATOR_TOKENS = TokenSet.create(
AsciiDocTokenTypes.ARROW,
AsciiDocTokenTypes.LBRACKET,
AsciiDocTokenTypes.RBRACKET,
AsciiDocTokenTypes.LPAREN,
AsciiDocTokenTypes.RPAREN,
AsciiDocTokenTypes.DOUBLE_QUOTE,
AsciiDocTokenTypes.SINGLE_QUOTE,
AsciiDocTokenTypes.TYPOGRAPHIC_DOUBLE_QUOTE_START,
AsciiDocTokenTypes.TYPOGRAPHIC_DOUBLE_QUOTE_END,
AsciiDocTokenTypes.ASSIGNMENT,
AsciiDocTokenTypes.CELLSEPARATOR,
AsciiDocTokenTypes.BULLET,
AsciiDocTokenTypes.ENUMERATION,
AsciiDocTokenTypes.ADMONITION,
AsciiDocTokenTypes.CALLOUT
);
// all tokens that contain text that is part of a sentence and can be a sub-node of the elements above
private static final TokenSet TEXT_TOKENS = TokenSet.orSet(TokenSet.create(
AsciiDocTokenTypes.TEXT,
AsciiDocTokenTypes.ITALIC,
AsciiDocTokenTypes.BOLD,
AsciiDocTokenTypes.BOLDITALIC,
AsciiDocTokenTypes.MONO,
AsciiDocTokenTypes.MONOBOLD,
AsciiDocTokenTypes.DESCRIPTION,
AsciiDocTokenTypes.DESCRIPTION_END, // for now, keep this as text until it is split into its own root element
AsciiDocTokenTypes.MACROTEXT,
AsciiDocTokenTypes.BLOCKREFTEXT,
AsciiDocTokenTypes.REFTEXT,
AsciiDocTokenTypes.MONOITALIC,
AsciiDocTokenTypes.MONOBOLDITALIC,
AsciiDocTokenTypes.END_OF_SENTENCE,
AsciiDocTokenTypes.PASSTRHOUGH_CONTENT,
AsciiDocTokenTypes.LT,
AsciiDocTokenTypes.GT,
AsciiDocTokenTypes.TYPOGRAPHIC_SINGLE_QUOTE_START,
AsciiDocTokenTypes.TYPOGRAPHIC_SINGLE_QUOTE_END,
AsciiDocTokenTypes.LPAREN,
AsciiDocTokenTypes.RPAREN,
AsciiDocTokenTypes.LBRACKET,
AsciiDocTokenTypes.RBRACKET,
AsciiDocTokenTypes.BULLET,
AsciiDocTokenTypes.ATTRIBUTE_VAL, // will only get here if attribute is classified to contain spell checkable content
AsciiDocTokenTypes.ATTRIBUTE_CONTINUATION,
AsciiDocTokenTypes.ATTRIBUTE_CONTINUATION_LEGACY,
// keep the white space in here as blanks are necessary to separate words
AsciiDocTokenTypes.WHITE_SPACE,
AsciiDocTokenTypes.WHITE_SPACE_MONO,
AsciiDocTokenTypes.HEADING_TOKEN,
AsciiDocTokenTypes.HEADING_OLDSTYLE,
TokenType.WHITE_SPACE,
AsciiDocElementTypes.URL, // can nest MACROTEXT, or will show the URL_LINK or URL_EMAIL as default
AsciiDocElementTypes.REF, // can nest REFTEXT
AsciiDocElementTypes.LINK, // can nest MACROTEXT
AsciiDocElementTypes.INLINE_MACRO, // can nest MACROTEXT
AsciiDocElementTypes.MONO, // will nest MONO
AsciiDocElementTypes.ITALIC // will nest ITALIC
), NODES_TO_CHECK);
// all tokens are surrounded by spaces, but these spaces are not printed by AsciiDoc
// and should therefore not be passed to the grammar checker
private static final TokenSet SPACE_EATING_TOKENS = TokenSet.create(
AsciiDocTokenTypes.BULLET,
AsciiDocTokenTypes.ENUMERATION);
public Behavior getElementBehavior(@NotNull PsiElement root, @NotNull PsiElement child) {
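    // The order of these checks matters: container nodes are absorbed first so that they are
    // handled as their own context roots, before separators, macros and plain text are classified.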
if (root != child && NODES_TO_CHECK.contains(child.getNode().getElementType())) {
return Behavior.ABSORB;
} else if (root == child && child instanceof AsciiDocAttributeDeclarationImpl) {
if (((AsciiDocAttributeDeclarationImpl) child).hasSpellCheckableContent()) {
return Behavior.TEXT;
} else {
return Behavior.ABSORB;
}
} else if (child instanceof AsciiDocAttributeReference) {
return Behavior.UNKNOWN;
} else if (SEPARATOR_TOKENS.contains(child.getNode().getElementType())) {
return Behavior.SEPARATE;
} else if (root != child && child instanceof AsciiDocInlineMacro && ((AsciiDocInlineMacro) child).getMacroName().equals("footnote")) {
return Behavior.ABSORB;
} else if (spacesIgnoredByAsciiDoc(child)) {
return Behavior.SEPARATE;
} else if (
// A link or URL can contain either a macro text or no text.
// AsciiDoc will display the macro text, or the link/email address if no such text is provided.
// Pass on the content that would be displayed by AsciiDoc to the grammar check.
(child.getNode().getElementType() == AsciiDocTokenTypes.URL_LINK || child.getNode().getElementType() == AsciiDocTokenTypes.URL_EMAIL) &&
isChildOfLinkOrUrl(child)) {
boolean macroTextPresent = false;
ASTNode node = child.getNode();
while (node != null) {
if (node.getElementType() == AsciiDocTokenTypes.MACROTEXT) {
macroTextPresent = true;
break;
}
node = node.getTreeNext();
}
if (macroTextPresent) {
return Behavior.STEALTH;
} else {
return Behavior.TEXT;
}
} else if (child instanceof AsciiDocInlineMacro
|| (child instanceof AsciiDocLink && ((AsciiDocLink) child).getMacroName().equals("xref"))
|| (child instanceof AsciiDocRef)) {
// an inline macro or an xref will be treated as unknown if they don't contain text
LookingForMacroTextVisitor visitor = new LookingForMacroTextVisitor();
child.accept(visitor);
if (visitor.hasFound()) {
return Behavior.TEXT;
} else {
return Behavior.UNKNOWN;
}
} else if (TEXT_TOKENS.contains(child.getNode().getElementType())) {
return Behavior.TEXT;
} else {
return Behavior.STEALTH;
}
}
private boolean isChildOfLinkOrUrl(@NotNull PsiElement child) {
PsiElement parent = child.getParent();
return parent instanceof AsciiDocLink || parent instanceof AsciiDocUrl;
}
private static class LookingForMacroTextVisitor extends PsiElementVisitor {
private boolean found = false;
public boolean hasFound() {
return found;
}
@Override
public void visitElement(@NotNull PsiElement element) {
super.visitElement(element);
if (element.getNode().getElementType() == AsciiDocTokenTypes.MACROTEXT || element.getNode().getElementType() == AsciiDocTokenTypes.REFTEXT) {
found = true;
return;
}
PsiElement child = element.getFirstChild();
while (child != null && !found) {
visitElement(child);
child = child.getNextSibling();
}
}
}
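  // Whitespace counts as ignored when it consists only of spaces and directly precedes or
  // follows a space-eating token (a bullet or enumeration marker).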
private static boolean spacesIgnoredByAsciiDoc(@NotNull PsiElement child) {
return child instanceof PsiWhiteSpace && containsOnlySpaces(child) &&
((child.getPrevSibling() != null && SPACE_EATING_TOKENS.contains(child.getPrevSibling().getNode().getElementType()))
|| (child.getNextSibling() != null && SPACE_EATING_TOKENS.contains(child.getNextSibling().getNode().getElementType())));
}
public static boolean containsOnlySpaces(PsiElement child) {
return child.getNode().getChars().chars().noneMatch(c -> c != ' ');
}
public boolean isMyContextRoot(@NotNull PsiElement psiElement) {
if (psiElement instanceof AsciiDocAttributeDeclarationImpl &&
((AsciiDocAttributeDeclarationImpl) psiElement).hasSpellCheckableContent()) {
return true;
}
if (psiElement instanceof AsciiDocInlineMacro &&
((AsciiDocInlineMacro) psiElement).getMacroName().equals("footnote")) {
return true;
}
return NODES_TO_CHECK.contains(psiElement.getNode().getElementType())
|| psiElement instanceof PsiComment;
}
}
| as list items are now root tokens and texts will be trimmed, this check is no longer necessary (#908)
| src/main/java/org/asciidoc/intellij/grazie/AsciiDocLanguageSupport.java | as list items are now root tokens and texts will be trimmed, this check is no longer necessary (#908) |
|
Java | apache-2.0 | 94c8c0f69d29cac6e5274750287d4e687f41c38b | 0 | NLeSC/Xenon,NLeSC/Xenon | package nl.esciencecenter.xenon.adaptors.webdav;
import nl.esciencecenter.xenon.InvalidLocationException;
import nl.esciencecenter.xenon.adaptors.generic.Location;
public class WebdavLocation extends Location {
public WebdavLocation(String user, String host, int port) {
super(user, host, port);
}
protected WebdavLocation(String location) throws InvalidLocationException {
super(location, WebdavAdaptor.ADAPTOR_SCHEME.get(0));
}
public WebdavLocation(String location, String scheme) throws InvalidLocationException {
this(location);
if (getScheme() == null) {
      //setScheme(scheme); // TODO: this method doesn't exist yet. This may also be redundant, since the superclass constructor now accepts a scheme parameter.
}
}
@Override
protected String getAdaptorName() {
return WebdavAdaptor.ADAPTOR_NAME;
}
@Override
protected int getDefaultPort() {
return WebdavAdaptor.DEFAULT_PORT;
}
public static WebdavLocation parse(String location) throws InvalidLocationException {
return new WebdavLocation(location);
}
public static WebdavLocation parse(String location, String scheme) throws InvalidLocationException {
return new WebdavLocation(location, scheme);
}
}
| src/main/java/nl/esciencecenter/xenon/adaptors/webdav/WebdavLocation.java | package nl.esciencecenter.xenon.adaptors.webdav;
import nl.esciencecenter.xenon.InvalidLocationException;
import nl.esciencecenter.xenon.adaptors.generic.Location;
public class WebdavLocation extends Location {
public WebdavLocation(String user, String host, int port) {
super(user, host, port);
}
protected WebdavLocation(String location) throws InvalidLocationException {
super(location, WebdavAdaptor.ADAPTOR_SCHEME.get(0));
}
public WebdavLocation(String location, String scheme) throws InvalidLocationException {
this(location);
if (getScheme() == null) {
//setScheme(scheme); // TODO: this method doesn't exist yet.
}
}
@Override
protected String getAdaptorName() {
return WebdavAdaptor.ADAPTOR_NAME;
}
@Override
protected int getDefaultPort() {
return WebdavAdaptor.DEFAULT_PORT;
}
@Override
public String toString() {
return super.toString() + getPath();
}
public static WebdavLocation parse(String location) throws InvalidLocationException {
return new WebdavLocation(location);
}
public static WebdavLocation parse(String location, String scheme) throws InvalidLocationException {
return new WebdavLocation(location, scheme);
}
}
| fixed bug in webdavlocation
| src/main/java/nl/esciencecenter/xenon/adaptors/webdav/WebdavLocation.java | fixed bug in webdavlocation |
|
Java | apache-2.0 | 071daad8da4d820a775c888867d7ba5272866b87 | 0 | loganathan001/AsyncHelper,loganathan001/AsyncHelper | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.vishag.async;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.Arrays;
import java.util.Collection;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
/**
* The class SchedulingTaskTest.
*
* @author Loganathan.S <https://github.com/loganathan001>
*/
@RunWith(Parameterized.class)
public class SchedulingTaskTest {
/** The watcher. */
@Rule
public TestRule watcher = new TestWatcherAndLogger();
/** The scheduling task. */
private SchedulingTask schedulingTask;
/**
* Inputs.
*
* @return the collection
*/
@Parameters
public static Collection<Object[]> inputs() {
return Arrays.asList(new Object[][] {
{SchedulingTask.getDefault()},
{SchedulingTask.of(Executors
.newScheduledThreadPool(10)) },
{SchedulingTask.of(Executors
.newScheduledThreadPool(10), AsyncContext.newInstance()) }
});
}
/**
* Instantiates a new scheduling task test.
*
* @param schedulingTask the scheduling task
* @throws Exception the exception
*/
public SchedulingTaskTest(SchedulingTask schedulingTask) throws Exception {
this.schedulingTask = schedulingTask;
}
/**
* Test schedule task.
*
* @throws InterruptedException
* the interrupted exception
*/
@Test
public void testScheduleTask() throws InterruptedException {
int[] retVal = new int[3];
AtomicInteger count = new AtomicInteger(0);
schedulingTask.scheduleTask(10, 100, TimeUnit.MILLISECONDS, true, () -> {
int index = count.getAndIncrement();
retVal[index] = (index + 1) * 10;
}, 3);
Thread.sleep(500);
assertArrayEquals(retVal, new int[] { 10, 20, 30 });
}
/**
* Test schedule tasks.
*
* @throws InterruptedException
* the interrupted exception
*/
@Test
public void testScheduleTasks() throws InterruptedException {
int[] retVal = new int[5];
schedulingTask.scheduleTasks(10, 100, TimeUnit.MILLISECONDS, true, () -> {
retVal[0] = 10;
}, () -> {
retVal[1] = 20;
}, () -> {
retVal[2] = 30;
}, () -> {
retVal[3] = 40;
}, () -> {
retVal[4] = 50;
});
Thread.sleep(1000);
assertArrayEquals(retVal, new int[] { 10, 20, 30, 40, 50 });
}
/**
* Test schedule task single time.
*
* @throws InterruptedException
* the interrupted exception
*/
@Test
public void testScheduleTaskSingleTime() throws InterruptedException {
int[] retVal = new int[] { 0, 20, 20 };
AtomicInteger count = new AtomicInteger(0);
schedulingTask.scheduleTask(10, TimeUnit.MILLISECONDS, () -> {
int index = count.getAndIncrement();
retVal[index] = (index + 1) * 10;
});
Thread.sleep(500);
assertArrayEquals(retVal, new int[] { 10, 20, 20 });
}
/**
* Test schedule tasks until flag.
*
* @throws InterruptedException
* the interrupted exception
*/
@Test
public void testScheduleTasksUntilFlag() throws InterruptedException {
int[] retVal = new int[5];
schedulingTask.scheduleTasksUntilFlag(10, 100, TimeUnit.MILLISECONDS, true, "ScheduledMultipleTasksTest",
() -> {
TestUtil.print("Task 0");
TestUtil.printTime();
retVal[0] += 1;
}, () -> {
TestUtil.print("Task 1");
TestUtil.printTime();
retVal[1] += 1;
}, () -> {
TestUtil.print("Task 2");
TestUtil.printTime();
retVal[2] += 1;
}, () -> {
TestUtil.print("Task 3");
TestUtil.printTime();
retVal[3] += 1;
}, () -> {
TestUtil.print("Task 4");
TestUtil.printTime();
retVal[4] += 1;
});
Thread.sleep(1200);
schedulingTask.notifyFlag("ScheduledMultipleTasksTest");
assertTrue(retVal[0] > 1);
assertTrue(retVal[1] > 1);
assertTrue(retVal[2] > 1);
assertTrue(retVal[3] > 1);
assertTrue(retVal[4] > 1);
}
/**
* Test schedule tasks wait.
*
* @throws InterruptedException
* the interrupted exception
*/
@Test
public void testScheduleTasksWait() throws InterruptedException {
int[] retVal = new int[5];
schedulingTask.scheduleTasksAndWait(0, 100, TimeUnit.MILLISECONDS, true, () -> {
TestUtil.printTime();
retVal[0] = 10;
}, () -> {
TestUtil.printTime();
retVal[1] = 20;
}, () -> {
TestUtil.printTime();
retVal[2] = 30;
}, () -> {
TestUtil.printTime();
retVal[3] = 40;
}, () -> {
TestUtil.printTime();
retVal[4] = 50;
});
TestUtil.printTime();
assertArrayEquals(retVal, new int[] { 10, 20, 30, 40, 50 });
}
/**
* Test schedule task until flag.
*
* @throws InterruptedException
* the interrupted exception
*/
@Test
public void testScheduleTaskUntilFlag() throws InterruptedException {
int[] retVal = new int[5];
schedulingTask.scheduleTaskUntilFlag(10, 100, TimeUnit.MILLISECONDS, true, "ScheduledSingleTasksTest", () -> {
TestUtil.print("Count " + retVal[0]);
TestUtil.printTime();
retVal[0] += 1;
});
Thread.sleep(1200);
schedulingTask.notifyFlag("ScheduledSingleTasksTest");
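    // the task ran every 100 ms for ~1.2 s before the flag was notified, so it must have executed more than 5 times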
assertTrue(retVal[0] > 5);
}
/**
* Test schedule task wait.
*
* @throws InterruptedException
* the interrupted exception
*/
@Test
public void testScheduleTaskWait() throws InterruptedException {
int[] retVal = new int[3];
AtomicInteger count = new AtomicInteger(0);
schedulingTask.scheduleTaskAndWait(0, 300, TimeUnit.MILLISECONDS, true, () -> {
TestUtil.printTime();
int index = count.getAndIncrement();
retVal[index] = (index + 1) * 10;
}, 3);
TestUtil.printTime();
assertArrayEquals(retVal, new int[] { 10, 20, 30 });
}
/**
* Test schedule task wait single time.
*
* @throws InterruptedException
* the interrupted exception
*/
@Test
public void testScheduleTaskWaitSingleTime() throws InterruptedException {
int[] retVal = new int[] { 0, 20, 20 };
AtomicInteger count = new AtomicInteger(0);
schedulingTask.scheduleTaskAndWait(10, TimeUnit.MILLISECONDS, () -> {
TestUtil.printTime();
retVal[count.getAndIncrement()] = 10;
});
TestUtil.printTime();
assertArrayEquals(retVal, new int[] { 10, 20, 20 });
}
/**
* Test close.
*
* @throws Exception the exception
*/
@Test
public void testClose() throws Exception {
SchedulingTask schedulingTask = SchedulingTask.of(Executors.newScheduledThreadPool(5));
schedulingTask.scheduleTaskAndWait(1, TimeUnit.MILLISECONDS, () -> System.out.println("Test1"));
schedulingTask.scheduleTaskAndWait(1, TimeUnit.MILLISECONDS, () -> System.out.println("Test2"));
schedulingTask.close();
schedulingTask.close();
assert(true);
}
/**
* Test close with exception.
*
* @throws Exception the exception
*/
@Test (expected=Exception.class)
public void testCloseWithException() throws Exception {
SchedulingTask schedulingTask = SchedulingTask.of(Executors.newScheduledThreadPool(5));
schedulingTask.scheduleTaskAndWait(1, TimeUnit.MILLISECONDS, () -> System.out.println("Test1"));
schedulingTask.close();
schedulingTask.scheduleTaskAndWait(1, TimeUnit.MILLISECONDS, () -> System.out.println("Test2"));
fail();
}
}
| Project/asynchelper/src/test/java/org/vishag/async/SchedulingTaskTest.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.vishag.async;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.Arrays;
import java.util.Collection;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestRule;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
/**
* The class SchedulingTaskTest.
*
* @author Loganathan.S <https://github.com/loganathan001>
*/
@RunWith(Parameterized.class)
public class SchedulingTaskTest {
/** The watcher. */
@Rule
public TestRule watcher = new TestWatcherAndLogger();
/** The scheduling task. */
private SchedulingTask schedulingTask;
/**
* Inputs.
*
* @return the collection
*/
@Parameters
public static Collection<Object[]> inputs() {
return Arrays.asList(new Object[][] {
{SchedulingTask.getDefault()},
{SchedulingTask.of(Executors
.newScheduledThreadPool(10)) },
{SchedulingTask.of(Executors
.newScheduledThreadPool(10), AsyncContext.newInstance()) }
});
}
/**
* Instantiates a new scheduling task test.
*
* @param schedulingTask the scheduling task
* @throws Exception the exception
*/
public SchedulingTaskTest(SchedulingTask schedulingTask) throws Exception {
this.schedulingTask = schedulingTask;
}
/**
* Test schedule task.
*
* @throws InterruptedException
* the interrupted exception
*/
@Test
public void testScheduleTask() throws InterruptedException {
int[] retVal = new int[3];
AtomicInteger count = new AtomicInteger(0);
schedulingTask.scheduleTask(10, 100, TimeUnit.MILLISECONDS, true, () -> {
int index = count.getAndIncrement();
retVal[index] = (index + 1) * 10;
}, 3);
Thread.sleep(500);
assertArrayEquals(retVal, new int[] { 10, 20, 30 });
}
/**
* Test schedule tasks.
*
* @throws InterruptedException
* the interrupted exception
*/
@Test
public void testScheduleTasks() throws InterruptedException {
int[] retVal = new int[5];
schedulingTask.scheduleTasks(10, 100, TimeUnit.MILLISECONDS, true, () -> {
retVal[0] = 10;
}, () -> {
retVal[1] = 20;
}, () -> {
retVal[2] = 30;
}, () -> {
retVal[3] = 40;
}, () -> {
retVal[4] = 50;
});
Thread.sleep(1000);
assertArrayEquals(retVal, new int[] { 10, 20, 30, 40, 50 });
}
/**
* Test schedule task single time.
*
* @throws InterruptedException
* the interrupted exception
*/
@Test
public void testScheduleTaskSingleTime() throws InterruptedException {
int[] retVal = new int[] { 0, 20, 20 };
AtomicInteger count = new AtomicInteger(0);
schedulingTask.scheduleTask(10, TimeUnit.MILLISECONDS, () -> {
int index = count.getAndIncrement();
retVal[index] = (index + 1) * 10;
});
Thread.sleep(500);
assertArrayEquals(retVal, new int[] { 10, 20, 20 });
}
/**
* Test schedule tasks until flag.
*
* @throws InterruptedException
* the interrupted exception
*/
@Test
public void testScheduleTasksUntilFlag() throws InterruptedException {
int[] retVal = new int[5];
schedulingTask.scheduleTasksUntilFlag(10, 100, TimeUnit.MILLISECONDS, true, "ScheduledMultipleTasksTest",
() -> {
TestUtil.print("Task 0");
TestUtil.printTime();
retVal[0] += 1;
}, () -> {
TestUtil.print("Task 1");
TestUtil.printTime();
retVal[1] += 1;
}, () -> {
TestUtil.print("Task 2");
TestUtil.printTime();
retVal[2] += 1;
}, () -> {
TestUtil.print("Task 3");
TestUtil.printTime();
retVal[3] += 1;
}, () -> {
TestUtil.print("Task 4");
TestUtil.printTime();
retVal[4] += 1;
});
Thread.sleep(1200);
schedulingTask.notifyFlag("ScheduledMultipleTasksTest");
assertTrue(retVal[0] > 1);
assertTrue(retVal[1] > 1);
assertTrue(retVal[2] > 1);
assertTrue(retVal[3] > 1);
assertTrue(retVal[4] > 1);
}
/**
* Test schedule tasks wait.
*
* @throws InterruptedException
* the interrupted exception
*/
@Test
public void testScheduleTasksWait() throws InterruptedException {
int[] retVal = new int[5];
schedulingTask.scheduleTasksAndWait(0, 100, TimeUnit.MILLISECONDS, true, () -> {
TestUtil.printTime();
retVal[0] = 10;
}, () -> {
TestUtil.printTime();
retVal[1] = 20;
}, () -> {
TestUtil.printTime();
retVal[2] = 30;
}, () -> {
TestUtil.printTime();
retVal[3] = 40;
}, () -> {
TestUtil.printTime();
retVal[4] = 50;
});
TestUtil.printTime();
assertArrayEquals(retVal, new int[] { 10, 20, 30, 40, 50 });
}
/**
* Test schedule task until flag.
*
* @throws InterruptedException
* the interrupted exception
*/
@Test
public void testScheduleTaskUntilFlag() throws InterruptedException {
int[] retVal = new int[5];
schedulingTask.scheduleTaskUntilFlag(10, 100, TimeUnit.MILLISECONDS, true, "ScheduledSingleTasksTest", () -> {
TestUtil.print("Count " + retVal[0]);
TestUtil.printTime();
retVal[0] += 1;
});
Thread.sleep(1200);
schedulingTask.notifyFlag("ScheduledSingleTasksTest");
assertTrue(retVal[0] > 5);
}
/**
* Test schedule task wait.
*
* @throws InterruptedException
* the interrupted exception
*/
@Test
public void testScheduleTaskWait() throws InterruptedException {
int[] retVal = new int[3];
AtomicInteger count = new AtomicInteger(0);
schedulingTask.scheduleTaskAndWait(0, 300, TimeUnit.MILLISECONDS, true, () -> {
TestUtil.printTime();
int index = count.getAndIncrement();
retVal[index] = (index + 1) * 10;
}, 3);
TestUtil.printTime();
assertArrayEquals(retVal, new int[] { 10, 20, 30 });
}
/**
* Test schedule task wait single time.
*
* @throws InterruptedException
* the interrupted exception
*/
@Test
public void testScheduleTaskWaitSingleTime() throws InterruptedException {
int[] retVal = new int[] { 0, 20, 20 };
AtomicInteger count = new AtomicInteger(0);
schedulingTask.scheduleTaskAndWait(0, TimeUnit.SECONDS, () -> {
TestUtil.printTime();
retVal[count.getAndIncrement()] = 10;
});
TestUtil.printTime();
assertArrayEquals(retVal, new int[] { 10, 20, 20 });
}
/**
* Test close.
*
* @throws Exception the exception
*/
@Test
public void testClose() throws Exception {
SchedulingTask schedulingTask = SchedulingTask.of(Executors.newScheduledThreadPool(5));
schedulingTask.scheduleTaskAndWait(1, TimeUnit.MILLISECONDS, () -> System.out.println("Test1"));
schedulingTask.scheduleTaskAndWait(1, TimeUnit.MILLISECONDS, () -> System.out.println("Test2"));
schedulingTask.close();
schedulingTask.close();
assert(true);
}
/**
* Test close with exception.
*
* @throws Exception the exception
*/
@Test (expected=Exception.class)
public void testCloseWithException() throws Exception {
SchedulingTask schedulingTask = SchedulingTask.of(Executors.newScheduledThreadPool(5));
schedulingTask.scheduleTaskAndWait(1, TimeUnit.MILLISECONDS, () -> System.out.println("Test1"));
schedulingTask.close();
schedulingTask.scheduleTaskAndWait(1, TimeUnit.MILLISECONDS, () -> System.out.println("Test2"));
fail();
}
}
| Test fix. | Project/asynchelper/src/test/java/org/vishag/async/SchedulingTaskTest.java | Test fix. |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.